From be0377f1037c23df39745d8cf9c560f0d02bdb4b Mon Sep 17 00:00:00 2001 From: Ralph Amissah Date: Sun, 22 Aug 2010 17:46:59 -0400 Subject: markup sample (headers & yaml), links added/updated (minor cleaning) --- data/v1/samples/_sisu/skin/yml/list.yml | 3 ++ data/v1/samples/_sisu/skin/yml/promo.yml | 39 +++++++++++++++++++++- data/v1/samples/accelerando.charles_stross.sst | 4 +-- data/v1/samples/autonomy_markup0.sst | 2 +- data/v1/samples/content.cory_doctorow.sst | 3 ++ ...lman_crusade_for_free_software.sam_williams.sst | 4 ++- data/v1/samples/free_culture.lawrence_lessig.sst | 2 ++ data/v1/samples/free_for_all.peter_wayner.sst | 4 ++- data/v1/samples/gpl3.fsf.sst | 2 +- data/v1/samples/little_brother.cory_doctorow.sst | 24 ++----------- ...the_cathedral_and_the_bazaar.eric_s_raymond.sst | 6 ++-- .../the_wealth_of_networks.yochai_benkler.sst | 4 ++- data/v1/samples/two_bits.christopher_kelty.sst | 4 ++- ...international_sale_of_goods_convention_1980.sst | 2 +- data/v2/samples/_sisu/skin/yml/list.yml | 2 +- data/v2/samples/_sisu/skin/yml/promo.yml | 20 ++++++++++- data/v2/samples/accelerando.charles_stross.sst | 4 --- data/v2/samples/autonomy_markup0.sst | 4 --- data/v2/samples/content.cory_doctorow.sst | 1 + .../democratizing_innovation.eric_von_hippel.sst | 1 + ...lman_crusade_for_free_software.sam_williams.sst | 9 ++--- data/v2/samples/free_culture.lawrence_lessig.sst | 9 +---- data/v2/samples/free_for_all.peter_wayner.sst | 5 +-- data/v2/samples/gpl2.fsf.sst | 3 +- data/v2/samples/gpl3.fsf.sst | 4 +-- data/v2/samples/little_brother.cory_doctorow.sst | 28 +++------------- ...the_cathedral_and_the_bazaar.eric_s_raymond.sst | 3 +- .../the_wealth_of_networks.yochai_benkler.sst | 1 + data/v2/samples/two_bits.christopher_kelty.sst | 1 + 29 files changed, 105 insertions(+), 93 deletions(-) diff --git a/data/v1/samples/_sisu/skin/yml/list.yml b/data/v1/samples/_sisu/skin/yml/list.yml index 4aea9b9..388ef36 100644 --- a/data/v1/samples/_sisu/skin/yml/list.yml +++ 
b/data/v1/samples/_sisu/skin/yml/list.yml @@ -3,8 +3,11 @@ sisu: - sisu open_society: site: + - vs - twon - fc + - content + - di - faif - twobits - ffa diff --git a/data/v1/samples/_sisu/skin/yml/promo.yml b/data/v1/samples/_sisu/skin/yml/promo.yml index a3a6781..9b9e3da 100644 --- a/data/v1/samples/_sisu/skin/yml/promo.yml +++ b/data/v1/samples/_sisu/skin/yml/promo.yml @@ -1,4 +1,4 @@ -# Author: Ralph@Amissah.com +# Author: ralph@amissah.com site: sisu_icon: url: SiSU @@ -47,6 +47,15 @@ site: - title: Ruby Application Archive url: http://raa.ruby-lang.org/project/sisu/ + vs: + title: Viral Spiral + author: David Bollier + year: 2009 + url: viral_spiral.david_bollier + links: + - + title: Source Wiki + url: http://viralspiral.cc/ twon: title: The Wealth of Networks subtitle: How Social Production Transforms Markets and Freedom @@ -76,6 +85,18 @@ site: - title: Source url: http://www.free-culture.cc/ + di: + title: Democratizing Innovation + author: Eric von Hippel + year: 2005 + url: democratizing_innovation.eric_von_hippel + links: + - + title: Wikipedia entry + url: http://en.wikipedia.org/wiki/Democratizing_Innovation + - + title: Source + url: http://web.mit.edu/evhippel/www/democ1.htm faif: title: Free As In Freedom subtitle: Richard Stallman's Crusade for Free Software @@ -160,6 +181,19 @@ site: - title: Software License List url: http://www.fsf.org/licensing/licenses/ + content: + title: CONTENT + subtitle: Selected Essays on Technology, Creativity, Copyright and the Future of the Future + author: Cory Doctorow + year: 2008 + url: content.cory_doctorow + links: + - + title: Home + url: http://craphound.com/content + - + title: Wikipedia entry + url: http://en.wikipedia.org/wiki/Cory_Doctorow littlebrother: title: Little Brother author: Cory Doctorow @@ -169,6 +203,9 @@ site: - title: Home url: http://craphound.com/littlebrother + - + title: Wikipedia entry + url: http://en.wikipedia.org/wiki/Little_Brother_(Cory_Doctorow_novel) search: 
sisu_books_libre_sisusearch: type: sisusearch diff --git a/data/v1/samples/accelerando.charles_stross.sst b/data/v1/samples/accelerando.charles_stross.sst index db53d74..f30d009 100644 --- a/data/v1/samples/accelerando.charles_stross.sst +++ b/data/v1/samples/accelerando.charles_stross.sst @@ -1,4 +1,4 @@ -% SiSU 0.38 +% SiSU 1.0 @title: Accelerando @@ -39,8 +39,6 @@ :A~ @title @author -% :B~ A novel by Charles Stross - 1~dedication Dedication For Feòrag, with love diff --git a/data/v1/samples/autonomy_markup0.sst b/data/v1/samples/autonomy_markup0.sst index de859d3..55fc8e9 100644 --- a/data/v1/samples/autonomy_markup0.sst +++ b/data/v1/samples/autonomy_markup0.sst @@ -1,4 +1,4 @@ -% SiSU 0.42 +% SiSU 1.0 @title: Revisiting the Autonomous Contract diff --git a/data/v1/samples/content.cory_doctorow.sst b/data/v1/samples/content.cory_doctorow.sst index 7c5eb3c..41e5d83 100644 --- a/data/v1/samples/content.cory_doctorow.sst +++ b/data/v1/samples/content.cory_doctorow.sst @@ -26,8 +26,11 @@ {@ Amazon.com}http://www.amazon.com/Content-Selected-Technology-Creativity-Copyright/dp/1892391813 {@ Barnes & Noble}http://search.barnesandnoble.com/Content/Cory-Doctorow/e/9781892391810/?itm=1&USRI=content+cory+doctorow {Little Brother, Cory Doctorow @ SiSU}http://www.jus.uio.no/sisu/little_brother.cory_doctorow +{Free Culture, Lawrence Lessig @ SiSU}http://www.jus.uio.no/sisu/free_culture.lawrence_lessig {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler {Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty +{Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier +{Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel {Free as in Freedom (on Richard M. 
Stallman), Sam Williams @ SiSU}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams {Free For All, Peter Wayner @ SiSU}http://www.jus.uio.no/sisu/free_for_all.peter_wayner {The Cathedral and the Bazaar, Eric S. Raymond @ SiSU }http://www.jus.uio.no/sisu/the_cathedral_and_the_bazaar.eric_s_raymond diff --git a/data/v1/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst b/data/v1/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst index 98a78f7..e94a4cc 100644 --- a/data/v1/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst +++ b/data/v1/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst @@ -23,13 +23,15 @@ {@ Wikipedia}http://en.wikipedia.org/wiki/Free_as_in_Freedom:_Richard_Stallman%27s_Crusade_for_Free_Software {@ Amazon.com}http://www.amazon.com/gp/product/0596002874 {@ Barnes & Noble}http://search.barnesandnoble.com/booksearch/isbnInquiry.asp?isbn=0596002874 +{Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier +{Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler {Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty {Free For All, Peter Wayner @ SiSU}http://www.jus.uio.no/sisu/free_for_all.peter_wayner +{The Cathedral and the Bazaar, Eric S. Raymond @ SiSU }http://www.jus.uio.no/sisu/the_cathedral_and_the_bazaar.eric_s_raymond {Free Culture, Lawrence Lessig @ SiSU}http://www.jus.uio.no/sisu/free_culture.lawrence_lessig {CONTENT, Cory Doctorow @ SiSU}http://www.jus.uio.no/sisu/content.cory_doctorow {Little Brother, Cory Doctorow @ SiSU}http://www.jus.uio.no/sisu/little_brother.cory_doctorow -{The Cathedral and the Bazaar, Eric S. 
Raymond @ SiSU }http://www.jus.uio.no/sisu/the_cathedral_and_the_bazaar.eric_s_raymond @skin: skin_rms diff --git a/data/v1/samples/free_culture.lawrence_lessig.sst b/data/v1/samples/free_culture.lawrence_lessig.sst index b172d0a..0851d53 100644 --- a/data/v1/samples/free_culture.lawrence_lessig.sst +++ b/data/v1/samples/free_culture.lawrence_lessig.sst @@ -44,8 +44,10 @@ {@ Barnes & Noble}http://search.barnesandnoble.com/booksearch/isbnInquiry.asp?isbn=1594200068 {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler {CONTENT, Cory Doctorow @ SiSU }http://www.jus.uio.no/sisu/content.cory_doctorow +{Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier {Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty {Free as in Freedom (on Richard M. Stallman), Sam Williams @ SiSU}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams +{Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel {Free For All, Peter Wayner @ SiSU}http://www.jus.uio.no/sisu/free_for_all.peter_wayner {The Cathedral and the Bazaar, Eric S. Raymond @ SiSU }http://www.jus.uio.no/sisu/the_cathedral_and_the_bazaar.eric_s_raymond {Little Brother, Cory Doctorow @ SiSU }http://www.jus.uio.no/sisu/little_brother.cory_doctorow diff --git a/data/v1/samples/free_for_all.peter_wayner.sst b/data/v1/samples/free_for_all.peter_wayner.sst index 9578ba0..7d70454 100644 --- a/data/v1/samples/free_for_all.peter_wayner.sst +++ b/data/v1/samples/free_for_all.peter_wayner.sst @@ -1,4 +1,4 @@ -% SiSU 0.38 +% SiSU 1.0 @title: Free For All @@ -41,6 +41,8 @@ {@ Amazon.com}http://www.amazon.com/gp/product/0066620503 {@ Barnes & Noble}http://search.barnesandnoble.com/booksearch/isbnInquiry.asp?isbn=0066620503 {Free as in Freedom (on Richard M. 
Stallman), Sam Williams @ SiSU}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams +{Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel +{Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier {Free Culture, Lawrence Lessig @ SiSU}http://www.jus.uio.no/sisu/free_culture.lawrence_lessig {CONTENT, Cory Doctorow @ SiSU }http://www.jus.uio.no/sisu/content.cory_doctorow {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler diff --git a/data/v1/samples/gpl3.fsf.sst b/data/v1/samples/gpl3.fsf.sst index 92b026a..5a4a34c 100644 --- a/data/v1/samples/gpl3.fsf.sst +++ b/data/v1/samples/gpl3.fsf.sst @@ -20,7 +20,7 @@ {GPL3 @ FSF}http://gplv3.fsf.org/ {GPL @ SiSU}http://www.jus.uio.no/sisu/gpl3.fsf {GPL3 source text}http://www.gnu.org/licenses/gpl-3.0.txt -{ Free as In Freedom - Richard Stallman's Crusade for Free Software }http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams +{Free as In Freedom - Richard Stallman's Crusade for Free Software}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams @skin: skin_gnu diff --git a/data/v1/samples/little_brother.cory_doctorow.sst b/data/v1/samples/little_brother.cory_doctorow.sst index 881aa0b..0513027 100644 --- a/data/v1/samples/little_brother.cory_doctorow.sst +++ b/data/v1/samples/little_brother.cory_doctorow.sst @@ -18,9 +18,12 @@ {@ Amazon.com}http://www.amazon.com/Little-Brother-Cory-Doctorow/dp/B002IT5OMA {@ Barnes & Noble}http://search.barnesandnoble.com/Little-Brother/Cory-Doctorow/e/9780765319852 {CONTENT, Cory Doctorow @ SiSU }http://www.jus.uio.no/sisu/content.cory_doctorow +{Free Culture, Lawrence Lessig @ SiSU}http://www.jus.uio.no/sisu/free_culture.lawrence_lessig {The Wealth of Networks, Yochai Benkler @ 
SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler +{Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier {Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty {Free as in Freedom (on Richard M. Stallman), Sam Williams @ SiSU}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams +{Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel {Free For All, Peter Wayner @ SiSU}http://www.jus.uio.no/sisu/free_for_all.peter_wayner {The Cathedral and the Bazaar, Eric S. Raymond @ SiSU }http://www.jus.uio.no/sisu/the_cathedral_and_the_bazaar.eric_s_raymond @@ -6527,24 +6530,3 @@ Creative Commons is not a party to this License, and makes no warranty whatsoeve Except for the limited purpose of indicating to the public that the Work is licensed under the CCPL, Creative Commons does not authorize the use by either party of the trademark "Creative Commons" or any related trademark or logo of Creative Commons without the prior written consent of Creative Commons. Any permitted use will be in compliance with Creative Commons' then-current trademark usage guidelines, as may be published on its website or otherwise made available upon request from time to time. For the avoidance of doubt, this trademark restriction does not form part of this License. Creative Commons may be contacted at http://creativecommons.org/. - -% -% Creative Commons License -% -%
-% -% Little Brother -% -% -% by -% -% -% Cory Doctorow -% -% -% is licensed under a -% -% -% Creative Commons Attribution-Noncommercial-Share Alike 3.0 United States License -% . -% diff --git a/data/v1/samples/the_cathedral_and_the_bazaar.eric_s_raymond.sst b/data/v1/samples/the_cathedral_and_the_bazaar.eric_s_raymond.sst index bba13e6..1fe50e0 100644 --- a/data/v1/samples/the_cathedral_and_the_bazaar.eric_s_raymond.sst +++ b/data/v1/samples/the_cathedral_and_the_bazaar.eric_s_raymond.sst @@ -1,4 +1,4 @@ -% SiSU 0.38 +% SiSU 1.0 @title: The Cathedral and the Bazaar @@ -23,6 +23,8 @@ @links: {The Cathedral and the Bazaar @ SiSU }http://www.jus.uio.no/sisu/the_cathedral_and_the_bazaar.eric_s_raymond {The Cathedral and the Bazaar, Source }http://www.catb.org/~esr/writings/cathedral-bazaar/cathedral-bazaar/ {@ Wikipedia}http://en.wikipedia.org/wiki/The_Cathedral_and_the_Bazaar +{Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier +{Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler {Free Culture, Lawrence Lessig @ SiSU}http://www.jus.uio.no/sisu/free_culture.lawrence_lessig {Free as in Freedom (on Richard Stallman), Sam Williams @ SiSU}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams @@ -38,8 +40,6 @@ :A~ @title @author -% :B~ Eric Steven Raymond - 1~ The Cathedral and the Bazaar Linux is subversive. Who would have thought even five years ago (1991) that a world-class operating system could coalesce as if by magic out of part-time hacking by several thousand developers scattered all over the planet, connected only by the tenuous strands of the Internet? 
diff --git a/data/v1/samples/the_wealth_of_networks.yochai_benkler.sst b/data/v1/samples/the_wealth_of_networks.yochai_benkler.sst index f7934c8..a7ae407 100644 --- a/data/v1/samples/the_wealth_of_networks.yochai_benkler.sst +++ b/data/v1/samples/the_wealth_of_networks.yochai_benkler.sst @@ -1,4 +1,4 @@ -% SiSU 0.69 +% SiSU 1.0 @title: The Wealth of Networks @@ -43,7 +43,9 @@ {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler {tWoN book index @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.book_index.yochai_benkler/doc.html {@ Wikipedia}http://en.wikipedia.org/wiki/The_Wealth_of_Networks +{Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier {Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty +{Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel {Free Culture, Lawrence Lessig @ SiSU}http://www.jus.uio.no/sisu/free_culture.lawrence_lessig {Free as in Freedom (on Richard M. 
Stallman), Sam Williams @ SiSU}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams {Free For All, Peter Wayner @ SiSU}http://www.jus.uio.no/sisu/free_for_all.peter_wayner diff --git a/data/v1/samples/two_bits.christopher_kelty.sst b/data/v1/samples/two_bits.christopher_kelty.sst index e183869..39e34b6 100644 --- a/data/v1/samples/two_bits.christopher_kelty.sst +++ b/data/v1/samples/two_bits.christopher_kelty.sst @@ -1,4 +1,4 @@ -% SiSU 0.69.0 +% SiSU 1.0 @title: Two Bits @@ -38,6 +38,8 @@ @links: {Two Bits, Christopher Kelty: home page}http://twobits.net/ {Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty +{Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel +{Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler {Free Culture, Lawrence Lessig @ SiSU}http://www.jus.uio.no/sisu/free_culture.lawrence_lessig {CONTENT, Cory Doctorow @ SiSU}http://www.jus.uio.no/sisu/content.cory_doctorow diff --git a/data/v1/samples/un_contracts_international_sale_of_goods_convention_1980.sst b/data/v1/samples/un_contracts_international_sale_of_goods_convention_1980.sst index a95ee7f..a73fe25 100644 --- a/data/v1/samples/un_contracts_international_sale_of_goods_convention_1980.sst +++ b/data/v1/samples/un_contracts_international_sale_of_goods_convention_1980.sst @@ -1,4 +1,4 @@ -% SiSU 0.38 +% SiSU 1.0 @title: United Nations Convention On Contracts For The International Sale Of Goods, 1980 (CISG) diff --git a/data/v2/samples/_sisu/skin/yml/list.yml b/data/v2/samples/_sisu/skin/yml/list.yml index 9b49f2e..388ef36 100644 --- a/data/v2/samples/_sisu/skin/yml/list.yml +++ b/data/v2/samples/_sisu/skin/yml/list.yml @@ -3,11 +3,11 @@ sisu: - sisu open_society: site: + - vs - twon - fc - content - 
di - - ffa - faif - twobits - ffa diff --git a/data/v2/samples/_sisu/skin/yml/promo.yml b/data/v2/samples/_sisu/skin/yml/promo.yml index fd79607..9b9e3da 100644 --- a/data/v2/samples/_sisu/skin/yml/promo.yml +++ b/data/v2/samples/_sisu/skin/yml/promo.yml @@ -1,4 +1,4 @@ -# Author: Ralph@Amissah.com +# Author: ralph@amissah.com site: sisu_icon: url: SiSU @@ -15,6 +15,15 @@ site: - title: SiSU Book Samples and Markup Examples url: SiSU/examples.html + - + title: Manual + url: http://sisudoc.org/sisu/sisu_manual/ + - + title: Markup + url: http://sisudoc.org/sisu/sisu_markup/ + - + title: Commands + url: http://sisudoc.org/sisu/sisu_commands/ - title: SiSU Download url: SiSU/download.html @@ -38,6 +47,15 @@ site: - title: Ruby Application Archive url: http://raa.ruby-lang.org/project/sisu/ + vs: + title: Viral Spiral + author: David Bollier + year: 2009 + url: viral_spiral.david_bollier + links: + - + title: Source Wiki + url: http://viralspiral.cc/ twon: title: The Wealth of Networks subtitle: How Social Production Transforms Markets and Freedom diff --git a/data/v2/samples/accelerando.charles_stross.sst b/data/v2/samples/accelerando.charles_stross.sst index c07ef20..1d10d75 100644 --- a/data/v2/samples/accelerando.charles_stross.sst +++ b/data/v2/samples/accelerando.charles_stross.sst @@ -17,8 +17,6 @@ :oclc: 57682282 :isbn: 9780441012848 -% @keywords: - @date: :published: 2005-07-05 :available: 2005-07-05 @@ -43,8 +41,6 @@ :A~ @title @author -% :B~ A novel by Charles Stross - 1~dedication Dedication For Feòrag, with love diff --git a/data/v2/samples/autonomy_markup0.sst b/data/v2/samples/autonomy_markup0.sst index fb3ba49..5803660 100644 --- a/data/v2/samples/autonomy_markup0.sst +++ b/data/v2/samples/autonomy_markup0.sst @@ -31,10 +31,6 @@ {Arbitration}http://www.jus.uio.no/lm/arbitration/toc.html {Electronic Commerce}http://www.jus.uio.no/lm/electronic.commerce/toc.html -% @skin: skin_sisu - -% @promo: sisu, ruby, sisu_search_libre, open_society - % (Draft 0.90 - 
2000-08-27) :A~ @title @author~{* Ralph Amissah is a Fellow of Pace University, Institute for International Commercial Law. http://www.cisg.law.pace.edu/
RA lectured on the private law aspects of international trade whilst at the Law Faculty of the University of Tromsø, Norway. http://www.jus.uit.no/
RA built the first web site related to international trade law, now known as lexmercatoria.org and described as "an (international | transnational) commercial law and e-commerce infrastructure monitor". http://lexmercatoria.org/
RA is interested in the law, technology, commerce nexus. RA works with the law firm Amissahs.
/{[This is a draft document and subject to change.]}/
All errors are very much my own.
ralph@amissah.com }~ diff --git a/data/v2/samples/content.cory_doctorow.sst b/data/v2/samples/content.cory_doctorow.sst index 952db53..d19adb1 100644 --- a/data/v2/samples/content.cory_doctorow.sst +++ b/data/v2/samples/content.cory_doctorow.sst @@ -33,6 +33,7 @@ { Little Brother, Cory Doctorow @ SiSU }http://www.jus.uio.no/sisu/little_brother.cory_doctorow { Free Culture, Lawrence Lessig @ SiSU }http://www.jus.uio.no/sisu/free_culture.lawrence_lessig { The Wealth of Networks, Yochai Benkler @ SiSU }http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler + { Viral Spiral, David Bollier@ SiSU }http://www.jus.uio.no/sisu/viral_spiral.david_bollier { Democratizing Innovation, Eric von Hippel @ SiSU }http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel { Two Bits, Christopher Kelty @ SiSU }http://www.jus.uio.no/sisu/two_bits.christopher_kelty { Free as in Freedom (on Richard M. Stallman), Sam Williams @ SiSU }http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams diff --git a/data/v2/samples/democratizing_innovation.eric_von_hippel.sst b/data/v2/samples/democratizing_innovation.eric_von_hippel.sst index 6af237e..886455f 100644 --- a/data/v2/samples/democratizing_innovation.eric_von_hippel.sst +++ b/data/v2/samples/democratizing_innovation.eric_von_hippel.sst @@ -36,6 +36,7 @@ {Eric von Hippel}http://web.mit.edu/evhippel/www/ {Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel {@ Wikipedia}http://en.wikipedia.org/wiki/Democratizing_Innovation + {Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier {Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty {Free Culture, Lawrence Lessig @ SiSU}http://www.jus.uio.no/sisu/free_culture.lawrence_lessig {CONTENT, Cory Doctorow @ SiSU}http://www.jus.uio.no/sisu/content.cory_doctorow diff --git 
a/data/v2/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst b/data/v2/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst index 025d980..a205aa7 100644 --- a/data/v2/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst +++ b/data/v2/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst @@ -16,20 +16,17 @@ @date: :published: 2002 -@language: US - @notes: March 2002 -% @catalogue: isbn=0596002874 - @links: { Home and Source }http://faifzilla.org/ {Free as in Freedom (on Richard Stallman), Sam Williams @ SiSU}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams {@ Wikipedia}http://en.wikipedia.org/wiki/Free_as_in_Freedom:_Richard_Stallman%27s_Crusade_for_Free_Software {@ Amazon.com}http://www.amazon.com/gp/product/0596002874 {@ Barnes & Noble}http://search.barnesandnoble.com/booksearch/isbnInquiry.asp?isbn=0596002874 - {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler + {Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier {Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel + {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler {Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty {Free For All, Peter Wayner @ SiSU}http://www.jus.uio.no/sisu/free_for_all.peter_wayner {The Cathedral and the Bazaar, Eric S. 
Raymond @ SiSU }http://www.jus.uio.no/sisu/the_cathedral_and_the_bazaar.eric_s_raymond @@ -41,8 +38,6 @@ :skin: skin_rms :breaks: new=:A,:B,:C,1 -% @promo: sisu_icon, sample_search, fsf, open_society, sisu - :A~ @title @author 1~preface Preface diff --git a/data/v2/samples/free_culture.lawrence_lessig.sst b/data/v2/samples/free_culture.lawrence_lessig.sst index cb5fe2c..f38b6a0 100644 --- a/data/v2/samples/free_culture.lawrence_lessig.sst +++ b/data/v2/samples/free_culture.lawrence_lessig.sst @@ -23,14 +23,6 @@ :isbn: 9781594200069 :oclc: 53324884 -% @date.created: 2004-04-08 - -% @catalogue: isbn=1594200068 - -@language: US - -@vocabulary: none - @make: :breaks: new=:B; break=1 :skin: skin_lessig @@ -43,6 +35,7 @@ {@ Amazon.com}http://www.amazon.com/gp/product/1594200068 {@ Barnes & Noble}http://search.barnesandnoble.com/booksearch/isbnInquiry.asp?isbn=1594200068 {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler + {Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier {Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel {Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty {Free as in Freedom (on Richard M. 
Stallman), Sam Williams @ SiSU}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams diff --git a/data/v2/samples/free_for_all.peter_wayner.sst b/data/v2/samples/free_for_all.peter_wayner.sst index 7a16827..5eb6ba0 100644 --- a/data/v2/samples/free_for_all.peter_wayner.sst +++ b/data/v2/samples/free_for_all.peter_wayner.sst @@ -31,10 +31,6 @@ :skin: skin_wayner :image: center -@vocabulary: none - -% @catalogue isbn=0066620503 - @links: {The Original Authoritative and Updated Version of the Text available in pdf}http://www.wayner.org/books/ffa {Free For All @ SiSU}http://www.jus.uio.no/sisu/free_for_all.peter_wayner @@ -42,6 +38,7 @@ {@ Barnes & Noble}http://search.barnesandnoble.com/booksearch/isbnInquiry.asp?isbn=0066620503 {Free as in Freedom (on Richard M. Stallman), Sam Williams @ SiSU}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler + {Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier {Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel {Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty {The Cathedral and the Bazaar, Eric S. 
Raymond @ SiSU }http://www.jus.uio.no/sisu/the_cathedral_and_the_bazaar.eric_s_raymond diff --git a/data/v2/samples/gpl2.fsf.sst b/data/v2/samples/gpl2.fsf.sst index 0842642..9949f41 100644 --- a/data/v2/samples/gpl2.fsf.sst +++ b/data/v2/samples/gpl2.fsf.sst @@ -19,7 +19,8 @@ {GPL @ SiSU}http://www.jus.uio.no/sisu/gpl2.fsf {Markup}http://www.jus.uio.no/sisu/sample/markup/gpl2.fsf.sst {Syntax}http://www.jus.uio.no/sisu/sample/syntax/gpl2.fsf.sst.html - { Free as In Freedom - Richard Stallman's Crusade for Free Software }http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams + {Free as In Freedom - Richard Stallman's Crusade for Free Software}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams + {Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier @make: :skin: skin_gnu diff --git a/data/v2/samples/gpl3.fsf.sst b/data/v2/samples/gpl3.fsf.sst index 547125d..fd69535 100644 --- a/data/v2/samples/gpl3.fsf.sst +++ b/data/v2/samples/gpl3.fsf.sst @@ -24,13 +24,11 @@ {GPL @ SiSU}http://www.jus.uio.no/sisu/gpl3.fsf { Syntax }http://www.jus.uio.no/sisu/sample/syntax/gpl.fsf.sst.html {GPL3 source text}http://www.gnu.org/licenses/gpl-3.0.txt - { Free as In Freedom - Richard Stallman's Crusade for Free Software }http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams + {Free as In Freedom - Richard Stallman's Crusade for Free Software}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams @make: :skin: skin_gnu -% @promo: sisu_icon, fsf, open_society, sisu - :A~ GNU GENERAL PUBLIC LICENSE :B~ Version 3, 29 June 2007 diff --git a/data/v2/samples/little_brother.cory_doctorow.sst b/data/v2/samples/little_brother.cory_doctorow.sst index 16d6607..b4b6464 100644 --- a/data/v2/samples/little_brother.cory_doctorow.sst +++ 
b/data/v2/samples/little_brother.cory_doctorow.sst @@ -25,16 +25,19 @@ :breaks: break=1 :skin: skin_little_brother -@links: { Little Brother home }http://craphound.com/littlebrother +@links: + { Little Brother home }http://craphound.com/littlebrother {Little Brother, Cory Doctorow @ SiSU }http://www.jus.uio.no/sisu/little_brother.cory_doctorow - { @ Wikipedia }http://en.wikipedia.org/wiki/Little_Brother_(Cory_Doctorow_novel) + {@ Wikipedia }http://en.wikipedia.org/wiki/Little_Brother_(Cory_Doctorow_novel) {@ Amazon.com}http://www.amazon.com/Little-Brother-Cory-Doctorow/dp/B002IT5OMA {@ Barnes & Noble}http://search.barnesandnoble.com/Little-Brother/Cory-Doctorow/e/9780765319852 {CONTENT, Cory Doctorow @ SiSU }http://www.jus.uio.no/sisu/content.cory_doctorow {Free Culture, Lawrence Lessig @ SiSU}http://www.jus.uio.no/sisu/free_culture.lawrence_lessig {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler + {Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier {Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty {Free as in Freedom (on Richard M. Stallman), Sam Williams @ SiSU}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams + {Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel {Free For All, Peter Wayner @ SiSU}http://www.jus.uio.no/sisu/free_for_all.peter_wayner {The Cathedral and the Bazaar, Eric S. 
Raymond @ SiSU }http://www.jus.uio.no/sisu/the_cathedral_and_the_bazaar.eric_s_raymond @@ -6541,24 +6544,3 @@ Creative Commons is not a party to this License, and makes no warranty whatsoeve Except for the limited purpose of indicating to the public that the Work is licensed under the CCPL, Creative Commons does not authorize the use by either party of the trademark "Creative Commons" or any related trademark or logo of Creative Commons without the prior written consent of Creative Commons. Any permitted use will be in compliance with Creative Commons' then-current trademark usage guidelines, as may be published on its website or otherwise made available upon request from time to time. For the avoidance of doubt, this trademark restriction does not form part of this License. Creative Commons may be contacted at http://creativecommons.org/. - -% -% Creative Commons License -% -%
-% -% Little Brother -% -% -% by -% -% -% Cory Doctorow -% -% -% is licensed under a -% -% -% Creative Commons Attribution-Noncommercial-Share Alike 3.0 United States License -% . -% diff --git a/data/v2/samples/the_cathedral_and_the_bazaar.eric_s_raymond.sst b/data/v2/samples/the_cathedral_and_the_bazaar.eric_s_raymond.sst index 40c59b4..207ed53 100644 --- a/data/v2/samples/the_cathedral_and_the_bazaar.eric_s_raymond.sst +++ b/data/v2/samples/the_cathedral_and_the_bazaar.eric_s_raymond.sst @@ -25,6 +25,7 @@ {The Cathedral and the Bazaar, Source }http://www.catb.org/~esr/writings/cathedral-bazaar/cathedral-bazaar/ {@ Wikipedia}http://en.wikipedia.org/wiki/The_Cathedral_and_the_Bazaar {Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel + {Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier {Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty {Free as in Freedom (on Richard Stallman), Sam Williams @ SiSU}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler @@ -44,8 +45,6 @@ :A~ @title @author -% :B~ Eric Steven Raymond - 1~ The Cathedral and the Bazaar Linux is subversive. Who would have thought even five years ago (1991) that a world-class operating system could coalesce as if by magic out of part-time hacking by several thousand developers scattered all over the planet, connected only by the tenuous strands of the Internet? 
diff --git a/data/v2/samples/the_wealth_of_networks.yochai_benkler.sst b/data/v2/samples/the_wealth_of_networks.yochai_benkler.sst index 200fb92..db276f9 100644 --- a/data/v2/samples/the_wealth_of_networks.yochai_benkler.sst +++ b/data/v2/samples/the_wealth_of_networks.yochai_benkler.sst @@ -42,6 +42,7 @@ {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler {@ Wikipedia}http://en.wikipedia.org/wiki/The_Wealth_of_Networks {Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel + {Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier {Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty {Free Culture, Lawrence Lessig @ SiSU}http://www.jus.uio.no/sisu/free_culture.lawrence_lessig {CONTENT, Cory Doctorow @ SiSU}http://www.jus.uio.no/sisu/content.cory_doctorow diff --git a/data/v2/samples/two_bits.christopher_kelty.sst b/data/v2/samples/two_bits.christopher_kelty.sst index cf63805..1a022e8 100644 --- a/data/v2/samples/two_bits.christopher_kelty.sst +++ b/data/v2/samples/two_bits.christopher_kelty.sst @@ -25,6 +25,7 @@ @links: {Two Bits, Christopher Kelty: home page}http://twobits.net/ {Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty + {Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier {Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler {Free Culture, Lawrence Lessig @ SiSU}http://www.jus.uio.no/sisu/free_culture.lawrence_lessig -- cgit v1.2.3 From 5eac7b5545016533093a7c87b1218dd3db55eb30 Mon Sep 17 00:00:00 2001 From: Ralph Amissah Date: Sun, 22 Aug 2010 17:49:30 -0400 Subject: markup samples, headers small fixes --- 
data/v1/samples/free_culture.lawrence_lessig.sst | 2 +- data/v1/samples/little_brother.cory_doctorow.sst | 2 +- data/v2/samples/accelerando.charles_stross.sst | 3 ++- data/v2/samples/democratizing_innovation.eric_von_hippel.sst | 7 ++++--- ...ichard_stallman_crusade_for_free_software.sam_williams.sst | 1 + data/v2/samples/free_culture.lawrence_lessig.sst | 9 ++++++++- data/v2/samples/free_for_all.peter_wayner.sst | 5 +++-- data/v2/samples/gpl2.fsf.sst | 3 ++- data/v2/samples/gpl3.fsf.sst | 3 ++- data/v2/samples/little_brother.cory_doctorow.sst | 2 +- .../samples/the_cathedral_and_the_bazaar.eric_s_raymond.sst | 3 ++- data/v2/samples/the_wealth_of_networks.yochai_benkler.sst | 11 +++++------ data/v2/samples/two_bits.christopher_kelty.sst | 9 +++++---- 13 files changed, 37 insertions(+), 23 deletions(-) diff --git a/data/v1/samples/free_culture.lawrence_lessig.sst b/data/v1/samples/free_culture.lawrence_lessig.sst index 0851d53..81c5e6f 100644 --- a/data/v1/samples/free_culture.lawrence_lessig.sst +++ b/data/v1/samples/free_culture.lawrence_lessig.sst @@ -1,4 +1,4 @@ -% SiSU 0.69 +% SiSU 1.0 @title: Free Culture diff --git a/data/v1/samples/little_brother.cory_doctorow.sst b/data/v1/samples/little_brother.cory_doctorow.sst index 0513027..36ec4ab 100644 --- a/data/v1/samples/little_brother.cory_doctorow.sst +++ b/data/v1/samples/little_brother.cory_doctorow.sst @@ -4,7 +4,7 @@ @creator: Doctorow, Cory |email doctorow@craphound.com -@rights: Copyright (C) Cory Doctorow, 2008. This book is distributed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 license. That means:
You are free:
* to Share - to copy, distribute and transmit the work
* to Remix - to adapt the work
Under the following conditions:
* Attribution. You must attribute the work in the manner specified by the author or licensor (but not in any way that suggests that they endorse you or your use of the work).
* Noncommercial. You may not use this work for commercial purposes.
* Share Alike. If you alter, transform, or build upon this work, you may distribute the resulting work only under the same or similar license to this one.
* For any reuse or distribution, you must make clear to others the license terms of this work. The best way to do this is with a link http://craphound.com/littlebrother
* Any of the above conditions can be waived if you get my permission
More info here: http://creativecommons.org/licenses/by-nc-sa/3.0/
See the end of this file for the complete legalese. [Creative Commons Attribution-Noncommercial-Share Alike 3.0 United States License] +@rights: Copyright (C) Cory Doctorow, 2008. This book is distributed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 license. That means:
You are free:
* to Share - to copy, distribute and transmit the work
* to Remix - to adapt the work
Under the following conditions:
* Attribution. You must attribute the work in the manner specified by the author or licensor (but not in any way that suggests that they endorse you or your use of the work).
* Noncommercial. You may not use this work for commercial purposes.
* Share Alike. If you alter, transform, or build upon this work, you may distribute the resulting work only under the same or similar license to this one.
* For any reuse or distribution, you must make clear to others the license terms of this work. The best way to do this is with a link http://craphound.com/littlebrother
* Any of the above conditions can be waived if you get my permission
More info here: http://creativecommons.org/licenses/by-nc-sa/3.0/us/
See the end of this file for the complete legalese. [Creative Commons Attribution-Noncommercial-Share Alike 3.0 United States License] @topic_register: SiSU:markup sample:book;book:novel:fiction diff --git a/data/v2/samples/accelerando.charles_stross.sst b/data/v2/samples/accelerando.charles_stross.sst index 1d10d75..c750493 100644 --- a/data/v2/samples/accelerando.charles_stross.sst +++ b/data/v2/samples/accelerando.charles_stross.sst @@ -2,7 +2,8 @@ @title: Accelerando -@creator: Stross, Charles +@creator: + :author: Stross, Charles @rights: :copyright: Copyright (C) Charles Stross, 2005. diff --git a/data/v2/samples/democratizing_innovation.eric_von_hippel.sst b/data/v2/samples/democratizing_innovation.eric_von_hippel.sst index 886455f..ee567f0 100644 --- a/data/v2/samples/democratizing_innovation.eric_von_hippel.sst +++ b/data/v2/samples/democratizing_innovation.eric_von_hippel.sst @@ -1,13 +1,14 @@ % SiSU 2.0 @title: Democratizing Innovation - :languae: US + :language: US -@creator: von Hipel, Eric +@creator: + :author: von Hipel, Eric @classify: :type: Book - :topic_register: SiSU:markup sample:book;technological innovations:economic aspects;diffusion of innovations;democracy + :topic_register: SiSU:markup sample:book;innovation;technological innovations:economic aspects;diffusion of innovations;democracy;open source software:innovation :isbn: 9780262720472 :oclc: 56880369 diff --git a/data/v2/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst b/data/v2/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst index a205aa7..563dd23 100644 --- a/data/v2/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst +++ b/data/v2/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst @@ -12,6 +12,7 @@ @classify: :topic_register: SiSU:markup sample:book;copyright;GNU/Linux:GPL|copyleft|free software;free software;Software:Software 
Libré;GPL;Linux:GNU|Software Libré;book:biography;programming :oclc: 49044520 + :isbn: 9780596002879 @date: :published: 2002 diff --git a/data/v2/samples/free_culture.lawrence_lessig.sst b/data/v2/samples/free_culture.lawrence_lessig.sst index f38b6a0..147483c 100644 --- a/data/v2/samples/free_culture.lawrence_lessig.sst +++ b/data/v2/samples/free_culture.lawrence_lessig.sst @@ -3,7 +3,8 @@ @title: Free Culture :subtitle: How Big Media Uses Technology and the Law to Lock Down Culture and Control Creativity -@creator: Lessig, Lawrence +@creator: + :author: Lessig, Lawrence @date: :published: 2004-03-25 @@ -13,6 +14,8 @@ :modified: 2004-03-25 :valid: 2004-03-25 +% :created: 2004-04-08 + @rights: :copyright: Copyright (C) Lawrence Lessig, 2004. :license: Free Culture is Licensed under a Creative Commons License. This License permits non-commercial use of this work, so long as attribution is given. For more information about the license, visit http://creativecommons.org/licenses/by-nc/1.0/ @@ -23,6 +26,10 @@ :isbn: 9781594200069 :oclc: 53324884 +% :isbn: 1594200068 + +% :language: US + @make: :breaks: new=:B; break=1 :skin: skin_lessig diff --git a/data/v2/samples/free_for_all.peter_wayner.sst b/data/v2/samples/free_for_all.peter_wayner.sst index 5eb6ba0..605648c 100644 --- a/data/v2/samples/free_for_all.peter_wayner.sst +++ b/data/v2/samples/free_for_all.peter_wayner.sst @@ -3,7 +3,8 @@ @title: Free For All :subtitle: How Linux and the Free Software Movement Undercut the High Tech Titans -@creator: Wayner, Peter +@creator: + :author: Wayner, Peter @classify: :type: Book @@ -23,7 +24,7 @@ :modified: 2002-12-22 :valid: 2002-12-22 -@language: US +% :language: US @make: :num_top: 1 diff --git a/data/v2/samples/gpl2.fsf.sst b/data/v2/samples/gpl2.fsf.sst index 9949f41..1d06226 100644 --- a/data/v2/samples/gpl2.fsf.sst +++ b/data/v2/samples/gpl2.fsf.sst @@ -2,7 +2,8 @@ @title: GNU GENERAL PUBLIC LICENSE v2 -@creator: Free Software Foundation +@creator: + :author: Free 
Software Foundation @rights: :copyright: Copyright 1989, 1991 Free Software Foundation, Inc. 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. diff --git a/data/v2/samples/gpl3.fsf.sst b/data/v2/samples/gpl3.fsf.sst index fd69535..2767991 100644 --- a/data/v2/samples/gpl3.fsf.sst +++ b/data/v2/samples/gpl3.fsf.sst @@ -2,7 +2,8 @@ @title: GNU GENERAL PUBLIC LICENSE v3 -@creator: Free Software Foundation +@creator: + :author: Free Software Foundation @rights: :copyright: Copyright (C) 2007 Free Software Foundation, Inc. http://fsf.org/ diff --git a/data/v2/samples/little_brother.cory_doctorow.sst b/data/v2/samples/little_brother.cory_doctorow.sst index b4b6464..9b96d38 100644 --- a/data/v2/samples/little_brother.cory_doctorow.sst +++ b/data/v2/samples/little_brother.cory_doctorow.sst @@ -11,7 +11,7 @@ @rights: :copyright: Copyright (C) Cory Doctorow, 2008. :illustrations: Richard Wilkinson, 2009 - :license: This book is distributed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 license. That means:
You are free:
* to Share - to copy, distribute and transmit the work
* to Remix - to adapt the work
Under the following conditions:
* Attribution. You must attribute the work in the manner specified by the author or licensor (but not in any way that suggests that they endorse you or your use of the work).
* Noncommercial. You may not use this work for commercial purposes.
* Share Alike. If you alter, transform, or build upon this work, you may distribute the resulting work only under the same or similar license to this one.
* For any reuse or distribution, you must make clear to others the license terms of this work. The best way to do this is with a link http://craphound.com/littlebrother
* Any of the above conditions can be waived if you get my permission
More info here: http://creativecommons.org/licenses/by-nc-sa/3.0/
See the end of this file for the complete legalese. [Creative Commons Attribution-Noncommercial-Share Alike 3.0 United States License] + :license: This book is distributed under a Creative Commons Attribution-NonCommercial-ShareAlike 3.0 license. That means:
You are free:
* to Share - to copy, distribute and transmit the work
* to Remix - to adapt the work
Under the following conditions:
* Attribution. You must attribute the work in the manner specified by the author or licensor (but not in any way that suggests that they endorse you or your use of the work).
* Noncommercial. You may not use this work for commercial purposes.
* Share Alike. If you alter, transform, or build upon this work, you may distribute the resulting work only under the same or similar license to this one.
* For any reuse or distribution, you must make clear to others the license terms of this work. The best way to do this is with a link http://craphound.com/littlebrother
* Any of the above conditions can be waived if you get my permission
More info here: http://creativecommons.org/licenses/by-nc-sa/3.0/us/
See the end of this file for the complete legalese. [Creative Commons Attribution-Noncommercial-Share Alike 3.0 United States License] @classify: :subject: Novel diff --git a/data/v2/samples/the_cathedral_and_the_bazaar.eric_s_raymond.sst b/data/v2/samples/the_cathedral_and_the_bazaar.eric_s_raymond.sst index 207ed53..ab7d53e 100644 --- a/data/v2/samples/the_cathedral_and_the_bazaar.eric_s_raymond.sst +++ b/data/v2/samples/the_cathedral_and_the_bazaar.eric_s_raymond.sst @@ -2,7 +2,8 @@ @title: The Cathedral and the Bazaar -@creator: Raymond, Eric S. +@creator: + :author: Raymond, Eric S. @classify: :type: Book diff --git a/data/v2/samples/the_wealth_of_networks.yochai_benkler.sst b/data/v2/samples/the_wealth_of_networks.yochai_benkler.sst index db276f9..d45d6be 100644 --- a/data/v2/samples/the_wealth_of_networks.yochai_benkler.sst +++ b/data/v2/samples/the_wealth_of_networks.yochai_benkler.sst @@ -4,7 +4,8 @@ :subtitle: How Social Production Transforms Markets and Freedom :language: US -@creator: Benkler, Yochai +@creator: + :author: Benkler, Yochai @classify: :type: Book @@ -12,6 +13,8 @@ :isbn: 9780300110562 :oclc: 61881089 +% :isbn: 0300110561 + @rights: :copyright: Copyright (C) 2006 Yochai Benkler. :license: All rights reserved. Subject to the exception immediately following, this book may not be reproduced, in whole or in part, including illustrations, in any form (beyond that copying permitted by Sections 107 and 108 of the U.S. Copyright Law and except by reviewers for the public press), without written permission from the publishers. http://creativecommons.org/licenses/by-nc-sa/2.5/ The author has made an online version of the book available under a Creative Commons Noncommercial Sharealike license; it can be accessed through the author's website at http://www.benkler.org. 
@@ -26,11 +29,7 @@ :modified: 2006-04-03 :valid: 2006-04-03 -% @date.created: 2006-01-27 - -% @catalogue: isbn=0300110561 - -@vocabulary: none +% :created: 2006-01-27 @make: :skin: skin_won_benkler diff --git a/data/v2/samples/two_bits.christopher_kelty.sst b/data/v2/samples/two_bits.christopher_kelty.sst index 1a022e8..85efb46 100644 --- a/data/v2/samples/two_bits.christopher_kelty.sst +++ b/data/v2/samples/two_bits.christopher_kelty.sst @@ -3,19 +3,20 @@ @title: Two Bits :subtitle: The Cultural Significance of Free Software -@creator: Kelty, Christopher M. +@creator: + :author: Kelty, Christopher M. @rights: :copyright: © 2008 Duke University Press
Printed in the United States of America on acid-free paper ∞
Designed by C. H. Westmoreland
Typeset in Charis (an Open Source font) by Achorn International
Library of Congress Cataloging-in-Publication data and republication acknowledgments appear on the last printed pages of this book. :license: Licensed under the Creative Commons Attribution-NonCommercial-Share Alike License, available at http://creativecommons.org/licenses/by-nc-sa/3.0/ or by mail from Creative Commons, 559 Nathan Abbott Way, Stanford, Calif. 94305, U.S.A. "NonCommercial" as defined in this license specifically excludes any sale of this work or any portion thereof for money, even if sale does not result in a profit by the seller or if the sale is by a 501(c)(3) nonprofit or NGO.
Duke University Press gratefully acknowledges the support of HASTAC (Humanities, Arts, Science, and Technology Advanced Collaboratory), which provided funds to help support the electronic interface of this book.
Two Bits is accessible on the Web at twobits.net. @classify: - :topic_register: open source software:social aspects;software:development:geeks;anthropology:geeks;book:subject:anthropology|information society|geeks;society;programming + :topic_register: open source software:social aspects;software:development:geeks;anthropology:geeks;book:subject:anthropology|information society|geeks;society;programming;society:information society; :oclc: 183914703 -@date: 2008 +% :isbn: 978082234264-9 -% @catalogue: isbn=978-0-8223-4264-9 +@date: 2008 @make: :skin: skin_2bits -- cgit v1.2.3 From 51875ebb26b6198de4f4450e71600340c0f264a6 Mon Sep 17 00:00:00 2001 From: Ralph Amissah Date: Sun, 22 Aug 2010 17:50:43 -0400 Subject: markup sample, add missing skin for v1 markup of CONTENT, Cory Doctorow --- data/v1/samples/_sisu/skin/doc/skin_content.rb | 95 ++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) create mode 100644 data/v1/samples/_sisu/skin/doc/skin_content.rb diff --git a/data/v1/samples/_sisu/skin/doc/skin_content.rb b/data/v1/samples/_sisu/skin/doc/skin_content.rb new file mode 100644 index 0000000..9ca3c0a --- /dev/null +++ b/data/v1/samples/_sisu/skin/doc/skin_content.rb @@ -0,0 +1,95 @@ +# coding: utf-8 +=begin + * Name: SiSU - Simple information Structuring Universe - Structured information, Serialized Units + * Author: Ralph Amissah + * http://www.jus.uio.no/sisu + * http://www.jus.uio.no/sisu/SiSU/download + * Description: Skin prepared for Little Brother, Cory Doctorow + * License: Same as SiSU see http://www.jus.uio.no/sisu + * Notes: Site default appearance variables set in defaults.rb + Generic site wide modifications set here scribe_skin.rb, and this file required by other "scribes" instead of defaults.rb +=end +module SiSU_Viz + require SiSU_lib + '/defaults' + class Skin + #% path + def path_root # the only parameter that cannot be changed here + './sisu/' + end + def path_rel + '../' + end + #% url + def url_home + 'http://craphound.com/content' + 
end + def url_author + 'http://craphound.com' + end + def url_txt # text to go with url usually stripped url + 'craphound.com/content' + end + #% color + def color_band1 + '"#ffffff"' + end + #% text + def text_hp + 'craphound.com/content' + end + def text_home + 'CONTENT' + end + #% icon + def icon_home_button + 'content_doctorow.png' + end + def icon_home_banner + icon_home_button + end + #% banner + def banner_home_button + %{
#{png_home}
\n} + end + def banner_home_and_index_buttons + %{
#{png_home}
 This text sub- 
 Table of Contents 
#{table_close}
 #{table_close}} + end + def banner_band + %{
+

CONTENT

+

Cory Doctorow

+ #{table_close}} + end + #% credits + def credits_splash + %{
+The author's original pdf is available at
craphound.com/content
+available at
Amazon.com and
+Barnes & Noble
+This book is Copyright Cory Doctorow © 2008
+Under a Creative Commons License,
+Attribution-NonCommercial-NoDerivs 3.0:
+* Attribution — You must attribute the work in the manner specified by the author or licensor (but not in any way that suggests that they endorse you or your use of the work);
+* Noncommercial. You may not use this work for commercial purposes;
+* Share Alike - If you alter, transform, or build upon this work, you may distribute the resulting work only under the same or similar license to this one. +
+<http://creativecommons.org/licenses/by-nc-sa/3.0/>
+
} + end + end + class TeX + def header_center + "\\chead{\\href{#{@vz.url_home}}{craphound.com/content}}" + end + def home_url + "\\href{#{@vz.url_home}}{craphound.com/content}" + end + def home + "\\href{#{@vz.url_home}}{CONTENT}" + end + def owner_chapter + "Document owner details" + end + end +end +__END__ -- cgit v1.2.3 From 29eac1a443731d33d8420849942e93cf7662937e Mon Sep 17 00:00:00 2001 From: Ralph Amissah Date: Sun, 22 Aug 2010 17:55:37 -0400 Subject: markup sample "CONTENT", use preferred curly braces with emphasized text (v1 & v2) --- data/v1/samples/content.cory_doctorow.sst | 112 +++++++++++++++--------------- 1 file changed, 56 insertions(+), 56 deletions(-) diff --git a/data/v1/samples/content.cory_doctorow.sst b/data/v1/samples/content.cory_doctorow.sst index 41e5d83..6ce53f0 100644 --- a/data/v1/samples/content.cory_doctorow.sst +++ b/data/v1/samples/content.cory_doctorow.sst @@ -41,11 +41,11 @@ I've been releasing my books online for free since my first novel, Down and Out in the Magic Kingdom, came out in 2003, and with every one of those books, I've included a little essay explaining why I do this sort of thing. ~# -I was tempted to write another one of these essays for this collection, but then it hit me: *this is a collection of essays that are largely concerned with exactly this subject*. ~# +I was tempted to write another one of these essays for this collection, but then it hit me: *{this is a collection of essays that are largely concerned with exactly this subject}*. ~# You see, I don't just write essays about copyright to serve as forewards to my books: I write them for magazine,s, newspapers, and websites -- I write speeches on the subject for audiences of every description and in every nation. And finally, here, I've collected my favorites, the closest I've ever come to a Comprehensive Doctorow Manifesto. ~# -So I'm going to skip the foreword this time around: the *whole book* is my explanation for why I'm giving it away for free online. 
~# +So I'm going to skip the foreword this time around: the *{whole book}* is my explanation for why I'm giving it away for free online. ~# If you like this book and you want to thank me, here's what I'd ask you to do, in order of preference: ~# @@ -378,7 +378,7 @@ Here's a true story about a user I know who was stopped by DRM. She's smart, col Before I go farther, I want us all to stop a moment and marvel at this. Here is someone who is practically technophobic, but who was able to construct a mental model of sufficient accuracy that she figured out that she could connect her cables in the right order and dub her digital disc off to analog tape. I imagine that everyone in this room is the front-line tech support for someone in her or his family: wouldn't it be great if all our non-geek friends and relatives were this clever and imaginative? -I also want to point out that this is the proverbial honest user. She's not making a copy for the next door neighbors. She's not making a copy and selling it on a blanket on Canal Street. She's not ripping it to her hard-drive, DivX encoding it and putting it in her Kazaa sharepoint. She's doing something *honest* -- moving it from one format to another. She's home taping. +I also want to point out that this is the proverbial honest user. She's not making a copy for the next door neighbors. She's not making a copy and selling it on a blanket on Canal Street. She's not ripping it to her hard-drive, DivX encoding it and putting it in her Kazaa sharepoint. She's doing something *{honest}* -- moving it from one format to another. She's home taping. Except she fails. There's a DRM system called Macrovision embedded -- by law -- in every VHS that messes with the vertical blanking interval in the signal and causes any tape made in this fashion to fail. Macrovision can be defeated for about $10 with a gadget readily available on eBay. But our infringer doesn't know that. She's "honest." Technically unsophisticated. 
Not stupid, mind you -- just naive. @@ -394,7 +394,7 @@ Matter of fact, the next guy wasn't. Dmitry Sklyarov is a Russian programmer who Anticircumvention is a powerful tool for people who want to exclude competitors. If you claim that your car engine firmware is a "copyrighted work," you can sue anyone who makes a tool for interfacing with it. That's not just bad news for mechanics -- think of the hotrodders who want to chip their cars to tweak the performance settings. We have companies like Lexmark claiming that their printer cartridges contain copyrighted works -- software that trips an "I am empty" flag when the toner runs out, and have sued a competitor who made a remanufactured cartridge that reset the flag. Even garage-door opener companies have gotten in on the act, claiming that their receivers' firmware are copyrighted works. Copyrighted cars, print carts and garage-door openers: what's next, copyrighted light-fixtures? -Even in the context of legitimate -- excuse me, "traditional" -- copyrighted works like movies on DVDs, anticircumvention is bad news. Copyright is a delicate balance. It gives creators and their assignees some rights, but it also reserves some rights to the public. For example, an author has no right to prohibit anyone from transcoding his books into assistive formats for the blind. More importantly, though, a creator has a very limited say over what you can do once you lawfully acquire her works. If I buy your book, your painting, or your DVD, it belongs to me. It's my property. Not my "intellectual property" -- a whacky kind of pseudo-property that's swiss-cheesed with exceptions, easements and limitations -- but real, no-fooling, actual tangible *property* -- the kind of thing that courts have been managing through property law for centuries. +Even in the context of legitimate -- excuse me, "traditional" -- copyrighted works like movies on DVDs, anticircumvention is bad news. Copyright is a delicate balance. 
It gives creators and their assignees some rights, but it also reserves some rights to the public. For example, an author has no right to prohibit anyone from transcoding his books into assistive formats for the blind. More importantly, though, a creator has a very limited say over what you can do once you lawfully acquire her works. If I buy your book, your painting, or your DVD, it belongs to me. It's my property. Not my "intellectual property" -- a whacky kind of pseudo-property that's swiss-cheesed with exceptions, easements and limitations -- but real, no-fooling, actual tangible *{property}* -- the kind of thing that courts have been managing through property law for centuries. But anticirumvention lets rightsholders invent new and exciting copyrights for themselves -- to write private laws without accountability or deliberation -- that expropriate your interest in your physical property to their favor. Region-coded DVDs are an example of this: there's no copyright here or in anywhere I know of that says that an author should be able to control where you enjoy her creative works, once you've paid for them. I can buy a book and throw it in my bag and take it anywhere from Toronto to Timbuktu, and read it wherever I am: I can even buy books in America and bring them to the UK, where the author may have an exclusive distribution deal with a local publisher who sells them for double the US shelf-price. When I'm done with it, I can sell it on or give it away in the UK. Copyright lawyers call this "First Sale," but it may be simpler to think of it as "Capitalism." @@ -418,13 +418,13 @@ We've never had this principle: in fact, we've always had just the reverse. Thin The courts affirm this again and again. It used to be illegal to plug anything that didn't come from AT&T into your phone-jack. 
They claimed that this was for the safety of the network, but really it was about propping up this little penny-ante racket that AT&T had in charging you a rental fee for your phone until you'd paid for it a thousand times over. -When that ban was struck down, it created the market for third-party phone equipment, from talking novelty phones to answering machines to cordless handsets to headsets -- billions of dollars of economic activity that had been suppressed by the closed interface. Note that AT&T was one of the big beneficiaries of this: they *also* got into the business of making phone-kit. +When that ban was struck down, it created the market for third-party phone equipment, from talking novelty phones to answering machines to cordless handsets to headsets -- billions of dollars of economic activity that had been suppressed by the closed interface. Note that AT&T was one of the big beneficiaries of this: they *{also}* got into the business of making phone-kit. DRM is the software equivalent of these closed hardware interfaces. Robert Scoble is a Softie who has an excellent blog, where he wrote an essay about the best way to protect your investment in the digital music you buy. Should you buy Apple iTunes music, or Microsoft DRM music? Scoble argued that Microsoft's music was a sounder investment, because Microsoft would have more downstream licensees for its proprietary format and therefore you'd have a richer ecosystem of devices to choose from when you were shopping for gizmos to play your virtual records on. What a weird idea: that we should evaluate our record-purchases on the basis of which recording company will allow the greatest diversity of record-players to play its discs! That's like telling someone to buy the Betamax instead of the Edison Kinetoscope because Thomas Edison is a crank about licensing his patents; all the while ignoring the world's relentless march to the more open VHS format. -It's a bad business. 
DVD is a format where the guy who makes the records gets to design the record players. Ask yourself: how much innovation has there been over the past decade of DVD players? They've gotten cheaper and smaller, but where are the weird and amazing new markets for DVD that were opened up by the VCR? There's a company that's manufacturing the world's first HDD-based DVD jukebox, a thing that holds 100 movies, and they're charging *$27,000* for this thing. We're talking about a few thousand dollars' worth of components -- all that other cost is the cost of anticompetition. +It's a bad business. DVD is a format where the guy who makes the records gets to design the record players. Ask yourself: how much innovation has there been over the past decade of DVD players? They've gotten cheaper and smaller, but where are the weird and amazing new markets for DVD that were opened up by the VCR? There's a company that's manufacturing the world's first HDD-based DVD jukebox, a thing that holds 100 movies, and they're charging *{$27,000}* for this thing. We're talking about a few thousand dollars' worth of components -- all that other cost is the cost of anticompetition. -- @@ -482,7 +482,7 @@ There's one thing that every new art business-model had in common: it embraced t This is the overweening characteristic of every single successful new medium: it is true to itself. The Luther Bible didn't succeed on the axes that made a hand-copied monk Bible valuable: they were ugly, they weren't in Church Latin, they weren't read aloud by someone who could interpret it for his lay audience, they didn't represent years of devoted-with-a-capital-D labor by someone who had given his life over to God. The thing that made the Luther Bible a success was its scalability: it was more popular because it was more proliferate: all success factors for a new medium pale beside its profligacy. The most successful organisms on earth are those that reproduce the most: bugs and bacteria, nematodes and virii. 
Reproduction is the best of all survival strategies. -Piano rolls didn't sound as good as the music of a skilled pianist: but they *scaled better*. Radio lacked the social elements of live performance, but more people could build a crystal set and get it aimed correctly than could pack into even the largest Vaudeville house. MP3s don't come with liner notes, they aren't sold to you by a hipper-than-thou record store clerk who can help you make your choice, bad rips and truncated files abound: I once downloaded a twelve-second copy of "Hey Jude" from the original Napster. Yet MP3 is outcompeting the CD. I don't know what to do with CDs anymore: I get them, and they're like the especially nice garment bag they give you at the fancy suit shop: it's nice and you feel like a goof for throwing it out, but Christ, how many of these things can you usefully own? I can put ten thousand songs on my laptop, but a comparable pile of discs, with liner notes and so forth -- that's a liability: it's a piece of my monthly storage-locker costs. +Piano rolls didn't sound as good as the music of a skilled pianist: but they *{scaled better}*. Radio lacked the social elements of live performance, but more people could build a crystal set and get it aimed correctly than could pack into even the largest Vaudeville house. MP3s don't come with liner notes, they aren't sold to you by a hipper-than-thou record store clerk who can help you make your choice, bad rips and truncated files abound: I once downloaded a twelve-second copy of "Hey Jude" from the original Napster. Yet MP3 is outcompeting the CD. I don't know what to do with CDs anymore: I get them, and they're like the especially nice garment bag they give you at the fancy suit shop: it's nice and you feel like a goof for throwing it out, but Christ, how many of these things can you usefully own? 
I can put ten thousand songs on my laptop, but a comparable pile of discs, with liner notes and so forth -- that's a liability: it's a piece of my monthly storage-locker costs. Here are the two most important things to know about computers and the Internet: @@ -508,9 +508,9 @@ Today, at this very second, people read words off of screens for every hour that Paper books are the packaging that books come in. Cheap printer-binderies like the Internet Bookmobile that can produce a full bleed, four color, glossy cover, printed spine, perfect-bound book in ten minutes for a dollar are the future of paper books: when you need an instance of a paper book, you generate one, or part of one, and pitch it out when you're done. I landed at SEA-TAC on Monday and burned a couple CDs from my music collection to listen to in the rental car. When I drop the car off, I'll leave them behind. Who needs 'em? -Whenever a new technology has disrupted copyright, we've changed copyright. Copyright isn't an ethical proposition, it's a utilitarian one. There's nothing *moral* about paying a composer tuppence for the piano-roll rights, there's nothing *immoral* about not paying Hollywood for the right to videotape a movie off your TV. They're just the best way of balancing out so that people's physical property rights in their VCRs and phonographs are respected and so that creators get enough of a dangling carrot to go on making shows and music and books and paintings. +Whenever a new technology has disrupted copyright, we've changed copyright. Copyright isn't an ethical proposition, it's a utilitarian one. There's nothing *{moral}* about paying a composer tuppence for the piano-roll rights, there's nothing *{immoral}* about not paying Hollywood for the right to videotape a movie off your TV. 
They're just the best way of balancing out so that people's physical property rights in their VCRs and phonographs are respected and so that creators get enough of a dangling carrot to go on making shows and music and books and paintings. -Technology that disrupts copyright does so because it simplifies and cheapens creation, reproduction and distribution. The existing copyright businesses exploit inefficiencies in the old production, reproduction and distribution system, and they'll be weakened by the new technology. But new technology always gives us more art with a wider reach: that's what tech is *for*. +Technology that disrupts copyright does so because it simplifies and cheapens creation, reproduction and distribution. The existing copyright businesses exploit inefficiencies in the old production, reproduction and distribution system, and they'll be weakened by the new technology. But new technology always gives us more art with a wider reach: that's what tech is *{for}*. Tech gives us bigger pies that more artists can get a bite out of. That's been tacitly acknowledged at every stage of the copyfight since the piano roll. When copyright and technology collide, it's copyright that changes. @@ -540,7 +540,7 @@ If I had been a less good customer for Apple's hardware, I would have been fine. As it was Apple rewarded my trust, evangelism and out-of-control spending by treating me like a crook and locking me out of my own music, at a time when my Powerbook was in the shop -- i.e., at a time when I was hardly disposed to feel charitable to Apple. -I'm an edge case here, but I'm a *leading edge* case. If Apple succeeds in its business plans, it will only be a matter of time until even average customers have upgraded enough hardware and bought enough music to end up where I am. +I'm an edge case here, but I'm a *{leading edge}* case. 
If Apple succeeds in its business plans, it will only be a matter of time until even average customers have upgraded enough hardware and bought enough music to end up where I am. You know what I would totally buy? A record player that let me play everybody's records. Right now, the closest I can come to that is an open source app called VLC, but it's clunky and buggy and it didn't come pre-installed on my computer. @@ -562,7 +562,7 @@ When Mako Analysis issued their report last month advising phone companies to st The phone companies' customers want Symbian phones and for now, at least, the phone companies understand that if they don't sell them, someone else will. -The market opportunity for a truly capable devices is enormous. There's a company out there charging *$27,000* for a DVD jukebox -- go and eat their lunch! Steve Jobs isn't going to do it: he's off at the D conference telling studio execs not to release hi-def movies until they're sure no one will make a hi-def DVD burner that works with a PC. +The market opportunity for a truly capable devices is enormous. There's a company out there charging *{$27,000}* for a DVD jukebox -- go and eat their lunch! Steve Jobs isn't going to do it: he's off at the D conference telling studio execs not to release hi-def movies until they're sure no one will make a hi-def DVD burner that works with a PC. Maybe they won't buy into his BS, but they're also not much interested in what you have to sell. At the Broadcast Protection Discussion Group meetings where the Broadcast Flag was hammered out, the studios' position was, "We'll take anyone's DRM except Microsoft's and Philips'." When I met with UK broadcast wonks about the European version of the Broadcast Flag underway at the Digital Video Broadcasters' forum, they told me, "Well, it's different in Europe: mostly they're worried that some American company like Microsoft will get their claws into European television." 
@@ -592,19 +592,19 @@ The carrot is the entertainment industries' promise of access to their copyright The stick is the entertainment industries' threat of lawsuits for companies that don't comply. In the last century, entertainment companies fought over the creation of records, radios, jukeboxes, cable TV, VCRs, MP3 players and other technologies that made it possible to experience a copyrighted work in a new way without permission. There's one battle that serves as the archetype for the rest: the fight over the VCR. -The film studios were outraged by Sony's creation of the VCR. They had found a DRM supplier they preferred, a company called Discovision that made non-recordable optical discs. Discovision was the only company authorized to play back movies in your living room. The only way to get a copyrighted work onto a VCR cassette was to record it off the TV, without permission. The studios argued that Sony -- whose Betamax was the canary in this legal coalmine -- was breaking the law by unjustly endangering their revenue from Discovision royalties. Sure, they *could* just sell pre-recorded Betamax tapes, but Betamax was a read-write medium: they could be *copied*. Moreover, your personal library of Betamax recordings of the Sunday night movie would eat into the market for Discovision discs: why would anyone buy a pre-recorded video cassette when they could amass all the video they needed with a home recorder and a set of rabbit-ears? +The film studios were outraged by Sony's creation of the VCR. They had found a DRM supplier they preferred, a company called Discovision that made non-recordable optical discs. Discovision was the only company authorized to play back movies in your living room. The only way to get a copyrighted work onto a VCR cassette was to record it off the TV, without permission. 
The studios argued that Sony -- whose Betamax was the canary in this legal coalmine -- was breaking the law by unjustly endangering their revenue from Discovision royalties. Sure, they *{could}* just sell pre-recorded Betamax tapes, but Betamax was a read-write medium: they could be *{copied}*. Moreover, your personal library of Betamax recordings of the Sunday night movie would eat into the market for Discovision discs: why would anyone buy a pre-recorded video cassette when they could amass all the video they needed with a home recorder and a set of rabbit-ears? -The Supreme Court threw out these arguments in a 1984 5-4 decision, the "Betamax Decision." This decision held that the VCR was legal because it was "capable of sustaining a substantially non-infringing use." That means that if you make a technology that your customers *can* use legally, you're not on the hook for the illegal stuff they do. +The Supreme Court threw out these arguments in a 1984 5-4 decision, the "Betamax Decision." This decision held that the VCR was legal because it was "capable of sustaining a substantially non-infringing use." That means that if you make a technology that your customers *{can}* use legally, you're not on the hook for the illegal stuff they do. This principle guided the creation of virtually every piece of IT invented since: the Web, search engines, YouTube, Blogger, Skype, ICQ, AOL, MySpace... You name it, if it's possible to violate copyright with it, the thing that made it possible is the Betamax principle. -Unfortunately, the Supremes shot the Betamax principle in the gut two years ago, with the Grokster decision. This decision says that a company can be found liable for its customers' bad acts if they can be shown to have "induced" copyright infringement. So, if your company advertises your product for an infringing use, or if it can be shown that you had infringement in mind at the design stage, you can be found liable for your customers' copying. 
The studios and record labels and broadcasters *love* this ruling, and they like to think that it's even broader than what the courts set out. For example, Viacom is suing Google for inducing copyright infringement by allowing YouTube users to flag some of their videos as private. Private videos can't be found by Viacom's copyright-enforcement bots, so Viacom says that privacy should be illegal, and that companies that give you the option of privacy should be sued for anything you do behind closed doors. +Unfortunately, the Supremes shot the Betamax principle in the gut two years ago, with the Grokster decision. This decision says that a company can be found liable for its customers' bad acts if they can be shown to have "induced" copyright infringement. So, if your company advertises your product for an infringing use, or if it can be shown that you had infringement in mind at the design stage, you can be found liable for your customers' copying. The studios and record labels and broadcasters *{love}* this ruling, and they like to think that it's even broader than what the courts set out. For example, Viacom is suing Google for inducing copyright infringement by allowing YouTube users to flag some of their videos as private. Private videos can't be found by Viacom's copyright-enforcement bots, so Viacom says that privacy should be illegal, and that companies that give you the option of privacy should be sued for anything you do behind closed doors. The gutshot Betamax doctrine will bleed out all over the industry for decades (or until the courts or Congress restore it to health), providing a grisly reminder of what happens to companies that try to pour the entertainment companies' old wine into new digital bottles without permission. The tape-recorder was legal, but the digital tape-recorder is an inducement to infringement, and must be stopped. 
The promise of access to content and the threat of legal execution for non-compliance is enough to lure technology's biggest players to the DRM table. -I started attending DRM meetings in March, 2002, on behalf of my former employers, the Electronic Frontier Foundation. My first meeting was the one where Broadcast Flag was born. The Broadcast Flag was weird even by DRM standards. Broadcasters are required, by law, to deliver TV and radio without DRM, so that any standards-compliant receiver can receive them. The airwaves belong to the public, and are loaned to broadcasters who have to promise to serve the public interest in exchange. But the MPAA and the broadcasters wanted to add DRM to digital TV, and so they proposed that a law should be passed that would make all manufacturers promise to *pretend* that there was DRM on broadcast signals, receiving them and immediately squirreling them away in encrypted form. +I started attending DRM meetings in March, 2002, on behalf of my former employers, the Electronic Frontier Foundation. My first meeting was the one where Broadcast Flag was born. The Broadcast Flag was weird even by DRM standards. Broadcasters are required, by law, to deliver TV and radio without DRM, so that any standards-compliant receiver can receive them. The airwaves belong to the public, and are loaned to broadcasters who have to promise to serve the public interest in exchange. But the MPAA and the broadcasters wanted to add DRM to digital TV, and so they proposed that a law should be passed that would make all manufacturers promise to *{pretend}* that there was DRM on broadcast signals, receiving them and immediately squirreling them away in encrypted form. 
The Broadcast Flag was hammered out in a group called the Broadcast Protection Discussion Group (BPDG) a sub-group from the MPAA's "Content Protection Technology Working Group," which also included reps from all the big IT companies (Microsoft, Apple, Intel, and so on), consumer electronics companies (Panasonic, Philips, Zenith), cable companies, satellite companies, and anyone else who wanted to pay $100 to attend the "public" meetings, held every six weeks or so (you can attend these meetings yourself if you find yourself near LAX on one of the upcoming dates). @@ -612,7 +612,7 @@ CPTWG (pronounced Cee-Pee-Twig) is a venerable presence in the DRM world. It was The first part of the meeting is usually taken up with administrative business and presentations from DRM vendors, who come out to promise that this time they've really, really figured out how to make computers worse at copying. The real meat comes after the lunch, when the group splits into a series of smaller meetings, many of them closed-door and private (the representatives of the organizations responsible for managing DRM on DVDs splinter off at this point). -Then comes the working group meetings, like the BPDG. The BPDG was nominally set up to set up the rules for the Broadcast Flag. Under the Flag, manufacturers would be required to limit their "outputs and recording methods" to a set of "approved technologies." Naturally, every manufacturer in the room showed up with a technology to add to the list of approved technologies -- and the sneakier ones showed up with reasons why their competitors' technologies *shouldn't* be approved. If the Broadcast Flag became law, a spot on the "approved technologies" list would be a license to print money: everyone who built a next-gen digital TV would be required, by law, to buy only approved technologies for their gear. +Then comes the working group meetings, like the BPDG. The BPDG was nominally set up to set up the rules for the Broadcast Flag. 
Under the Flag, manufacturers would be required to limit their "outputs and recording methods" to a set of "approved technologies." Naturally, every manufacturer in the room showed up with a technology to add to the list of approved technologies -- and the sneakier ones showed up with reasons why their competitors' technologies *{shouldn't}* be approved. If the Broadcast Flag became law, a spot on the "approved technologies" list would be a license to print money: everyone who built a next-gen digital TV would be required, by law, to buy only approved technologies for their gear. The CPTWG determined that there would be three "chairmen" of the meetings: a representative from the broadcasters, a representative from the studios, and a representative from the IT industry (note that no "consumer rights" chair was contemplated -- we proposed one and got laughed off the agenda). The IT chair was filled by an Intel representative, who seemed pleased that the MPAA chair, Fox Studios's Andy Setos, began the process by proposing that the approved technologies should include only two technologies, both of which Intel partially owned. @@ -628,9 +628,9 @@ One of these rules of conduct was "robustness." As a condition of approval, manu Another was "renewability:" the ability of the studios to revoke outputs that had been compromised in the field. The studios expected the manufacturers to make products with remote "kill switches" that could be used to shut down part or all of their device if someone, somewhere had figured out how to do something naughty with it. They promised that we'd establish criteria for renewability later, and that it would all be "fair." -But we soldiered on. The MPAA had a gift for resolving the worst snarls: when shouting failed, they'd lead any recalcitrant player out of the room and negotiate in secret with them, leaving the rest of us to cool our heels. 
Once, they took the Microsoft team out of the room for *six hours*, then came back and announced that digital video would be allowed to output on non-DRM monitors at a greatly reduced resolution (this "feature" appears in Vista as "fuzzing"). +But we soldiered on. The MPAA had a gift for resolving the worst snarls: when shouting failed, they'd lead any recalcitrant player out of the room and negotiate in secret with them, leaving the rest of us to cool our heels. Once, they took the Microsoft team out of the room for *{six hours}*, then came back and announced that digital video would be allowed to output on non-DRM monitors at a greatly reduced resolution (this "feature" appears in Vista as "fuzzing"). -The further we went, the more nervous everyone became. We were headed for the real meat of the negotiations: the *criteria* by which approved technology would be evaluated: how many bits of crypto would you need? Which ciphers would be permissible? Which features would and wouldn't be allowed? +The further we went, the more nervous everyone became. We were headed for the real meat of the negotiations: the *{criteria}* by which approved technology would be evaluated: how many bits of crypto would you need? Which ciphers would be permissible? Which features would and wouldn't be allowed? Then the MPAA dropped the other shoe: the sole criteria for inclusion on the list would be the approval of one of its member-companies, or a quorum of broadcasters. In other words, the Broadcast Flag wouldn't be an "objective standard," describing the technical means by which video would be locked away -- it would be purely subjective, up to the whim of the studios. You could have the best product in the world, and they wouldn't approve it if your business-development guys hadn't bought enough drinks for their business-development guys at a CES party. 
@@ -734,7 +734,7 @@ As disastrous as Pluto Nash was, it was practically painless when compared to th !_ The disastrous thing about Napster wasn't that it it existed, but rather that the record industry managed to kill it. -Napster had an industry-friendly business-model: raise venture capital, start charging for access to the service, and then pay billions of dollars to the record companies in exchange for licenses to their works. Yes, they kicked this plan off without getting permission from the record companies, but that's not so unusual. The record companies followed the same business plan a hundred years ago, when they started recording sheet music without permission, raising capital and garnering profits, and *then* working out a deal to pay the composers for the works they'd built their fortunes on. +Napster had an industry-friendly business-model: raise venture capital, start charging for access to the service, and then pay billions of dollars to the record companies in exchange for licenses to their works. Yes, they kicked this plan off without getting permission from the record companies, but that's not so unusual. The record companies followed the same business plan a hundred years ago, when they started recording sheet music without permission, raising capital and garnering profits, and *{then}* working out a deal to pay the composers for the works they'd built their fortunes on. Napster's plan was plausible. They had the fastest-adopted technology in the history of the world, garnering 52,000,000 users in 18 months -- more than had voted for either candidate in the preceding US election! -- and discovering, via surveys, that a sizable portion would happily pay between $10 and $15 a month for the service. What's more, Napster's architecture included a gatekeeper that could be used to lock-out non-paying users. 
@@ -760,7 +760,7 @@ The sue-em-all-and-let-God-sort-em-out plan was a flop in the box office, a flop # -YouTube, 2007, bears some passing similarity to Napster, 2001. Founded by a couple guys in a garage, rocketed to popular success, heavily capitalized by a deep-pocketed giant. Its business model? Turn popularity into dollars and offer a share to the rightsholders whose works they're using. This is an historically sound plan: cable operators got rich by retransmitting broadcasts without permission, and once they were commercial successes, they sat down to negotiate to pay for those copyrights (just as the record companies negotiated with composers *after* they'd gotten rich selling records bearing those compositions). +YouTube, 2007, bears some passing similarity to Napster, 2001. Founded by a couple guys in a garage, rocketed to popular success, heavily capitalized by a deep-pocketed giant. Its business model? Turn popularity into dollars and offer a share to the rightsholders whose works they're using. This is an historically sound plan: cable operators got rich by retransmitting broadcasts without permission, and once they were commercial successes, they sat down to negotiate to pay for those copyrights (just as the record companies negotiated with composers *{after}* they'd gotten rich selling records bearing those compositions). YouTube 07 has another similarity to Napster 01: it is being sued by entertainment companies. @@ -780,7 +780,7 @@ But if the entertainment industry squeezes these players out, ThePirateBay and m The thing is, the public doesn't want managed services with limited rights. We don't want to be stuck using approved devices in approved ways. We never have -- we are the spiritual descendants of the customers for "illegal" record albums and "illegal" cable TV. The demand signal won't go away. -There's no good excuse for going into production on a sequel to The Napster Wars. We saw that movie. We know how it turns out. 
Every Christmas, we get articles about how this was the worst Christmas ever for CDs. You know what? CD sales are *never* going to improve. CDs have been rendered obsolete by Internet distribution -- and the record industry has locked itself out of the only profitable, popular music distribution systems yet invented. +There's no good excuse for going into production on a sequel to The Napster Wars. We saw that movie. We know how it turns out. Every Christmas, we get articles about how this was the worst Christmas ever for CDs. You know what? CD sales are *{never}* going to improve. CDs have been rendered obsolete by Internet distribution -- and the record industry has locked itself out of the only profitable, popular music distribution systems yet invented. Companies like Google/YouTube and TiVo are rarities: tech companies that want to do deals. They need to be cherished by entertainment companies, not sued. @@ -959,7 +959,7 @@ But, thanks to the web, artists have more options than ever. The Internet's most And it's not just the indies who benefit: the existence of successful independent artists creates fantastic leverage for artists who negotiate with the majors. More and more, the big media companies' "like it or leave it" bargaining stance is being undermined by the possibility that the next big star will shrug, turn on her heel, and make her fortune without the big companies' help. This has humbled the bigs, making their deals better and more artist-friendly. -Bargaining leverage is just for starters. The greatest threat that art faces is suppression. Historically, artists have struggled just to make themselves heard, just to safeguard the right to express themselves. Censorship is history's greatest enemy of art. A limited-liability Web is a Web where anyone can post anything and reach *everyone*. +Bargaining leverage is just for starters. The greatest threat that art faces is suppression. 
Historically, artists have struggled just to make themselves heard, just to safeguard the right to express themselves. Censorship is history's greatest enemy of art. A limited-liability Web is a Web where anyone can post anything and reach *{everyone}*. What's more, this privilege isn't limited to artists. All manner of communication, from the personal introspection in public "diaries" to social chatter on MySpace and Facebook, are now possible. Some artists have taken the bizarre stance that this "trivial" matter is unimportant and thus a poor excuse for allowing hosted services to exist in the first place. This is pretty arrogant: a society where only artists are allowed to impart "important" messages and where the rest of us are supposed to shut up about our loves, hopes, aspirations, jokes, family and wants is hardly a democratic paradise. @@ -1376,15 +1376,15 @@ No, if I had to come up with another title for this talk, I'd call it: "Ebooks: I haven't come to a perfect understanding. I don't know what the future of the book looks like. But I have ideas, and I'll share them with you: -1. Ebooks aren't marketing. [Ebooks aren't marketing] OK, so ebooks *are* marketing: that is to say that giving away ebooks sells more books. Baen Books, who do a lot of series publishing, have found that giving away electronic editions of the previous installments in their series to coincide with the release of a new volume sells the hell out of the new book -- and the backlist. And the number of people who wrote to me to tell me about how much they dug the ebook and so bought the paper-book far exceeds the number of people who wrote to me and said, "Ha, ha, you hippie, I read your book for free and now I'm not gonna buy it." But ebooks *shouldn't* be just about marketing: ebooks are a goal unto themselves. 
In the final analysis, more people will read more words off more screens and fewer words off fewer pages and when those two lines cross, ebooks are gonna have to be the way that writers earn their keep, not the way that they promote the dead-tree editions. +1. Ebooks aren't marketing. [Ebooks aren't marketing] OK, so ebooks *{are}* marketing: that is to say that giving away ebooks sells more books. Baen Books, who do a lot of series publishing, have found that giving away electronic editions of the previous installments in their series to coincide with the release of a new volume sells the hell out of the new book -- and the backlist. And the number of people who wrote to me to tell me about how much they dug the ebook and so bought the paper-book far exceeds the number of people who wrote to me and said, "Ha, ha, you hippie, I read your book for free and now I'm not gonna buy it." But ebooks *{shouldn't}* be just about marketing: ebooks are a goal unto themselves. In the final analysis, more people will read more words off more screens and fewer words off fewer pages and when those two lines cross, ebooks are gonna have to be the way that writers earn their keep, not the way that they promote the dead-tree editions. 2. Ebooks complement paper books. [Ebooks complement paper books]. Having an ebook is good. Having a paper book is good. Having both is even better. One reader wrote to me and said that he read half my first novel from the bound book, and printed the other half on scrap-paper to read at the beach. Students write to me to say that it's easier to do their term papers if they can copy and paste their quotations into their word-processors. Baen readers use the electronic editions of their favorite series to build concordances of characters, places and events. 3. Unless you own the ebook, you don't 0wn the book [Unless you own the ebook, you don't 0wn the book]. 
I take the view that the book is a "practice" -- a collection of social and economic and artistic activities -- and not an "object." Viewing the book as a "practice" instead of an object is a pretty radical notion, and it begs the question: just what the hell is a book? Good question. I write all of my books in a text-editor [TEXT EDITOR SCREENGRAB] (BBEdit, from Barebones Software -- as fine a text-editor as I could hope for). From there, I can convert them into a formatted two-column PDF [TWO-UP SCREENGRAB]. I can turn them into an HTML file [BROWSER SCREENGRAB]. I can turn them over to my publisher, who can turn them into galleys, advanced review copies, hardcovers and paperbacks. I can turn them over to my readers, who can convert them to a bewildering array of formats [DOWNLOAD PAGE SCREENGRAB]. Brewster Kahle's Internet Bookmobile can convert a digital book into a four-color, full-bleed, perfect-bound, laminated-cover, printed-spine paper book in ten minutes, for about a dollar. Try converting a paper book to a PDF or an html file or a text file or a RocketBook or a printout for a buck in ten minutes! It's ironic, because one of the frequently cited reasons for preferring paper to ebooks is that paper books confer a sense of ownership of a physical object. Before the dust settles on this ebook thing, owning a paper book is going to feel less like ownership than having an open digital edition of the text. -4. Ebooks are a better deal for writers. [Ebooks are a better deal for writers] The compensation for writers is pretty thin on the ground. *{Amazing Stories}*, Hugo Gernsback's original science fiction magazine, paid a couple cents a word. Today, science fiction magazines pay...a couple cents a word. The sums involved are so minuscule, they're not even insulting: they're *quaint* and *historical*, like the WHISKEY 5 CENTS sign over the bar at a pioneer village. 
Some writers do make it big, but they're *rounding errors* as compared to the total population of sf writers earning some of their living at the trade. Almost all of us could be making more money elsewhere (though we may dream of earning a stephenkingload of money, and of course, no one would play the lotto if there were no winners). The primary incentive for writing has to be artistic satisfaction, egoboo, and a desire for posterity. Ebooks get you that. Ebooks become a part of the corpus of human knowledge because they get indexed by search engines and replicated by the hundreds, thousands or millions. They can be googled. +4. Ebooks are a better deal for writers. [Ebooks are a better deal for writers] The compensation for writers is pretty thin on the ground. *{Amazing Stories}*, Hugo Gernsback's original science fiction magazine, paid a couple cents a word. Today, science fiction magazines pay...a couple cents a word. The sums involved are so minuscule, they're not even insulting: they're *{quaint}* and *{historical}*, like the WHISKEY 5 CENTS sign over the bar at a pioneer village. Some writers do make it big, but they're *{rounding errors}* as compared to the total population of sf writers earning some of their living at the trade. Almost all of us could be making more money elsewhere (though we may dream of earning a stephenkingload of money, and of course, no one would play the lotto if there were no winners). The primary incentive for writing has to be artistic satisfaction, egoboo, and a desire for posterity. Ebooks get you that. Ebooks become a part of the corpus of human knowledge because they get indexed by search engines and replicated by the hundreds, thousands or millions. They can be googled. -Even better: they level the playing field between writers and trolls. 
When Amazon kicked off, many writers got their knickers in a tight and powerful knot at the idea that axe-grinding yahoos were filling the Amazon message-boards with ill-considered slams at their work -- for, if a personal recommendation is the best way to sell a book, then certainly a personal condemnation is the best way to *not* sell a book. Today, the trolls are still with us, but now, the readers get to decide for themselves. Here's a bit of a review of Down and Out in the Magic Kingdom that was recently posted to Amazon by "A reader from Redwood City, CA": +Even better: they level the playing field between writers and trolls. When Amazon kicked off, many writers got their knickers in a tight and powerful knot at the idea that axe-grinding yahoos were filling the Amazon message-boards with ill-considered slams at their work -- for, if a personal recommendation is the best way to sell a book, then certainly a personal condemnation is the best way to *{not}* sell a book. Today, the trolls are still with us, but now, the readers get to decide for themselves. Here's a bit of a review of Down and Out in the Magic Kingdom that was recently posted to Amazon by "A reader from Redwood City, CA": group{ @@ -1412,11 +1412,11 @@ group{ }group -You see that? Hell, this guy is *working for me*! [ADDITIONAL PULL QUOTES] Someone accuses a writer I'm thinking of reading of paying off Entertainment Weekly to say nice things about his novel, "a surprisingly bad writer," no less, whose writing is "stiff, amateurish, and uninspired!" I wanna check that writer out. And I can. In one click. And then I can make up my own mind. +You see that? Hell, this guy is *{working for me}*! [ADDITIONAL PULL QUOTES] Someone accuses a writer I'm thinking of reading of paying off Entertainment Weekly to say nice things about his novel, "a surprisingly bad writer," no less, whose writing is "stiff, amateurish, and uninspired!" I wanna check that writer out. And I can. In one click. 
And then I can make up my own mind. You don't get far in the arts without healthy doses of both ego and insecurity, and the downside of being able to google up all the things that people are saying about your book is that it can play right into your insecurities -- "all these people will have it in their minds not to bother with my book because they've read the negative interweb reviews!" But the flipside of that is the ego: "If only they'd give it a shot, they'd see how good it is." And the more scathing the review is, the more likely they are to give it a shot. Any press is good press, so long as they spell your URL right (and even if they spell your name wrong!). -5. Ebooks need to embrace their nature. [Ebooks need to embrace their nature.] The distinctive value of ebooks is orthogonal to the value of paper books, and it revolves around the mix-ability and send-ability of electronic text. The more you constrain an ebook's distinctive value propositions -- that is, the more you restrict a reader's ability to copy, transport or transform an ebook -- the more it has to be valued on the same axes as a paper-book. Ebooks *fail* on those axes. Ebooks don't beat paper-books for sophisticated typography, they can't match them for quality of paper or the smell of the glue. But just try sending a paper book to a friend in Brazil, for free, in less than a second. Or loading a thousand paper books into a little stick of flash-memory dangling from your keychain. Or searching a paper book for every instance of a character's name to find a beloved passage. Hell, try clipping a pithy passage out of a paper book and pasting it into your sig-file. +5. Ebooks need to embrace their nature. [Ebooks need to embrace their nature.] The distinctive value of ebooks is orthogonal to the value of paper books, and it revolves around the mix-ability and send-ability of electronic text. 
The more you constrain an ebook's distinctive value propositions -- that is, the more you restrict a reader's ability to copy, transport or transform an ebook -- the more it has to be valued on the same axes as a paper-book. Ebooks *{fail}* on those axes. Ebooks don't beat paper-books for sophisticated typography, they can't match them for quality of paper or the smell of the glue. But just try sending a paper book to a friend in Brazil, for free, in less than a second. Or loading a thousand paper books into a little stick of flash-memory dangling from your keychain. Or searching a paper book for every instance of a character's name to find a beloved passage. Hell, try clipping a pithy passage out of a paper book and pasting it into your sig-file. 6. Ebooks demand a different attention span (but not a shorter one). [Ebooks demand a different attention span (but not a shorter one).] Artists are always disappointed by their audience's attention-spans. Go back far enough and you'll find cuneiform etchings bemoaning the current Sumerian go-go lifestyle with its insistence on myths with plotlines and characters and action, not like we had in the old days. As artists, it would be a hell of a lot easier if our audiences were more tolerant of our penchant for boring them. We'd get to explore a lot more ideas without worrying about tarting them up with easy-to-swallow chocolate coatings of entertainment. We like to think of shortened attention spans as a product of the information age, but check this out: @@ -1430,11 +1430,11 @@ group{ }group -In other words, if my book is too boring, it's because you're not paying enough attention. Writers say this stuff all the time, but this quote isn't from this century or the last. [Nietzsche quote with attribution] It's from the preface to Nietzsche's "Genealogy of Morals," published in *1887.* +In other words, if my book is too boring, it's because you're not paying enough attention. 
Writers say this stuff all the time, but this quote isn't from this century or the last. [Nietzsche quote with attribution] It's from the preface to Nietzsche's "Genealogy of Morals," published in *{1887}*. -Yeah, our attention-spans are *different* today, but they aren't necessarily *shorter*. Warren Ellis's fans managed to hold the storyline for Transmetropolitan [Transmet cover] in their minds for *{five years}* while the story trickled out in monthly funnybook installments. JK Rowlings's installments on the Harry Potter series get fatter and fatter with each new volume. Entire forests are sacrificed to long-running series fiction like Robert Jordan's Wheel of Time books, each of which is approximately 20,000 pages long (I may be off by an order of magnitude one way or another here). Sure, presidential debates are conducted in soundbites today and not the days-long oratory extravaganzas of the Lincoln-Douglas debates, but people manage to pay attention to the 24-month-long presidential campaigns from start to finish. +Yeah, our attention-spans are *{different}* today, but they aren't necessarily *{shorter}*. Warren Ellis's fans managed to hold the storyline for Transmetropolitan [Transmet cover] in their minds for *{five years}* while the story trickled out in monthly funnybook installments. JK Rowlings's installments on the Harry Potter series get fatter and fatter with each new volume. Entire forests are sacrificed to long-running series fiction like Robert Jordan's Wheel of Time books, each of which is approximately 20,000 pages long (I may be off by an order of magnitude one way or another here). Sure, presidential debates are conducted in soundbites today and not the days-long oratory extravaganzas of the Lincoln-Douglas debates, but people manage to pay attention to the 24-month-long presidential campaigns from start to finish. -7. We need *all* the ebooks. [We need *all* the ebooks] The vast majority of the words ever penned are lost to posterity. 
No one library collects all the still-extant books ever written and no one person could hope to make a dent in that corpus of written work. None of us will ever read more than the tiniest sliver of human literature. But that doesn't mean that we can stick with just the most popular texts and get a proper ebook revolution. +7. We need *{all}* the ebooks. [We need *{all}* the ebooks] The vast majority of the words ever penned are lost to posterity. No one library collects all the still-extant books ever written and no one person could hope to make a dent in that corpus of written work. None of us will ever read more than the tiniest sliver of human literature. But that doesn't mean that we can stick with just the most popular texts and get a proper ebook revolution. For starters, we're all edge-cases. Sure, we all have the shared desire for the core canon of literature, but each of us want to complete that collection with different texts that are as distinctive and individualistic as fingerprints. If we all look like we're doing the same thing when we read, or listen to music, or hang out in a chatroom, that's because we're not looking closely enough. The shared-ness of our experience is only present at a coarse level of measurement: once you get into really granular observation, there are as many differences in our "shared" experience as there are similarities. @@ -1444,7 +1444,7 @@ More than that, though, is the way that a large collection of electronic text di There's a temptation to view downloading a book as comparable to bringing it home from the store, but that's the wrong metaphor. Some of the time, maybe most of the time, downloading the text of the book is like taking it off the shelf at the store and looking at the cover and reading the blurbs (with the advantage of not having to come into contact with the residual DNA and burger king left behind by everyone else who browsed the book before you). 
Some writers are horrified at the idea that three hundred thousand copies of my first novel were downloaded and "only" ten thousand or so were sold so far. If it were the case that for ever copy sold, thirty were taken home from the store, that would be a horrifying outcome, for sure. But look at it another way: if one out of every thirty people who glanced at the cover of my book bought it, I'd be a happy author. And I am. Those downloads cost me no more than glances at the cover in a bookstore, and the sales are healthy. -We also like to think of physical books as being inherently *countable* in a way that digital books aren't (an irony, since computers are damned good at counting things!). This is important, because writers get paid on the basis of the number of copies of their books that sell, so having a good count makes a difference. And indeed, my royalty statements contain precise numbers for copies printed, shipped, returned and sold. +We also like to think of physical books as being inherently *{countable}* in a way that digital books aren't (an irony, since computers are damned good at counting things!). This is important, because writers get paid on the basis of the number of copies of their books that sell, so having a good count makes a difference. And indeed, my royalty statements contain precise numbers for copies printed, shipped, returned and sold. But that's a false precision. When the printer does a run of a book, it always runs a few extra at the start and finish of the run to make sure that the setup is right and to account for the occasional rip, drop, or spill. The actual total number of books printed is approximately the number of books ordered, but never exactly -- if you've ever ordered 500 wedding invitations, chances are you received 500-and-a-few back from the printer and that's why. @@ -1460,7 +1460,7 @@ And finally, of course, there's the matter of selling books. However an author e I care about books, a lot. 
I started working in libraries and bookstores at the age of 12 and kept at it for a decade, until I was lured away by the siren song of the tech world. I knew I wanted to be a writer at the age of 12, and now, 20 years later, I have three novels, a short story collection and a nonfiction book out, two more novels under contract, and another book in the works. [BOOK COVERS] I've won a major award in my genre, science fiction, [CAMPBELL AWARD] and I'm nominated for another one, the 2003 Nebula Award for best novelette. [NEBULA] -I own a *lot* of books. Easily more than 10,000 of them, in storage on both coasts of the North American continent [LIBRARY LADDER]. I have to own them, since they're the tools of my trade: the reference works I refer to as a novelist and writer today. Most of the literature I dig is very short-lived, it disappears from the shelf after just a few months, usually for good. Science fiction is inherently ephemeral. [ACE DOUBLES] +I own a *{lot}* of books. Easily more than 10,000 of them, in storage on both coasts of the North American continent [LIBRARY LADDER]. I have to own them, since they're the tools of my trade: the reference works I refer to as a novelist and writer today. Most of the literature I dig is very short-lived, it disappears from the shelf after just a few months, usually for good. Science fiction is inherently ephemeral. [ACE DOUBLES] Now, as much as I love books, I love computers, too. Computers are fundamentally different from modern books in the same way that printed books are different from monastic Bibles: they are malleable. Time was, a "book" was something produced by many months' labor by a scribe, usually a monk, on some kind of durable and sexy substrate like foetal lambskin. 
[ILLUMINATED BIBLE] Gutenberg's xerox machine changed all that, changed a book into something that could be simply run off a press in a few minutes' time, on substrate more suitable to ass-wiping than exaltation in a place of honor in the cathedral. The Gutenberg press meant that rather than owning one or two books, a member of the ruling class could amass a library, and that rather than picking only a few subjects from enshrinement in print, a huge variety of subjects could be addressed on paper and handed from person to person. [KAPITAL/TIJUANA BIBLE] @@ -1523,7 +1523,7 @@ _* File-formats go obsolete, paper has lasted for a long time None of these seemed like very good explanations for the "failure" of ebooks to me. If screen resolutions are too low to replace paper, then how come everyone I know spends more time reading off a screen every year, up to and including my sainted grandmother (geeks have a really crappy tendency to argue that certain technologies aren't ready for primetime because their grandmothers won't use them -- well, my grandmother sends me email all the time. She types 70 words per minute, and loves to show off grandsonular email to her pals around the pool at her Florida retirement condo)? -The other arguments were a lot more interesting, though. It seemed to me that electronic books are *different* from paper books, and have different virtues and failings. Let's think a little about what the book has gone through in years gone by. This is interesting because the history of the book is the history of the Enlightenment, the Reformation, the Pilgrims, and, ultimately the colonizing of the Americas and the American Revolution. +The other arguments were a lot more interesting, though. It seemed to me that electronic books are *{different}* from paper books, and have different virtues and failings. Let's think a little about what the book has gone through in years gone by. 
This is interesting because the history of the book is the history of the Enlightenment, the Reformation, the Pilgrims, and, ultimately the colonizing of the Americas and the American Revolution. Broadly speaking, there was a time when books were hand-printed on rare leather by monks. The only people who could read them were priests, who got a regular eyeful of the really cool cartoons the monks drew in the margins. The priests read the books aloud, in Latin [LATIN BIBLE] (to a predominantly non-Latin-speaking audience) in cathedrals, wreathed in pricey incense that rose from censers swung by altar boys. @@ -1573,7 +1573,7 @@ This is a huge problem. The biggest possible problem. Here's why: [CHART: HOW BROKEN COPYRIGHT SCREWS EVERYONE] -_* Authors freak out. Authors have been schooled by their peers that strong copyright is the only thing that keeps them from getting savagely rogered in the marketplace. This is pretty much true: it's strong copyright that often defends authors from their publishers' worst excesses. However, it doesn't follow that strong copyright protects you from your *readers*. +_* Authors freak out. Authors have been schooled by their peers that strong copyright is the only thing that keeps them from getting savagely rogered in the marketplace. This is pretty much true: it's strong copyright that often defends authors from their publishers' worst excesses. However, it doesn't follow that strong copyright protects you from your *{readers}*. _* Readers get indignant over being called crooks. Seriously. You're a small businessperson. Readers are your customers. Calling them crooks is bad for business. @@ -1587,11 +1587,11 @@ This isn't to say that copyright is bad, but that there's such a thing as good c From the Luther Bible to the first phonorecords, from radio to the pulps, from cable to MP3, the world has shown that its first preference for new media is its "democratic-ness" -- the ease with which it can reproduced. 
-(And please, before we get any farther, forget all that business about how the Internet's copying model is more disruptive than the technologies that proceeded it. For Christ's sake, the Vaudeville performers who sued Marconi for inventing the radio had to go from a regime where they had *{one hundred percent}* control over who could get into the theater and hear them perform to a regime where they had *zero* percent control over who could build or acquire a radio and tune into a recording of them performing. For that matter, look at the difference between a monkish Bible and a Luther Bible -- next to that phase-change, Napster is peanuts) +(And please, before we get any farther, forget all that business about how the Internet's copying model is more disruptive than the technologies that preceded it. For Christ's sake, the Vaudeville performers who sued Marconi for inventing the radio had to go from a regime where they had *{one hundred percent}* control over who could get into the theater and hear them perform to a regime where they had *{zero}* percent control over who could build or acquire a radio and tune into a recording of them performing. For that matter, look at the difference between a monkish Bible and a Luther Bible -- next to that phase-change, Napster is peanuts) Back to democratic-ness. Every successful new medium has traded off its artifact-ness -- the degree to which it was populated by bespoke hunks of atoms, cleverly nailed together by master craftspeople -- for ease of reproduction. Piano rolls weren't as expressive as good piano players, but they scaled better -- as did radio broadcasts, pulp magazines, and MP3s. Liner notes, hand illumination and leather bindings are nice, but they pale in comparison to the ability of an individual to actually get a copy of her own. -Which isn't to say that old media die.
Artists still hand-illuminate books; master pianists still stride the boards at Carnegie Hall, and the shelves burst with tell-all biographies of musicians that are richer in detail than any liner-notes booklet. The thing is, when all you've got is monks, every book takes on the character of a monkish Bible. Once you invent the printing press, all the books that are better-suited to movable type migrate into that new form. What's left behind are those items that are best suited to the old production scheme: the plays that *need* to be plays, the books that are especially lovely on creamy paper stitched between covers, the music that is most enjoyable performed live and experienced in a throng of humanity. +Which isn't to say that old media die. Artists still hand-illuminate books; master pianists still stride the boards at Carnegie Hall, and the shelves burst with tell-all biographies of musicians that are richer in detail than any liner-notes booklet. The thing is, when all you've got is monks, every book takes on the character of a monkish Bible. Once you invent the printing press, all the books that are better-suited to movable type migrate into that new form. What's left behind are those items that are best suited to the old production scheme: the plays that *{need}* to be plays, the books that are especially lovely on creamy paper stitched between covers, the music that is most enjoyable performed live and experienced in a throng of humanity. Increased democratic-ness translates into decreased control: it's a lot harder to control who can copy a book once there's a photocopier on every corner than it is when you need a monastery and several years to copy a Bible. And that decreased control demands a new copyright regime that rebalances the rights of creators with their audiences. 
@@ -1599,7 +1599,7 @@ For example, when the VCR was invented, the courts affirmed a new copyright exem Copyright is perennially out of date, because its latest rev was generated in response to the last generation of technology. The temptation to treat copyright as though it came down off the mountain on two stone tablets (or worse, as "just like" real property) is deeply flawed, since, by definition, current copyright only considers the last generation of tech. -So, are bookwarez in violation of copyright law? Duh. Is this the end of the world? *Duh*. If the Catholic church can survive the printing press, science fiction will certainly weather the advent of bookwarez. +So, are bookwarez in violation of copyright law? Duh. Is this the end of the world? *{Duh}*. If the Catholic church can survive the printing press, science fiction will certainly weather the advent of bookwarez. # @@ -1883,19 +1883,19 @@ group{ }group -[fn: My lifestyle is as gypsy and fancy-free as the characters in H2G2, and as a result my copies of the Adams books are thousands of miles away in storages in other countries, and this essay was penned on public transit and cheap hotel rooms in Chile, Boston, London, Geneva, Brussels, Bergen, Geneva (again), Toronto, Edinburgh, and Helsinki. Luckily, I was able to download a dodgy, re-keyed version of the Adams books from a peer-to-peer network, which network I accessed via an open wireless network on a random street-corner in an anonymous city, a fact that I note here as testimony to the power of the Internet to do what the Guide does for Ford and Arthur: put all the information I need at my fingertips, wherever I am. However, these texts *are* a little on the dodgy side, as noted, so you might want to confirm these quotes before, say, uttering them before an Adams truefan.] 
+[fn: My lifestyle is as gypsy and fancy-free as the characters in H2G2, and as a result my copies of the Adams books are thousands of miles away in storages in other countries, and this essay was penned on public transit and cheap hotel rooms in Chile, Boston, London, Geneva, Brussels, Bergen, Geneva (again), Toronto, Edinburgh, and Helsinki. Luckily, I was able to download a dodgy, re-keyed version of the Adams books from a peer-to-peer network, which network I accessed via an open wireless network on a random street-corner in an anonymous city, a fact that I note here as testimony to the power of the Internet to do what the Guide does for Ford and Arthur: put all the information I need at my fingertips, wherever I am. However, these texts *{are}* a little on the dodgy side, as noted, so you might want to confirm these quotes before, say, uttering them before an Adams truefan.] And there's the humor: every writer knows the pain of laboring over a piece for days, infusing it with diverse interesting factoids and insights, only to have it cut to ribbons by some distant editor (I once wrote thirty drafts of a 5,000-word article for an editor who ended up running it in three paragraphs as accompaniment for what he decided should be a photo essay with minimal verbiage.) Since the dawn of the Internet, H2G2 geeks have taken it upon themselves to attempt to make a Guide on the Internet. Volunteers wrote and submitted essays on various subjects as would be likely to appear in a good encyclopedia, infusing them with equal measures of humor and thoughtfulness, and they were edited together by the collective effort of the contributors. These projects -- Everything2, H2G2 (which was overseen by Adams himself), and others -- are like a barn-raising in which a team of dedicated volunteers organize the labors of casual contributors, piecing together a free and open user-generated encyclopedia. 
-These encyclopedias have one up on Adams's Guide: they have no shortage of space on their "microprocessors" (the first volume of the Guide was clearly written before Adams became conversant with PCs!). The ability of humans to generate verbiage is far outstripped by the ability of technologists to generate low-cost, reliable storage to contain it. For example, Brewster Kahle's Internet Archive project (archive.org) has been making a copy of the Web -- the *whole* Web, give or take -- every couple of days since 1996. Using the Archive's Wayback Machine, you can now go and see what any page looked like on a given day. +These encyclopedias have one up on Adams's Guide: they have no shortage of space on their "microprocessors" (the first volume of the Guide was clearly written before Adams became conversant with PCs!). The ability of humans to generate verbiage is far outstripped by the ability of technologists to generate low-cost, reliable storage to contain it. For example, Brewster Kahle's Internet Archive project (archive.org) has been making a copy of the Web -- the *{whole}* Web, give or take -- every couple of days since 1996. Using the Archive's Wayback Machine, you can now go and see what any page looked like on a given day. -The Archive doesn't even bother throwing away copies of pages that haven't changed since the last time they were scraped: with storage as cheap as it is -- and it is *very* cheap for the Archive, which runs the largest database in the history of the universe off of a collection of white-box commodity PCs stacked up on packing skids in the basement of a disused armory in San Francisco's Presidio -- there's no reason not to just keep them around. In fact, the Archive has just spawned two "mirror" Archives, one located under the rebuilt Library of Alexandria and the other in Amsterdam. 
[fn: Brewster Kahle says that he was nervous about keeping his only copy of the "repository of all human knowledge" on the San Andreas fault, but keeping your backups in a censorship-happy Amnesty International watchlist state and/or in a floodplain below sea level is probably not such a good idea either!] +The Archive doesn't even bother throwing away copies of pages that haven't changed since the last time they were scraped: with storage as cheap as it is -- and it is *{very}* cheap for the Archive, which runs the largest database in the history of the universe off of a collection of white-box commodity PCs stacked up on packing skids in the basement of a disused armory in San Francisco's Presidio -- there's no reason not to just keep them around. In fact, the Archive has just spawned two "mirror" Archives, one located under the rebuilt Library of Alexandria and the other in Amsterdam. [fn: Brewster Kahle says that he was nervous about keeping his only copy of the "repository of all human knowledge" on the San Andreas fault, but keeping your backups in a censorship-happy Amnesty International watchlist state and/or in a floodplain below sea level is probably not such a good idea either!] -So these systems did not see articles trimmed for lack of space; for on the Internet, the idea of "running out of space" is meaningless. But they *were* trimmed, by editorial cliques, and rewritten for clarity and style. Some entries were rejected as being too thin, while others were sent back to the author for extensive rewrites. +So these systems did not see articles trimmed for lack of space; for on the Internet, the idea of "running out of space" is meaningless. But they *{were}* trimmed, by editorial cliques, and rewritten for clarity and style. Some entries were rejected as being too thin, while others were sent back to the author for extensive rewrites. 
-This traditional separation of editor and writer mirrors the creative process itself, in which authors are exhorted to concentrate on *either* composing *or* revising, but not both at the same time, for the application of the critical mind to the creative process strangles it. So you write, and then you edit. Even when you write for your own consumption, it seems you have to answer to an editor. +This traditional separation of editor and writer mirrors the creative process itself, in which authors are exhorted to concentrate on *{either}* composing *{or}* revising, but not both at the same time, for the application of the critical mind to the creative process strangles it. So you write, and then you edit. Even when you write for your own consumption, it seems you have to answer to an editor. The early experimental days of the Internet saw much experimentation with alternatives to traditional editor/author divisions. Slashdot, a nerdy news-site of surpassing popularity [fn: Having a link to one's website posted to Slashdot will almost inevitably overwhelm your server with traffic, knocking all but the best-provisioned hosts offline within minutes; this is commonly referred to as "the Slashdot Effect."], has a baroque system for "community moderation" of the responses to the articles that are posted to its front pages. Readers, chosen at random, are given five "moderator points" that they can use to raise or lower the score of posts on the Slashdot message boards. Subsequent readers can filter their views of these boards to show only highly ranked posts. Other readers are randomly presented with posts and their rankings and are asked to rate the fairness of each moderator's moderation. Moderators who moderate fairly are given more opportunities to moderate; likewise message-board posters whose messages are consistently highly rated. 
@@ -1907,13 +1907,13 @@ One of the best-capitalized businesses in the history of the world, Yahoo!, went Hence Slashdot, a system of distributed slushreading. Rather than professionalizing the editorship role, Slashdot invites contributors to identify good stuff when they see it, turning editorship into a reward for good behavior. -But as well as Slashdot works, it has this signal failing: nearly every conversation that takes place on Slashdot is shot through with discussion, griping and gaming *{on the moderation system itself}*. The core task of Slashdot has *become* editorship, not the putative subjects of Slashdot posts. The fact that the central task of Slashdot is to rate other Slashdotters creates a tenor of meanness in the discussion. Imagine if the subtext of every discussion you had in the real world was a kind of running, pedantic nitpickery in which every point was explicitly weighed and judged and commented upon. You'd be an unpleasant, unlikable jerk, the kind of person that is sometimes referred to as a "slashdork." +But as well as Slashdot works, it has this signal failing: nearly every conversation that takes place on Slashdot is shot through with discussion, griping and gaming *{on the moderation system itself}*. The core task of Slashdot has *{become}* editorship, not the putative subjects of Slashdot posts. The fact that the central task of Slashdot is to rate other Slashdotters creates a tenor of meanness in the discussion. Imagine if the subtext of every discussion you had in the real world was a kind of running, pedantic nitpickery in which every point was explicitly weighed and judged and commented upon. You'd be an unpleasant, unlikable jerk, the kind of person that is sometimes referred to as a "slashdork." As radical as Yahoo!'s conceit was, Slashdot's was more radical. 
But as radical as Slashdot's is, it is still inherently conservative in that it presumes that editorship is necessary, and that it further requires human judgment and intervention. Google's a lot more radical. Instead of editors, it has an algorithm. Not the kind of algorithm that dominated the early search engines like Altavista, in which laughably bad artificial intelligence engines attempted to automatically understand the content, context and value of every page on the Web so that a search for "Dog" would turn up the page more relevant to the query. -Google's algorithm is predicated on the idea that people are good at understanding things and computers are good at counting things. Google counts up all the links on the Web and affords more authority to those pages that have been linked to by the most other pages. The rationale is that if a page has been linked to by many web-authors, then they must have seen some merit in that page. This system works remarkably well -- so well that it's nearly inconceivable that any search-engine would order its rankings by any other means. What's more, it doesn't pervert the tenor of the discussions and pages that it catalogs by turning each one into a performance for a group of ranking peers. [fn: Or at least, it *didn't*. Today, dedicated web-writers, such as bloggers, are keenly aware of the way that Google will interpret their choices about linking and page-structure. One popular sport is "googlebombing," in which web-writers collude to link to a given page using a humorous keyword so that the page becomes the top result for that word -- which is why, for a time, the top result for "more evil than Satan" was Microsoft.com. Likewise, the practice of "blogspamming," in which unscrupulous spammers post links to their webpages in the message boards on various blogs, so that Google will be tricked into thinking that a wide variety of sites have conferred some authority onto their penis-enlargement page.] 
+Google's algorithm is predicated on the idea that people are good at understanding things and computers are good at counting things. Google counts up all the links on the Web and affords more authority to those pages that have been linked to by the most other pages. The rationale is that if a page has been linked to by many web-authors, then they must have seen some merit in that page. This system works remarkably well -- so well that it's nearly inconceivable that any search-engine would order its rankings by any other means. What's more, it doesn't pervert the tenor of the discussions and pages that it catalogs by turning each one into a performance for a group of ranking peers. [fn: Or at least, it *{didn't}*. Today, dedicated web-writers, such as bloggers, are keenly aware of the way that Google will interpret their choices about linking and page-structure. One popular sport is "googlebombing," in which web-writers collude to link to a given page using a humorous keyword so that the page becomes the top result for that word -- which is why, for a time, the top result for "more evil than Satan" was Microsoft.com. Likewise, the practice of "blogspamming," in which unscrupulous spammers post links to their webpages in the message boards on various blogs, so that Google will be tricked into thinking that a wide variety of sites have conferred some authority onto their penis-enlargement page.] But even Google is conservative in assuming that there is a need for editorship as distinct from composition. Is there a way we can dispense with editorship altogether and just use composition to refine our ideas? Can we merge composition and editorship into a single role, fusing our creative and critical selves? 
@@ -1979,11 +1979,11 @@ $$$$ (Originally published in The Guardian, November 13, 2007) ~# -The excellent little programmer book for the National Portrait Gallery's current show POPARTPORTRAITS has a lot to say about the pictures hung on the walls, about the diverse source material the artists drew from in producing their provocative works. They cut up magazines, copied comic books, drew in trademarked cartoon characters like Minnie Mouse, reproduced covers from *Time* magazine, made ironic use of the cartoon figure of Charles Atlas, painted over an iconic photo of James Dean or Elvis Presley -- and that's just in the first room of seven. +The excellent little programmer book for the National Portrait Gallery's current show POPARTPORTRAITS has a lot to say about the pictures hung on the walls, about the diverse source material the artists drew from in producing their provocative works. They cut up magazines, copied comic books, drew in trademarked cartoon characters like Minnie Mouse, reproduced covers from *{Time}* magazine, made ironic use of the cartoon figure of Charles Atlas, painted over an iconic photo of James Dean or Elvis Presley -- and that's just in the first room of seven. The programmer book describes the aesthetic experience of seeing these repositioned icons of culture high and low, the art created by the celebrated artists Poons, Rauschenberg, Warhol, et al by nicking the work of others, without permission, and remaking it to make statements and evoke emotions never countenanced by the original creators. -However, the book does not say a word about copyright. Can you blame it? A treatise on the way that copyright and trademark were -- *had to be* -- trammeled to make these works could fill volumes. Reading the programmer book, you have to assume that the curators' only message about copyright is that where free expression is concerned, the rights of the creators of the original source material appropriated by the pop school take a back seat. 
+However, the book does not say a word about copyright. Can you blame it? A treatise on the way that copyright and trademark were -- *{had to be}* -- trammeled to make these works could fill volumes. Reading the programmer book, you have to assume that the curators' only message about copyright is that where free expression is concerned, the rights of the creators of the original source material appropriated by the pop school take a back seat. There is, however, another message about copyright in the National Portrait Gallery: it's implicit in the "No Photography" signs prominently placed throughout the halls, including one right by the entrance of the POPARTPORTRAITS exhibition. This isn't intended to protect the works from the depredations of camera-flashes (it would read NO FLASH PHOTOGRAPHY if this were so). No, the ban on pictures is in place to safeguard the copyright in the works hung on the walls -- a fact that every gallery staffer I spoke to instantly affirmed when I asked about the policy. @@ -1995,7 +1995,7 @@ Or is it the epitaph on the tombstone of the sweet days before the UN's charteri Does this show -- paid for with public money, with some works that are themselves owned by public institutions -- seek to inspire us to become 21st century pops, armed with cameraphones, websites and mixers, or is it supposed to inform us that our chance has passed, and we'd best settle for a life as information serfs, who can't even make free use of what our eyes see, our ears hear, of the streets we walk upon? -Perhaps, just perhaps, it's actually a Dadaist show *masquerading* as a pop art show! Perhaps the point is to titillate us with the delicious irony of celebrating copyright infringement while simultaneously taking the view that even the NO PHOTOGRAPHY sign is a form of property, not to be reproduced without the permission that can never be had. +Perhaps, just perhaps, it's actually a Dadaist show *{masquerading}* as a pop art show! 
Perhaps the point is to titillate us with the delicious irony of celebrating copyright infringement while simultaneously taking the view that even the NO PHOTOGRAPHY sign is a form of property, not to be reproduced without the permission that can never be had. $$$$ @@ -2051,7 +2051,7 @@ Keeping track of our social relationships is a serious piece of work that runs a You'd think that Facebook would be the perfect tool for handling all this. It's not. For every long-lost chum who reaches out to me on Facebook, there's a guy who beat me up on a weekly basis through the whole seventh grade but now wants to be my buddy; or the crazy person who was fun in college but is now kind of sad; or the creepy ex-co-worker who I'd cross the street to avoid but who now wants to know, "Am I your friend?" yes or no, this instant, please. -It's not just Facebook and it's not just me. Every "social networking service" has had this problem and every user I've spoken to has been frustrated by it. I think that's why these services are so volatile: why we're so willing to flee from Friendster and into MySpace's loving arms; from MySpace to Facebook. It's socially awkward to refuse to add someone to your friends list -- but *removing* someone from your friend-list is practically a declaration of war. The least-awkward way to get back to a friends list with nothing but friends on it is to reboot: create a new identity on a new system and send out some invites (of course, chances are at least one of those invites will go to someone who'll groan and wonder why we're dumb enough to think that we're pals). +It's not just Facebook and it's not just me. Every "social networking service" has had this problem and every user I've spoken to has been frustrated by it. I think that's why these services are so volatile: why we're so willing to flee from Friendster and into MySpace's loving arms; from MySpace to Facebook. 
It's socially awkward to refuse to add someone to your friends list -- but *{removing}* someone from your friend-list is practically a declaration of war. The least-awkward way to get back to a friends list with nothing but friends on it is to reboot: create a new identity on a new system and send out some invites (of course, chances are at least one of those invites will go to someone who'll groan and wonder why we're dumb enough to think that we're pals). That's why I don't worry about Facebook taking over the net. As more users flock to it, the chances that the person who precipitates your exodus will find you increases. Once that happens, poof, away you go -- and Facebook joins SixDegrees, Friendster and their pals on the scrapheap of net.history. @@ -2175,7 +2175,7 @@ We seem to have sunk to a kind of playground system of forming contracts. There If you buy a downloadable movie from Amazon Unbox, you agree to let them install spyware on your computer, delete any file they don't like on your hard-drive, and cancel your viewing privileges for any reason. Of course, it goes without saying that Amazon reserves the right to modify the agreement at any time. -The worst offenders are people who sell you movies and music. They're a close second to people who sell you software, or provide services over the Internet. There's a rubric to this -- you're getting a discount in exchange for signing onto an abusive agreement, but just try and find the software that *doesn't* come with one of these "agreements" -- at any price. +The worst offenders are people who sell you movies and music. They're a close second to people who sell you software, or provide services over the Internet. There's a rubric to this -- you're getting a discount in exchange for signing onto an abusive agreement, but just try and find the software that *{doesn't}* come with one of these "agreements" -- at any price. 
For example, Vista, Microsoft's new operating system, comes in a rainbow of flavors varying in price from $99 to $399, but all of them come with the same crummy terms of service, which state that "you may not work around any technical limitations in the software," and that Windows Defender, the bundled anti-malware program, can delete any program from your hard drive that Microsoft doesn't like, even if it breaks your computer. @@ -2187,9 +2187,9 @@ However, the EULA that got Bragg upset wasn't a Murdoch innovation -- it dates b In their defense, EULAese is so mind-numbingly boring that it's a kind of torture to read these things. You can hardly blame them. -But it does raise the question -- why are we playing host to these infectious agents? If they're not read by customers *or* companies, why bother with them? +But it does raise the question -- why are we playing host to these infectious agents? If they're not read by customers *{or}* companies, why bother with them? -If you wanted to really be careful about this stuff, you'd prohibit every employee at your office from clicking on any link, installing any program, creating accounts, signing for parcels -- even doing a run to Best Buy for some CD blanks, have you *seen* the fine-print on their credit-card slips? After all, these people are entering into "agreements" on behalf of their employer -- agreements to allow spyware onto your network, to not "work around any technical limitations in their software," to let malicious software delete arbitrary files from their systems. +If you wanted to really be careful about this stuff, you'd prohibit every employee at your office from clicking on any link, installing any program, creating accounts, signing for parcels -- even doing a run to Best Buy for some CD blanks, have you *{seen}* the fine-print on their credit-card slips? 
After all, these people are entering into "agreements" on behalf of their employer -- agreements to allow spyware onto your network, to not "work around any technical limitations in their software," to let malicious software delete arbitrary files from their systems. So far, very few of us have been really bitten in the ass by EULAs, but that's because EULAs are generally associated with companies who have products or services they're hoping you'll use, and enforcing their EULAs could cost them business. @@ -2199,7 +2199,7 @@ But the rise of the patent troll changed all that. Patent trolls don't make prod If a shakedown artist can buy up some bogus patents and use them to put the screws to you, then it's only a matter of time until the same grifters latch onto the innumerable "agreements" that your company has formed with a desperate dot-bomb looking for an exit strategy. -More importantly, these "agreements" make a mockery of the law and of the very *idea* of forming agreements. Civilization starts with the idea of a real agreement -- for example, "We crap *here* and we sleep *there*, OK?" -- and if we reduce the noble agreement to a schoolyard game of no-takebacks, we erode the bedrock of civilization itself. +More importantly, these "agreements" make a mockery of the law and of the very *{idea}* of forming agreements. Civilization starts with the idea of a real agreement -- for example, "We crap *{here}* and we sleep *{there}*, OK?" -- and if we reduce the noble agreement to a schoolyard game of no-takebacks, we erode the bedrock of civilization itself. $$$$ @@ -2219,7 +2219,7 @@ But is the same true of a game? The money in your real-world bank-account and in Can you amass wealth in such a world? Well, sure. There are rich people in dictatorships all over the world. Stalin's favorites had great big dachas and drove fancy cars. You don't need democratic rights to get rich. -But you *do* need democratic freedoms to *stay* rich. 
In-world wealth is like a Stalin-era dacha, or the diamond fortunes of Apartheid South Africa: valuable, even portable (to a limited extent), but not really *yours*, not in any stable, long-term sense. +But you *{do}* need democratic freedoms to *{stay}* rich. In-world wealth is like a Stalin-era dacha, or the diamond fortunes of Apartheid South Africa: valuable, even portable (to a limited extent), but not really *{yours}*, not in any stable, long-term sense. Here are some examples of the difference between being a citizen and a customer: @@ -2241,15 +2241,15 @@ So what does it mean to be "rich" in Second Life? Sure, you can have a thriving Well, what of it? Why not just create a "democratic" game that has a constitution, full citizenship for players, and all the prerequisites for stable wealth? Such a game would be open source (so that other, interoperable "nations" could be established for you to emigrate to if you don't like the will of the majority in one game-world), and run by elected representatives who would instruct the administrators and programmers as to how to run the virtual world. In the real world, the TSA sets the rules for aviation -- in a virtual world, the equivalent agency would determine the physics of flight. -The question is, would this game be any *fun*? Well, democracy itself is pretty fun -- where "fun" means "engrossing and engaging." Lots of people like to play the democracy game, whether by voting every four years or by moving to K Street and setting up a lobbying operation. +The question is, would this game be any *{fun}*? Well, democracy itself is pretty fun -- where "fun" means "engrossing and engaging." Lots of people like to play the democracy game, whether by voting every four years or by moving to K Street and setting up a lobbying operation. But video games aren't quite the same thing. 
Gameplay conventions like "grinding" (repeating a task), "leveling up" (attaining a higher level of accomplishment), "questing" and so on are functions of artificial scarcity. The difference between a character with 10,000,000 gold pieces and a giant, rare, terrifying crossbow and a newbie player is which pointers are associated with each character's database entry. If the elected representatives direct that every player should have the shiniest armor, best space-ships, and largest bank-balances possible (this sounds like a pretty good election platform to me!), then what's left to do? -Oh sure, in Second Life they have an interesting crafting economy based on creating and exchanging virtual objects. But these objects are *also* artificially scarce -- that is, the ability of these objects to propagate freely throughout the world is limited only by the software that supports them. It's basically the same economics of the music industry, but applied to every field of human endeavor in the entire (virtual) world. +Oh sure, in Second Life they have an interesting crafting economy based on creating and exchanging virtual objects. But these objects are *{also}* artificially scarce -- that is, the ability of these objects to propagate freely throughout the world is limited only by the software that supports them. It's basically the same economics of the music industry, but applied to every field of human endeavor in the entire (virtual) world. Fun matters. Real world currencies rise and fall based, in part, by the economic might of the nations that issue them. Virtual world currencies are more strongly tied to whether there's any reason to spend the virtual currency on the objects that are denominated in it. 10,000 EverQuest golds might trade for $100 on a day when that same sum will buy you a magic EQ sword that enables you to play alongside the most interesting people online, running the most fun missions online. 
But if all those players out-migrate to World of Warcraft, and word gets around that Warlord's Command is way more fun than anything in poor old creaky EverQuest, your EverQuest gold turns into Weimar Deutschemarks, a devalued currency that you can't even give away. -This is where the plausibility of my democratic, co-operative, open source virtual world starts to break down. Elected governments can field armies, run schools, provide health care (I'm a Canadian), and bring acid lakes back to health. But I've never done anything run by a government agency that was a lot of *fun*. It's my sneaking suspicion that the only people who'd enjoy playing World of Democracycraft would be the people running for office there. The players would soon find themselves playing IRSQuest, Second Notice of Proposed Rulemaking Life, and Caves of 27 Stroke B. +This is where the plausibility of my democratic, co-operative, open source virtual world starts to break down. Elected governments can field armies, run schools, provide health care (I'm a Canadian), and bring acid lakes back to health. But I've never done anything run by a government agency that was a lot of *{fun}*. It's my sneaking suspicion that the only people who'd enjoy playing World of Democracycraft would be the people running for office there. The players would soon find themselves playing IRSQuest, Second Notice of Proposed Rulemaking Life, and Caves of 27 Stroke B. Maybe I'm wrong. Maybe customership is enough of a rock to build a platform of sustainable industry upon. It's not like entrepreneurs in Dubai have a lot of recourse if they get on the wrong side of the Emir; or like Singaporeans get to appeal the decisions of President Nathan, and there's plenty of industry there. 
-- cgit v1.2.3 From 2dce51c69cc48da475bad2874b9f65b03ddb1fab Mon Sep 17 00:00:00 2001 From: Ralph Amissah Date: Sun, 22 Aug 2010 17:52:33 -0400 Subject: markup samples, add v1 markup for Democratizing Innovation, Eric von Hippel --- data/v1/samples/_sisu/image/di_evh.png | Bin 0 -> 24750 bytes data/v1/samples/_sisu/image/di_evh_f1-1.png | Bin 0 -> 132687 bytes data/v1/samples/_sisu/image/di_evh_f10-1.png | Bin 0 -> 97145 bytes data/v1/samples/_sisu/image/di_evh_f11-1.png | Bin 0 -> 212004 bytes data/v1/samples/_sisu/image/di_evh_f11-2.png | Bin 0 -> 42750 bytes data/v1/samples/_sisu/image/di_evh_f5-1.png | Bin 0 -> 255904 bytes .../samples/_sisu/skin/doc/skin_di_von_hippel.rb | 78 + .../democratizing_innovation.eric_von_hippel.sst | 3096 ++++++++++++++++++++ 8 files changed, 3174 insertions(+) create mode 100644 data/v1/samples/_sisu/image/di_evh.png create mode 100644 data/v1/samples/_sisu/image/di_evh_f1-1.png create mode 100644 data/v1/samples/_sisu/image/di_evh_f10-1.png create mode 100644 data/v1/samples/_sisu/image/di_evh_f11-1.png create mode 100644 data/v1/samples/_sisu/image/di_evh_f11-2.png create mode 100644 data/v1/samples/_sisu/image/di_evh_f5-1.png create mode 100644 data/v1/samples/_sisu/skin/doc/skin_di_von_hippel.rb create mode 100644 data/v1/samples/democratizing_innovation.eric_von_hippel.sst diff --git a/data/v1/samples/_sisu/image/di_evh.png b/data/v1/samples/_sisu/image/di_evh.png new file mode 100644 index 0000000..9938981 Binary files /dev/null and b/data/v1/samples/_sisu/image/di_evh.png differ diff --git a/data/v1/samples/_sisu/image/di_evh_f1-1.png b/data/v1/samples/_sisu/image/di_evh_f1-1.png new file mode 100644 index 0000000..eb884e9 Binary files /dev/null and b/data/v1/samples/_sisu/image/di_evh_f1-1.png differ diff --git a/data/v1/samples/_sisu/image/di_evh_f10-1.png b/data/v1/samples/_sisu/image/di_evh_f10-1.png new file mode 100644 index 0000000..ce41d83 Binary files /dev/null and b/data/v1/samples/_sisu/image/di_evh_f10-1.png 
differ diff --git a/data/v1/samples/_sisu/image/di_evh_f11-1.png b/data/v1/samples/_sisu/image/di_evh_f11-1.png new file mode 100644 index 0000000..4534358 Binary files /dev/null and b/data/v1/samples/_sisu/image/di_evh_f11-1.png differ diff --git a/data/v1/samples/_sisu/image/di_evh_f11-2.png b/data/v1/samples/_sisu/image/di_evh_f11-2.png new file mode 100644 index 0000000..7f17b42 Binary files /dev/null and b/data/v1/samples/_sisu/image/di_evh_f11-2.png differ diff --git a/data/v1/samples/_sisu/image/di_evh_f5-1.png b/data/v1/samples/_sisu/image/di_evh_f5-1.png new file mode 100644 index 0000000..caab88f Binary files /dev/null and b/data/v1/samples/_sisu/image/di_evh_f5-1.png differ diff --git a/data/v1/samples/_sisu/skin/doc/skin_di_von_hippel.rb b/data/v1/samples/_sisu/skin/doc/skin_di_von_hippel.rb new file mode 100644 index 0000000..3448e58 --- /dev/null +++ b/data/v1/samples/_sisu/skin/doc/skin_di_von_hippel.rb @@ -0,0 +1,78 @@ +# coding: utf-8 +=begin + * Name: SiSU - Simple information Structuring Universe - Structured information, Serialized Units + * Author: Ralph Amissah + * http://www.jus.uio.no/sisu + * http://www.jus.uio.no/sisu/SiSU/download + * Description: Skin prepared for Democratizing Innovation, Eric von Hippel + * License: Same as SiSU see http://www.jus.uio.no/sisu + * Notes: Site default appearance variables set in defaults.rb + Generic site wide modifications set here scribe_skin.rb, and this file required by other "scribes" instead of defaults.rb +=end +module SiSU_Viz + require SiSU_lib + '/defaults' + class Skin + #def path_root # the only parameter that cannot be changed here + # './sisu/' + #end + #def rel + # '../' + #end + def url_home + 'http://web.mit.edu/evhippel/www/' + #'http://web.mit.edu/evhippel/www/democ1.htm' + end + def url_txt # text to go with url usually stripped url + 'web.mit.edu/evhippel/www/' + end + def color_band1 + '"#ffffff"' + end + def txt_hp + 'web.mit.edu/evhippel/www/' + end + def txt_home + 'Eric von 
Hippel' + end + def icon_home_button + 'di_evh.png' + end + def icon_home_banner + icon_home_button + end + def banner_home_button + %{
#{png_home}
\n} + end + def banner_home_and_index_buttons + %{
#{png_home}
 This text sub- 
 Table of Contents 
#{table_close}
 #{table_close}} + end + def banner_band + %{
#{png_home}#{table_close}} + end + def credits_splash + %{
+The original pdf is available online at
web.mit.edu/evhippel/www/
+
+available at
Amazon.com and
+Barnes & Noble
+This book is Copyright Eric von Hippel © 2005
+Under a Creative Commons License, License: Attribution-Noncommercial-No Derivative Works (CC-BY-NC-ND) 2.0 +http://creativecommons.org/licenses/by-nc-nd/2.0/
} + end + end + class TeX + def header_center + "\\chead{\\href{#{@vz.url_home}}{web.mit.edu/evhippel/www/}}" + end + def home_url + "\\href{#{@vz.url_home}}{web.mit.edu/evhippel/www/}" + end + def home + "\\href{#{@vz.url_home}}{Eric von Hippel}" + end + def owner_chapter + "Document owner details" + end + end +end +__END__ diff --git a/data/v1/samples/democratizing_innovation.eric_von_hippel.sst b/data/v1/samples/democratizing_innovation.eric_von_hippel.sst new file mode 100644 index 0000000..566b9d1 --- /dev/null +++ b/data/v1/samples/democratizing_innovation.eric_von_hippel.sst @@ -0,0 +1,3096 @@ +% SiSU 1.0 + +@title: Democratizing Innovation + +@language: US + +@creator: von Hippel, Eric + +@type: Book + +@topic_register: SiSU:markup sample:book;innovation;technological innovations:economic aspects;diffusion of innovations;democracy;open source software:innovation + :isbn: 9780262720472 + :oclc: 56880369 + +% HC79.T4H558 2005 +% 338'.064-dc22 2004061060 + +@rights: Copyright (C) 2005 Eric von Hippel. Exclusive rights to publish and sell this book in print form in English are licensed to The MIT Press. All other rights are reserved by the author. An electronic version of this book is available under a Creative Commons license.
Creative Commons US Attribution-NonCommercial-NoDerivs license 2.0. http://creativecommons.org/licenses/by-nc-nd/2.0/legalcode Some Rights Reserved. You are free to copy, distribute, display and perform the work, under the following conditions: Attribution, you must give the original author credit; you may not use this work for commercial purposes; No Derivative Works, you may not alter, transform, or build-upon this work. For reuse or distribution you must make clear to others the license terms of this work. Any conditions can be waived if you get permission from the copyright holder. Your fair use and other rights are in no way affected by the above. + +@date.published: 2005 + +@date.created: 2005 + +@date.issued: 2005 + +@date.available: 2005 + +@date.modified: 2005 + +@date.valid: 2005 + +@level: new=:B,C; break=1 + +@skin: skin_di_von_hippel + +@links: {Democratizing Innovation}http://web.mit.edu/evhippel/www/democ1.htm +{Eric von Hippel}http://web.mit.edu/evhippel/www/ +{Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel +{@ Wikipedia}http://en.wikipedia.org/wiki/Democratizing_Innovation +{Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier +{Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty +{Free Culture, Lawrence Lessig @ SiSU}http://www.jus.uio.no/sisu/free_culture.lawrence_lessig +{CONTENT, Cory Doctorow @ SiSU}http://www.jus.uio.no/sisu/content.cory_doctorow +{Free as in Freedom (on Richard M. Stallman), Sam Williams @ SiSU}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams +{Free For All, Peter Wayner @ SiSU}http://www.jus.uio.no/sisu/free_for_all.peter_wayner +{The Cathedral and the Bazaar, Eric S. 
Raymond @ SiSU }http://www.jus.uio.no/sisu/the_cathedral_and_the_bazaar.eric_s_raymond +{Little Brother, Cory Doctorow @ SiSU}http://www.jus.uio.no/sisu/little_brother.cory_doctorow +{Democratizing Innovation @ Amazon.com}http://www.amazon.com/Democratizing-Innovation-Eric-Von-Hippel/dp/0262720477 +{Democratizing Innovation @ Barnes & Noble}http://search.barnesandnoble.com/booksearch/isbnInquiry.asp?isbn=9780262720472 + +:A~ @title @author + +1~attribution Attribution~# + +Dedicated to all who are building the information commons.~# + +% Contents +% Acknowledgements ix +% 1 Introduction and Overview 1 +% 2 Development of Products by Lead Users 19 +% 3 Why Many Users Want Custom Products 33 +% 4 Users' Innovate-or-Buy Decisions 45 +% 5 Users' Low-Cost Innovation Niches 63 +% 6 Why Users Often Freely Reveal Their Innovations 77 +% 7 Innovation Communities 93 +% 8 Adapting Policy to User Innovation 107 +% 9 Democratizing Innovation 121 +% 10 Application: Searching for Lead User Innovations 133 +% 11 Application: Toolkits for User Innovation and Custom Design 147 +% 12 Linking User Innovation to Other Phenomena and Fields 165 +% Notes 179 +% Bibliography 183 +% Index 197 + +1~acknowledgements Acknowledgements~# + +Early in my research on the democratization of innovation I was very fortunate to gain five major academic mentors and friends. Nathan Rosenberg, Richard Nelson, Zvi Griliches, Edwin Mansfield, and Ann Carter all provided crucial support as I adopted economics as the organizing framework and toolset for my work. Later, I collaborated with a number of wonderful co-authors, all of whom are friends as well: Stan Finkelstein, Nikolaus Franke, Dietmar Harhoff, Joachim Henkel, Cornelius Herstatt, Ralph Katz, Georg von Krogh, Karim Lakhani, Gary Lilien, Christian Luthje, Pamela Morrison, William Riggs, John Roberts, Stephan Schrader, Mary Sonnack, Stefan Thomke, Marcie Tyre, and Glen Urban. 
Other excellent research collaborators and friends of long standing include Carliss Baldwin, Sonali Shah, Sarah Slaughter, and Lars Jeppesen.~# + +At some point as interest in a topic grows, there is a transition from dyadic academic relationships to a real research community. In my case, the essential person in enabling that transition was my close friend and colleague Dietmar Harhoff. He began to send wonderful Assistant Professors (Habilitanden) over from his university, Ludwig Maximilians Universität in Munich, to do collaborative research with me as MIT Visiting Scholars. They worked on issues related to the democratization of innovation while at MIT and then carried on when they returned to Europe. Now they are training others in their turn.~# + +I have also greatly benefited from close contacts with colleagues in industry. As Director of the MIT Innovation Lab, I work together with senior innovation managers in just a few companies to develop and try out innovation tools in actual company settings. Close intellectual colleagues and friends of many years standing in this sphere include Jim Euchner from Pitney-Bowes, Mary Sonnack and Roger Lacey from 3M, John Wright from IFF, Dave Richards from Nortel Networks, John Martin from Verizon, Ben Hyde from the Apache Foundation, Brian Behlendorf from the Apache Foundation and CollabNet, and Joan Churchill and Susan Hiestand from Lead User Concepts. Thank you so much for the huge (and often humbling) insights that your and our field experimentation has provided!~# + +I am also eager to acknowledge and thank my family for the joy and learning they experience and share with me. My wife Jessie is a professional editor and edited my first book in a wonderful way. For this book, however, time devoted to bringing up the children made a renewed editorial collaboration impossible. I hope the reader will not suffer unduly as a consequence! 
My children Christiana Dagmar and Eric James have watched me work on the book---indeed they could not avoid it as I often write at home. I hope they have been drawing the lesson that academic research can be really fun. Certainly, that is the lesson I drew from my father, Arthur von Hippel. He wrote his books in his study upstairs when I was a child and would often come down to the kitchen for a cup of coffee. In transit, he would throw up his hands and say, to no one in particular, "/{Why}/ do I choose to work on such difficult problems?" And then he would look deeply happy. Dad, I noticed the smile!~# + +Finally my warmest thanks to my MIT colleagues and students and also to MIT as an institution. MIT is a really inspiring place to work and learn from others. We all understand the requirements for good research and learning, and we all strive to contribute to a very supportive academic environment. And, of course, new people are always showing up with new and interesting ideas, so fun and learning are always being renewed!~# + +:B~ Democratizing Innovation + +1~ 1 Introduction and Overview + +When I say that innovation is being democratized, I mean that users of products and services---both firms and individual consumers---are increasingly able to innovate for themselves. User-centered innovation processes offer great advantages over the manufacturer-centric innovation development systems that have been the mainstay of commerce for hundreds of years. Users that innovate can develop exactly what they want, rather than relying on manufacturers to act as their (often very imperfect) agents. Moreover, individual users do not have to develop everything they need on their own: they can benefit from innovations developed and freely shared by others. 
+={Economic benefit, expectations of by lead users:by users+43;Manufacturers:innovation and+11;Users:See also Lead users|expectations of economic benefit by+33|innovation and+33} + +The trend toward democratization of innovation applies to information products such as software and also to physical products. As a quick illustration of the latter, consider the development of high-performance windsurfing techniques and equipment in Hawaii by an informal user group. High-performance windsurfing involves acrobatics such as jumps and flips and turns in mid-air. Larry Stanley, a pioneer in high-performance windsurfing, described the development of a major innovation in technique and equipment to Sonali Shah: +={Stanley, L.;Shah, S.;Information commons:See also Information communities|See also Innovation communities;windsurfing} + +In 1978 Jürgen Honscheid came over from West Germany for the first Hawaiian World Cup and discovered jumping, which was new to him, although Mike Horgan and I were jumping in 1974 and 1975. There was a new enthusiasm for jumping and we were all trying to outdo each other by jumping higher and higher. The problem was that . . . the riders flew off in mid-air because there was no way to keep the board with you---and as a result you hurt your feet, your legs, and the board. +={Honscheid, J.;Horgan, M.} + +Then I remembered the "Chip," a small experimental board we had built with footstraps, and thought "it's dumb not to use this for jumping." That's when I first started jumping with footstraps and discovering controlled flight. I could go so much faster than I ever thought and when you hit a wave it was like a motorcycle rider hitting a ramp; you just flew into the air. All of a sudden not only could you fly into the air, but you could land the thing, and not only that, but you could change direction in the air! + +The whole sport of high-performance windsurfing really started from that. 
As soon as I did it, there were about ten of us who sailed all the time together and within one or two days there were various boards out there that had footstraps of various kinds on them, and we were all going fast and jumping waves and stuff. It just kind of snowballed from there. (Shah 2000) +={Shah, S.;windsurfing+1} + +By 1998, more than a million people were engaged in windsurfing, and a large fraction of the boards sold incorporated the user-developed innovations for the high-performance sport. + +The user-centered innovation process just illustrated is in sharp contrast to the traditional model, in which products and services are developed by manufacturers in a closed way, the manufacturers using patents, copyrights, and other protections to prevent imitators from free riding on their innovation investments. In this traditional model, a user's only role is to have needs, which manufacturers then identify and fill by designing and producing new products. The manufacturer-centric model does fit some fields and conditions. However, a growing body of empirical work shows that users are the first to develop many and perhaps most new industrial and consumer products. Further, the contribution of users is growing steadily larger as a result of continuing advances in computer and communications capabilities. +={Intellectual property rights:See also Private-collective innovation|copyrights and|innovation and+2;Copyrights:See Intellectual property rights;Manufacturers:government policy and+2;Product development+2;Users:government policy and;Economic benefit, expectations of by lead users:by manufacturers+5;Economic benefit, expectations of by lead users:by manufacturers+12;Government policy:manufacturer innovation and+2;Manufacturers:expectations of economic benefit by+26} + +In this book I explain in detail how the emerging process of user-centric, democratized innovation works. 
I also explain how innovation by users provides a very necessary complement to and feedstock for manufacturer innovation. + +The ongoing shift of innovation to users has some very attractive qualities. It is becoming progressively easier for many users to get precisely what they want by designing it for themselves. And innovation by users appears to increase social welfare. At the same time, the ongoing shift of product-development activities from manufacturers to users is painful and difficult for many manufacturers. Open, distributed innovation is "attacking" a major structure of the social division of labor. Many firms and industries must make fundamental changes to long-held business models in order to adapt. Further, governmental policy and legislation sometimes preferentially supports innovation by manufacturers. Considerations of social welfare suggest that this must change. The workings of the intellectual property system are of special concern. But despite the difficulties, a democratized and user-centric system of innovation appears well worth striving for. +={Government policy:See also Digital Millennium Copyright Act|and social welfare|user innovation and;Social welfare:government policy and|innovation and+3|user innovation and+3} + +% check government policy + +Users, as the term will be used in this book, are firms or individual consumers that expect to benefit from /{using}/ a product or a service. In contrast, manufacturers expect to benefit from /{selling}/ a product or a service. A firm or an individual can have different relationships to different products or innovations. For example, Boeing is a manufacturer of airplanes, but it is also a user of machine tools. If we were examining innovations developed by Boeing for the airplanes it sells, we would consider Boeing a manufacturer-innovator in those cases. 
But if we were considering innovations in metal-forming machinery developed by Boeing for in-house use in building airplanes, we would categorize those as user-developed innovations and would categorize Boeing as a user-innovator in those cases. +={Users:characteristics of+2;Manufacturers:characteristics of+2} + +Innovation user and innovation manufacturer are the two general "functional" relationships between innovator and innovation. Users are unique in that they alone benefit /{directly}/ from innovations. All others (here lumped under the term "manufacturers") must sell innovation-related products or services to users, indirectly or directly, in order to profit from innovations. Thus, in order to profit, inventors must sell or license knowledge related to innovations, and manufacturers must sell products or services incorporating innovations. Similarly, suppliers of innovation-related materials or services---unless they have direct use for the innovations---must sell the materials or services in order to profit from the innovations. +={Innovation:See also Innovation communities|functional sources of;Suppliers} + +The user and manufacturer categorization of relationships between innovator and innovation can be extended to specific functions, attributes, or features of products and services. When this is done, it may turn out that different parties are associated with different attributes of a particular product or service. For example, householders are the users of the switching attribute of a household electric light switch---they use it to turn lights on and off. However, switches also have other attributes, such as "easy wiring" qualities, that may be used only by the electricians who install them. Therefore, if an electrician were to develop an improvement to the installation attributes of a switch, it would be considered a user-developed innovation. + +A brief overview of the contents of the book follows. 
+ +!_ Development of Products by Lead Users (Chapter 2) +={Economic benefit, expectations of by lead users+6;Lead users+6:characteristics of+6|commercial attractiveness of+6|economic benefit, expectations of+6|identification of+6|innovation and+50|library information search system and+6} + +Empirical studies show that many users---from 10 percent to nearly 40 percent---engage in developing or modifying products. About half of these studies do not determine representative innovation frequencies; they were designed for other purposes. Nonetheless, when taken together, the findings make it very clear that users are doing a /{lot}/ of product modification and product development in many fields. +={innovation:attractiveness of+8} + +Studies of innovating users (both individuals and firms) show them to have the characteristics of "lead users." That is, they are ahead of the majority of users in their populations with respect to an important market trend, and they expect to gain relatively high benefits from a solution to the needs they have encountered there. The correlations found between innovation by users and lead user status are highly significant, and the effects are very large. + +Since lead users are at the leading edge of the market with respect to important market trends, one can guess that many of the novel products they develop for their own use will appeal to other users too and so might provide the basis for products manufacturers would wish to commercialize. This turns out to be the case. A number of studies have shown that many of the innovations reported by lead users are judged to be commercially attractive and/or have actually been commercialized by manufacturers. +={Manufacturers:lead users and+1} + +Research provides a firm grounding for these empirical findings. The two defining characteristics of lead users and the likelihood that they will develop new or modified products have been found to be highly correlated (Morrison et al. 2004). 
In addition, it has been found that the higher the intensity of lead user characteristics displayed by an innovator, the greater the commercial attractiveness of the innovation that the lead user develops (Franke and von Hippel 2003a). In figure 1.1, the increased concentration of innovations toward the right indicates that the likelihood of innovating is higher for users having higher lead user index values. The rise in average innovation attractiveness as one moves from left to right indicates that innovations developed by lead users tend to be more commercially attractive. (Innovation attractiveness is the sum of the novelty of the innovation and the expected future generality of market demand.) +={Morrison, Pamela;Franke, N.+5;von Hippel, E.+5} + +%% Figure 1.1 +% 2 3 4 5 6 7 8 9 10 11 12 13 14 +% 10 +% 5 +% 0 +% Attractiveness +% of +% innovations +% Innovation +% Estimated OLS curve +% "Lead-user?ness" of users + +{di_evh_f1-1.png}image + +!_ Figure 1.1 +User-innovators with stronger "lead user" characteristics develop innovations having higher appeal in the general marketplace. Estimated OLS function: Y = 2.06 + 0.57x, where Y represents attractiveness of innovation and x represents lead-user-ness of respondent. Adjusted R^{2}^ = 0.281; p = 0.002; n = 30. Source of data: Franke and von Hippel 2003. + +!_ Why Many Users Want Custom Products (Chapter 3) +={Custom products:heterogeneity of user needs and+2;User need+2;Users:innovate-or-buy decisions by+8|needs of+2} + +Why do so many users develop or modify products for their own use? Users may innovate if and as they want something that is not available on the market and are able and willing to pay for its development. It is likely that many users do not find what they want on the market. Meta-analysis of market-segmentation studies suggests that users' needs for products are highly heterogeneous in many fields (Franke and Reisinger 2003). 
+={Reisinger, H.} + +Mass manufacturers tend to follow a strategy of developing products that are designed to meet the needs of a large market segment well enough to induce purchase from and capture significant profits from a large number of customers. When users' needs are heterogeneous, this strategy of "a few sizes fit all" will leave many users somewhat dissatisfied with the commercial products on offer and probably will leave some users seriously dissatisfied. In a study of a sample of users of the security features of Apache web server software, Franke and von Hippel (2003b) found that users had a very high heterogeneity of need, and that many had a high willingness to pay to get precisely what they wanted. Nineteen percent of the users sampled actually innovated to tailor Apache more closely to their needs. Those who did were found to be significantly more satisfied. +={Apache web server software;Manufacturers:lead users and} + +!_ Users' Innovate-or-Buy Decisions (Chapter 4) +={Custom products:heterogeneity of user needs and+3|manufacturers and+3|agency costs and+2;User need+3;Users:needs of+3;Manufacturers:innovation and+9|innovate-or-buy decisions and+4;Users:agency costs and+2} + +Even if many users want "exactly right products" and are willing and able to pay for their development, why do users often do this for themselves rather than hire a custom manufacturer to develop a special just-right product for them? After all, custom manufacturers specialize in developing products for one or a few users. Since these firms are specialists, it is possible that they could design and build custom products for individual users or user firms faster, better, or cheaper than users could do this for themselves. Despite this possibility, several factors can drive users to innovate rather than buy. Both in the case of user firms and in the case of individual user-innovators, agency costs play a major role. 
In the case of individual user-innovators, enjoyment of the innovation process can also be important. +={Agency costs+1;Manufacturers:custom products and+2;Custom products:users and+3;Economic benefit, expectations of by lead users:by manufacturers+13} + +With respect to agency costs, consider that when a user develops its own custom product that user can be trusted to act in its own best interests. When a user hires a manufacturer to develop a custom product, the situation is more complex. The user is then a principal that has hired the custom manufacturer to act as its agent. If the interests of the principal and the agent are not the same, there will be agency costs. In general terms, agency costs are (1) costs incurred to monitor the agent to ensure that it (or he or she) follows the interests of the principal, (2) the cost incurred by the agent to commit itself not to act against the principal's interest (the "bonding cost"), and (3) costs associated with an outcome that does not fully serve the interests of the principal (Jensen and Meckling 1976). In the specific instance of product and service development, a major divergence of interests between user and custom manufacturer does exist: the user wants to get precisely what it needs, to the extent that it can afford to do so. In contrast, the custom manufacturer wants to lower its development costs by incorporating solution elements it already has or that it predicts others will want in the future---even if by doing so it does not serve its present client's needs as well as it could. +={Jensen, M.;Meckling, W.} + +A user wants to preserve its need specification because that specification is chosen to make /{that user's}/ overall solution quality as high as possible at the desired price. For example, an individual user may specify a mountain-climbing boot that will precisely fit his unique climbing technique and allow him to climb Everest more easily. 
Any deviations in boot design will require compensating modifications in the climber's carefully practiced and deeply ingrained climbing technique---a much more costly solution from the user's point of view. A custom boot manufacturer, in contrast, will have a strong incentive to incorporate the materials and processes it has in stock and expects to use in future even if this produces a boot that is not precisely right for the present customer. For example, the manufacturer will not want to learn a new way to bond boot components together even if that would produce the best custom result for one client. The net result is that when one or a few users want something special they will often get the best result by innovating for themselves. +={Innovation communities:See also Information communities+1} + +A small model of the innovate-or-buy decision follows. This model shows in a quantitative way that user firms with unique needs will always be better off developing new products for themselves. It also shows that development by manufacturers can be the most economical option when n or more user firms want the same thing. However, when the number of user firms wanting the same thing falls between 1 and n, manufacturers may not find it profitable to develop a new product for just a few users. In that case, more than one user may invest in developing the same thing independently, owing to market failure. This results in a waste of resources from the point of view of social welfare. The problem can be addressed by new institutional forms, such as the user innovation communities that will be studied later in this book. 
+={Innovation communities:social welfare, and;Manufacturers:social welfare and+21;Social welfare:manufacturer innovation and+21|user innovation and+21} + +Chapter 4 concludes by pointing out that an additional incentive can drive individual user-innovators to innovate rather than buy: they may value the /{process}/ of innovating because of the enjoyment or learning that it brings them. It might seem strange that user-innovators can enjoy product development enough to want to do it themselves---after all, manufacturers pay their product developers to do such work! On the other hand, it is also clear that enjoyment of problem solving is a motivator for many individual problem solvers in at least some fields. Consider for example the millions of crossword-puzzle aficionados. Clearly, for these individuals enjoyment of the problem-solving process rather than the solution is the goal. One can easily test this by attempting to offer a puzzle solver a completed puzzle---the very output he or she is working so hard to create. One will very likely be rejected with the rebuke that one should not spoil the fun! Pleasure as a motivator can apply to the development of commercially useful innovations as well. Studies of the motivations of volunteer contributors of code to widely used software products have shown that these individuals too are often strongly motivated to innovate by the joy and learning they find in this work (Hertel et al. 2003; Lakhani and Wolf 2005). +={Hertel, G.;Lakhani, K.;Wolf, B.;Innovation process;Users:innovation process and+7;Free software:See also Open source software;Hackers;Herrmann, S.} + +!_ Users' Low-Cost Innovation Niches (Chapter 5) +={Users:low-cost innovation niches of+3} + +An exploration of the basic processes of product and service development show that users and manufacturers tend to develop different /{types}/ of innovations. This is due in part to information asymmetries: users and manufacturers tend to know different things. 
Product developers need two types of information in order to succeed at their work: need and context-of-use information (generated by users) and generic solution information (often initially generated by manufacturers specializing in a particular type of solution). Bringing these two types of information together is not easy. Both need information and solution information are often very "sticky"---that is, costly to move from the site where the information was generated to other sites. As a result, users generally have a more accurate and more detailed model of their needs than manufacturers have, while manufacturers have a better model of the solution approach in which they specialize than the user has. +={Innovation process;Users:innovation process and;Information asymmetries+3;Manufacturers:information asymmetries of+2;Sticky information+2:innovation and+2;Users:information asymmetries of+2;Local information+2} + +When information is sticky, innovators tend to rely largely on information they already have in stock. One consequence of the information asymmetry between users and manufacturers is that users tend to develop innovations that are functionally novel, requiring a great deal of user-need information and use-context information for their development. In contrast, manufacturers tend to develop innovations that are improvements on well-known needs and that require a rich understanding of solution information for their development. For example, firms that use inventory-management systems, such as retailers, tend to be the developers of new approaches to inventory management. In contrast, manufacturers of inventory-management systems and equipment tend to develop improvements to the equipment used to implement these user-devised approaches (Ogawa 1998). 
+={Ogawa, S.;Innovation:functional sources of} + +If we extend the information-asymmetry argument one step further, we see that information stickiness implies that information on hand will also differ among /{individual}/ users and manufacturers. The information assets of some particular user (or some particular manufacturer) will be closest to what is required to develop a particular innovation, and so the cost of developing that innovation will be relatively low for that user or manufacturer. The net result is that user innovation activities will be distributed across many users according to their information endowments. With respect to innovation, one user is by no means a perfect substitute for another. + +!_ Why Users Often Freely Reveal Their Innovations (Chapter 6) +={Free revealing of innovation information:case for+5|evidence of+5|in information communities+5|intellectual property rights and+5|users and+5;Users:free revealing by+5} + +The social efficiency of a system in which individual innovations are developed by individual users is increased if users somehow diffuse what they have developed to others. Manufacturer-innovators /{partially}/ achieve this when they sell a product or a service on the open market (partially because they diffuse the product incorporating the innovation, but often not all the information that others would need to fully understand and replicate it). If user-innovators do not somehow also diffuse what they have done, multiple users with very similar needs will have to independently develop very similar innovations---a poor use of resources from the viewpoint of social welfare. Empirical research shows that users often do achieve widespread diffusion by an unexpected means: they often "freely reveal" what they have developed. 
When we say that an innovator freely reveals information about a product or service it has developed, we mean that all intellectual property rights to that information are voluntarily given up by the innovator, and all interested parties are given access to it---the information becomes a public good. 
+={Free revealing of innovation information:manufacturers and+2;Innovation:distributed process of+4;Intellectual property rights:free revealing and;Manufacturers:free revealing and+2} + +The empirical finding that users often freely reveal their innovations has been a major surprise to innovation researchers. On the face of it, if a user-innovator's proprietary information has value to others, one would think that the user would strive to prevent free diffusion rather than help others to free ride on what it has developed at private cost. Nonetheless, it is now very clear that individual users and user firms---and sometimes manufacturers---often freely reveal detailed information about their innovations. + +The practices visible in "open source" software development were important in bringing this phenomenon to general awareness. In these projects it was clear /{policy}/ that project contributors would routinely and systematically freely reveal code they had developed at private expense (Raymond 1999). However, free revealing of product innovations has a history that began long before the advent of open source software. Allen, in his 1983 study of the eighteenth-century iron industry, was probably the first to consider the phenomenon systematically. Later, Nuvolari (2004) discussed free revealing in the early history of mine pumping engines. Contemporary free revealing by users has been documented by von Hippel and Finkelstein (1979) for medical equipment, by Lim (2000) for semiconductor process equipment, by Morrison, Roberts, and von Hippel (2000) for library information systems, and by Franke and Shah (2003) for sporting equipment. 
Henkel (2003) has documented free revealing among manufacturers in the case of embedded Linux software. +={Allen, R.;Finkelstein, S.;Franke, N.;Henkel, J.;Lim, K.;Linux;Morrison, Pamela;Nuvolari, A.;Raymond, E.;Roberts, J.;Shah, S.;von Hippel, E.;Free revealing of innovation information:open source software and+7;Intellectual property rights:open source software and+1;Open source software:See also Free software communities and|free revealing and+7|intellectual property rights and+2;Library information search system} + +Innovators often freely reveal because it is often the best or the only practical option available to them. Hiding an innovation as a trade secret is unlikely to be successful for long: too many generally know similar things, and some holders of the "secret" information stand to lose little or nothing by freely revealing what they know. Studies find that innovators in many fields view patents as having only limited value. Copyright protection and copyright licensing are applicable only to "writings," such as books, graphic images, and computer software. +={Intellectual property rights:copyrights and|trade secrets and|free revealing and|licensing of|patents and;Government policy:trade secrets and} + +Active efforts by innovators to freely reveal---as opposed to sullen acceptance---are explicable because free revealing can provide innovators with significant private benefits as well as losses or risks of loss. Users who freely reveal what they have done often find that others then improve or suggest improvements to the innovation, to mutual benefit (Raymond 1999). Freely revealing users also may benefit from enhancement of reputation, from positive network effects due to increased diffusion of their innovation, and from other factors. 
Being the first to freely reveal a particular innovation can also enhance the benefits received, and so there can actually be a rush to reveal, much as scientists rush to publish in order to gain the benefits associated with being the first to have made a particular advancement. +={Raymond, E.} + +!_ Innovation Communities (Chapter 7) +={Innovation communities+3} + +Innovation by users tends to be widely distributed rather than concentrated among just a very few very innovative users. As a result, it is important for user-innovators to find ways to combine and leverage their efforts. Users achieve this by engaging in many forms of cooperation. Direct, informal user-to-user cooperation (assisting others to innovate, answering questions, and so on) is common. Organized cooperation is also common, with users joining together in networks and communities that provide useful structures and tools for their interactions and for the distribution of innovations. Innovation communities can increase the speed and effectiveness with which users and also manufacturers can develop and test and diffuse their innovations. They also can greatly increase the ease with which innovators can build larger systems from interlinkable modules created by community participants. +={Users:innovation communities and+2} + +Free and open source software projects are a relatively well-developed and very successful form of Internet-based innovation community. However, innovation communities are by no means restricted to software or even to information products, and they can play a major role in the development of physical products. Franke and Shah (2003) have documented the value that user innovation communities can provide to user-innovators developing physical products in the field of sporting equipment. The analogy to open source innovation communities is clear. 
+={Franke, N.;Shah, S.;Free software;Innovation communities:open source software and|physical products and|sporting equipment and;Open source software:innovation communities and} + +The collective or community effort to provide a public good---which is what freely revealed innovations are---has traditionally been explored in the literature on "collective action." However, behaviors seen in extant innovation communities fail to correspond to that literature at major points. In essence, innovation communities appear to be more robust with respect to recruiting and rewarding members than the literature would predict. Georg von Krogh and I attribute this to innovation contributors' obtaining some private rewards that are not shared equally by free riders (those who take without contributing). For example, a product that a user-innovator develops and freely reveals might be perfectly suited to that user-innovator's requirements but less well suited to the requirements of free riders. Innovation communities thus illustrate a "private-collective" model of innovation incentive (von Hippel and von Krogh 2003). +={von Hippel, E.;von Krogh, G.;Free revealing of innovation information:collective action model for|private-collective model for;Innovation communities:and sources of innovation;Private-collective model;Social welfare:private-collective model and+2;Users:social welfare and+6} + +!_ Adapting Policy to User Innovation (Chapter 8) +={Government policy:social welfare and+5|user innovation and+5;Social welfare:and government policy+5} + +Is innovation by users a "good thing?" Welfare economists answer such a question by studying how a phenomenon or a change affects social welfare. Henkel and von Hippel (2005) explored the social welfare implications of user innovation. They found that, relative to a world in which only manufacturers innovate, social welfare is very probably increased by the presence of innovations freely revealed by users. 
This finding implies that policy making should support user innovation, or at least should ensure that legislation and regulations do not favor manufacturers at the expense of user-innovators. +={Henkel, J.;von Hippel, E.;Free revealing of innovation information:manufacturers and} + +The transitions required of policy making to achieve neutrality with respect to user innovation vs. manufacturer innovation are significant. Consider the impact on open and distributed innovation of past and current policy decisions. Research done in the past 30 years has convinced many academics that intellectual property law is sometimes or often not having its intended effect. Intellectual property law was intended to increase the amount of innovation investment. Instead, it now appears that there are economies of scope in both patenting and copyright that allow firms to use these forms of intellectual property law in ways that are directly opposed to the intent of policy makers and to the public welfare. Major firms can invest to develop large portfolios of patents. They can then use these to create "patent thickets"---dense networks of patent claims that give them plausible grounds for threatening to sue across a wide range of intellectual property. They may do this to prevent others from introducing a superior innovation and/or to demand licenses from weaker competitors on favorable terms (Shapiro 2001). Movie, publishing, and software firms can use large collections of copyrighted work to a similar purpose (Benkler 2002). In view of the distributed nature of innovation by users, with each tending to create a relatively small amount of intellectual property, users are likely to be disadvantaged by such strategies. 
+={Benkler, Y.;Shapiro, C.;Intellectual property rights:copyrights and+1;Government policy:copyrights and+1|patents and|patent thickets and;Intellectual property rights:patents and|patent thickets and|licensing of} + +It is also important to note that users (and manufacturers) tend to build prototypes of their innovations economically by modifying products already available on the market to serve a new purpose. Laws such as the (US) Digital Millennium Copyright Act, intended to prevent consumers from illegally copying protected works, also can have the unintended side effect of preventing users from modifying products that they purchase (Varian 2002). Both fairness and social welfare considerations suggest that innovation-related policies should be made neutral with respect to the sources of innovation. +={Digital Millennium Copyright Act;Varian, H.} + +It may be that current impediments to user innovation will be solved by legislation or by policy making. However, beneficiaries of existing law and policy will predictably resist change. Fortunately, a way to get around some of these problems is in the hands of innovators themselves. Suppose many innovators in a particular field decide to freely reveal what they have developed, as they often have reason to do. In that case, users can collectively create an information commons (a collection of information freely available to all) containing substitutes for some or a great deal of information now held as private intellectual property. Then user-innovators can work around the strictures of intellectual property law by simply using these freely revealed substitutes (Lessig 2001). This is essentially what is happening in the field of software. For many problems, user-innovators in that field now have a choice between proprietary, closed software provided by Microsoft and other firms and open source software that they can legally download from the Internet and legally modify to serve their own specific needs. 
+={Lessig, L.;Microsoft;Free revealing of innovation information:collective action model for|in information communities|intellectual property rights and|users and+1;Information commons;Intellectual property rights:free revealing and|information communities and;Microsoft;User need;Users:free revealing by} + +Policy making that levels the playing field between users and manufacturers will force more rapid change onto manufacturers but will by no means destroy them. Experience in fields where open and distributed innovation processes are far advanced show how manufacturers can and do adapt. Some, for example, learn to supply proprietary platform products that offer user-innovators a framework upon which to develop and use their improvements. +={Manufacturers:innovation and+7} + +!_ Democratizing Innovation (Chapter 9) + +Users' ability to innovate is improving /{radically}/ and /{rapidly}/ as a result of the steadily improving quality of computer software and hardware, improved access to easy-to-use tools and components for innovation, and access to a steadily richer innovation commons. Today, user firms and even individual hobbyists have access to sophisticated programming tools for software and sophisticated CAD design tools for hardware and electronics. These information-based tools can be run on a personal computer, and they are rapidly coming down in price. As a consequence, innovation by users will continue to grow even if the degree of heterogeneity of need and willingness to invest in obtaining a precisely right product remains constant. +={Free revealing of innovation information:users and;Task partitioning+5:See also Toolkits;Toolkits:manufacturers and+5|task partitioning+5|user-friendly tools for|users and+5;User need;Users:innovation and+5} + +Equivalents of the innovation resources described above have long been available within corporations to a few. 
Senior designers at firms have long been supplied with engineers and designers under their direct control, and with the resources needed to quickly construct and test prototype designs. The same is true in other fields, including automotive design and clothing design: just think of the staffs of engineers and modelmakers supplied so that top auto designers can quickly realize and test their designs. + +But if, as we have seen, the information needed to innovate in important ways is widely distributed, the traditional pattern of concentrating innovation-support resources on a few individuals is hugely inefficient. High-cost resources for innovation support cannot efficiently be allocated to "the right people with the right information:" it is very difficult to know who these people may be before they develop an innovation that turns out to have general value. When the cost of high-quality resources for design and prototyping becomes very low (the trend we have described), these resources can be diffused very widely, and the allocation problem diminishes in significance. The net result is and will be to democratize the opportunity to create. +={Manufacturers:innovation and+9;Users:innovation and+9} + +On a level playing field, users will be an increasingly important source of innovation and will increasingly substitute for or complement manufacturers' innovation-related activities. In the case of information products, users have the possibility of largely or completely doing without the services of manufacturers. Open source software projects are object lessons that teach us that users can create, produce, diffuse, provide user field support for, update, and use complex products by and for themselves in the context of user innovation communities. In physical product fields, product development by users can evolve to the point of largely or totally supplanting product development---but not product manufacturing---by manufacturers. 
(The economies of scale associated with manufacturing and distributing physical products give manufacturers an advantage over "do-it-yourself" users in those activities.) +={Custom products:manufacturers and+2|users and+2;Innovation communities+1:open source software and|physical products and;Manufacturers:custom products and+2;Users:custom products and+2} + +The evolving pattern of the locus of product development in kitesurfing illustrates how users can displace manufacturers from the role of product developer. In that industry, the collective product-design and testing work of a user innovation community has clearly become superior in both quality and quantity relative to the levels of in-house development effort that manufacturers of kitesurfing equipment can justify. Accordingly, manufacturers of such equipment are increasingly shifting away from product design and focusing on producing product designs first developed and tested by user innovation communities. +={Innovation communities:kitesurfing and;Kitesurfing} + +How can or should manufacturers adapt to users' encroachment on elements of their traditional business activities? There are three general possibilities: (1) Produce user-developed innovations for general commercial sale and/or offer custom manufacturing to specific users. (2) Sell kits of product-design tools and/or "product platforms" to ease users' innovation-related tasks. (3) Sell products or services that are complementary to user-developed innovations. Firms in fields where users are already very active in product design are experimenting with all these possibilities. 
+={Custom products:product platforms and|toolkits and;Toolkits:platform products and} + +!_ Application: Searching for Lead User Innovations (Chapter 10) +={Custom products:manufacturers and+2;Manufacturers:custom products and+2;Users:custom products and+2;Lead users+59:idea generation and+2|identification of+2|innovation and+2} + +Manufacturers design their innovation processes around the way they think the process works. The vast majority of manufacturers still think that product development and service development are always done by manufacturers, and that their job is always to find a need and fill it rather than to sometimes find and commercialize an innovation that lead users have already developed. Accordingly, manufacturers have set up market-research departments to explore the needs of users in the target market, product-development groups to think up suitable products to address those needs, and so forth. The needs and prototype solutions of lead users---if encountered at all---are typically rejected as outliers of no interest. Indeed, when lead users' innovations do enter a firm's product line---and they have been shown to be the actual source of many major innovations for many firms--- they typically arrive with a lag and by an unconventional and unsystematic route. For example, a manufacturer may "discover" a lead user innovation only when the innovating user firm contacts the manufacturer with a proposal to produce its design in volume to supply its own in-house needs. Or sales or service people employed by a manufacturer may spot a promising prototype during a visit to a customer's site. +={Marketing research+1} + +Modification of firms' innovation processes to /{systematically}/ search for and further develop innovations created by lead users can provide manufacturers with a better interface to the innovation process as it actually works, and so provide better performance. A natural experiment conducted at 3M illustrates this possibility. 
Annual sales of lead user product ideas generated by the average lead user project at 3M were conservatively forecast by management to be more than 8 times the sales forecast for new products developed in the traditional manner---$146 million versus $18 million per year. In addition, lead user projects were found to generate ideas for new product lines, while traditional market-research methods were found to produce ideas for incremental improvements to existing product lines. As a consequence, 3M divisions funding lead user project ideas experienced their highest rate of major product line generation in the past 50 years (Lilien et al. 2002). +={Lilien, G.;Lead users:3M and;3M Corporation} + +!_ Application: Toolkits for User Innovation and Custom Design (Chapter 11) +={Toolkits+2} + +Firms that understand the distributed innovation process and users' roles in it can /{change}/ factors affecting lead user innovation and so affect its rate and direction in ways they value. Toolkits for user innovation custom design offer one way of doing this. This approach involves partitioning product-development and service-development projects into solution-information-intensive subtasks and need-information-intensive subtasks. Need-intensive subtasks are then assigned to users along with a kit of tools that enable them to effectively execute the tasks assigned to them. The resulting co-location of sticky information and problem-solving activity makes innovation within the solution space offered by a particular toolkit cheaper for users. It accordingly attracts them to the toolkit and so influences what they develop and how they develop it. The custom semiconductor industry was an early adopter of toolkits. In 2003, more than $15 billion worth of semiconductors were produced that had been designed using this approach. 
+={Toolkits:platform products and+2;Lead users:innovation and;Sticky information:innovation and} + +Manufacturers that adopt the toolkit approach to supporting and channeling user innovation typically face major changes in their business models, and important changes in industry structure may also follow. For example, as a result of the introduction of toolkits to the field of semiconductor manufacture, custom semiconductor manufacturers---formerly providers of both design and manufacturing services to customers---lost much of the work of custom product design to customers. Many of these manufacturers then became specialist silicon foundries, supplying production services primarily. Manufacturers may or may not wish to make such changes. However, experience in fields where toolkits have been deployed shows that customers tend to prefer designing their own custom products with the aid of a toolkit over traditional manufacturer-centric development practices. As a consequence, the only real choice for manufacturers in a field appropriate to the deployment of toolkits may be whether to lead or to follow in the transition to toolkits. + +!_ Linking User Innovation to Other Phenomena and Fields (Chapter 12) + +In chapter 12 I discuss links between user innovation and some related phenomena and literatures. With respect to phenomena, I point out the relationship of user innovation to /{information}/ communities, of which user innovation communities are a subset. One open information community is the online encyclopedia Wikipedia (www.wikipedia.org). Other such communities include the many specialized Internet sites where individuals with both common and rare medical conditions can find one another and can find specialists in those conditions. Many of the advantages associated with user innovation communities also apply to open information networks and communities. 
Analyses appropriate to information communities follow the same overall pattern as the analyses provided in this book for innovation communities. However, they are also simpler, because in open information communities there may be little or no proprietary information being transacted and thus little or no risk of related losses for participants. +={Wikipedia;Information commons;Information communities:See also Innovation communities;Innovation communities+2:and sources of innovation+2;Intellectual property rights:information communities and+2|innovation and+2} + +Next I discuss links between user-centric innovation phenomena and the literature on the economics of knowledge that have been forged by Foray (2004) and Weber (2004). I also discuss how Porter's 1991 work on the competitive advantage of nations can be extended to incorporate findings on nations' lead users as product developers. Finally, I point out how findings explained in this book link to and complement research on the Social Construction of Technology (Pinch and Bijker 1987). +={Bijker, W.;Foray, D.;Pinch, T.;Weber, S.;Porter, M.;Knowledge, production and distribution of} + +I conclude this introductory chapter by reemphasizing that user innovation, free revealing, and user innovation communities will flourish under many but not all conditions. What we know about manufacturer-centered innovation is still valid; however, lead-user-centered innovation patterns are increasingly important, and they present major new opportunities and challenges for us all. +={Free revealing of innovation information:free revealing and;Users:free revealing by} + +1~ 2 Development of Products by Lead Users + +The idea that novel products and services are developed by manufacturers is deeply ingrained in both traditional expectations and scholarship. When we as users of products complain about the shortcomings of an existing product or wish for a new one, we commonly think that "they" should develop it---not us. 
Even the conventional term for an individual end user, "consumer," implicitly suggests that users are not active in product and service development. Nonetheless, there is now very strong empirical evidence that product development and modification by both user firms and users as individual consumers is frequent, pervasive, and important. +={Consumers+5} + +I begin this chapter by reviewing the evidence that many users indeed do develop and modify products for their own use in many fields. I then show that innovation is concentrated among /{lead}/ users, and that lead users' innovations often become commercial products. + +!_ Many Users Innovate + +The evidence on user innovation frequency and pervasiveness is summarized in table 2.1. We see here that the frequency with which user firms and individual consumers develop or modify products for their own use ranges from 10 percent to nearly 40 percent in fields studied to date. The matter has been studied across a wide range of industrial product types where innovating users are user firms, and also in various types of sporting equipment, where innovating users are individual consumers. +={Lead users:sporting equipment and+1;Sporting equipment:lead users and+1} + +The studies cited in table 2.1 clearly show that a lot of product development and modification by users is going on. However, these findings should not be taken to reflect innovation rates in overall populations of users. All of the studies probably were affected by a response bias. (That is, if someone sends a questionnaire about whether you innovated or not, you might be more inclined to respond if your answer is "Yes.") Also, each of the studies looked at innovation rates affecting a particular product type among users who care a great deal about that product type. 
Thus, university surgeons (study 4 in table 2.1) care a great deal about having just-right surgical equipment, just as serious mountain bikers (study 8) care a great deal about having just-right equipment for their sport. As the intensity of interest goes down, it is likely that rates of user innovation drop too. This is probably what is going on in the case of the study of purchasers of outdoor consumer products (study 6). All we are told about that sample of users of outdoor consumer products is that they are recipients of one or more mail order catalogs from suppliers of relatively general outdoor items---winter jackets, sleeping bags, and so on. Despite the fact that these users were asked if they have developed or modified any item in this broad category of goods (rather than a very specific one such as a mountain bike), just 10 percent answered in the affirmative. Of course, 10 percent or even 5 percent of a user population numbering in the tens of millions worldwide is still a very large number---so we again realize that many users are developing and modifying products. +={Lead users:outdoor consumer products and;Outdoor products} + +!_ Table 2.1 +Many respondents reported developing or modifying products for their own use in the eight product areas listed here. +={Lüthje, C.+1;Urban, G.+1;Franke, N.+1;Herstatt, C.+1;Morrison, Pamela+1;von Hippel, E.+1;Lead users:Apache web server software and+1r|library information search system and+1|mountain biking and+1|outdoor consumer products and+1|pipe hanger hardware and+1|printed circuit CAD software and+1|surgical equipment and+3;Library information search system+1;Mountain biking+1;Outdoor products+1;Pipe hanger hardware+1;Printed circuit CAD software+1;Surgical equipment+1} + +table{~h c4; 20; 45; 15; 20; + +~ +Number and type of Users Sampled +Percentage developing and building product for own use +Source + +Industrial products +~ +~ +~ + +1. 
Printed circuit CAD software +136 user firm attendees at PC-CAD conference +24.3% +Urban and von Hippel 1988 + +2. Pipe hanger hardware +Employees in 74 pipe hanger installation firms +36% +Herstatt and von Hippel 1992 + +3. Library information systems +Employees in 102 Australian libraries using computerized OPAC library information systems +26% +Morrison et al. 2000 + +4. Surgical equipment +261 surgeons working in university clinics in Germany +22% +Lüthje 2003 + +5. Apache OS server software security features +131 technically sophisticated Apache features users (webmasters) +19.1% +Franke and von Hippel 2003 + +Consumer products +~ +~ +~ + +6. Outdoor consumer products +153 recipients of mail order catalogs for outdoor activity products for consumers +9.8% +Lüthje 2004 + +7. "Extreme" sporting equipment +197 members of 4 specialized sporting clubs in 4 "extreme" sports +37.8% +Franke and Shah 2003 + +8. Mountain biking equipment +291 mountain bikers in a geographic region +19.2% +Lüthje et al. + +}table + +The cited studies also do not set an upper or a lower bound on the commercial or technical importance of user-developed products and product modifications that they report, and it is likely that most are of minor significance. However, most innovations from any source are minor, so user-innovators are no exception in this regard. Further, to say an innovation is minor is not the same as saying it is trivial: minor innovations are cumulatively responsible for much or most technical progress. Hollander (1965) found that about 80 percent of unit cost reductions in Rayon manufacture were the cumulative result of minor technical changes. Knight (1963, VII, pp. 2--3) measured performance advances in general-purpose digital computers and found, similarly, that "these advances occur as the result of equipment designers using their knowledge of electronics technology to produce a multitude of small improvements that together produce significant performance advances." 
+={Hollander, S.;Knight, K.;Users:process improvements by+1} + +Although most products and product modifications that users or others develop will be minor, users are by no means restricted to developing minor or incremental innovations. Qualitative observations have long indicated that important process improvements are developed by users. Smith (1776, pp. 11--13) pointed out the importance of "the invention of a great number of machines which facilitate and abridge labor, and enable one man to do the work of many." He also noted that "a great part of the machines made use of in those manufactures in which labor is most subdivided, were originally the invention of common workmen, who, being each of them employed in some very simple operation, naturally turned their thoughts towards finding out easier and readier methods of performing it." Rosenberg (1976) studied the history of the US machine tool industry and found that important and basic machine types like lathes and milling machines were first developed and built by user firms having a strong need for them. Textile manufacturing firms, gun manufacturers and sewing machine manufacturers were important early user-developers of machine tools. Other studies show quantitatively that some of the most important and novel products and processes have been developed by user firms and by individual users. Enos (1962) reported that nearly all the most important innovations in oil refining were developed by user firms. Freeman (1968) found that the most widely licensed chemical production processes were developed by user firms. Von Hippel (1988) found that users were the developers of about 80 percent of the most important scientific instrument innovations, and also the developers of most of the major innovations in semiconductor processing. Pavitt (1984) found that a considerable fraction of invention by British firms was for in-house use. 
Shah (2000) found that the most commercially important equipment innovations in four sporting fields tended to be developed by individual users. +={Enos, J.;Freeman, C.;Pavitt, K.;Rosenberg, N.;Shah, S.;Smith, A.;von Hippel, E.+23;Sporting equipment:lead users and} + +!_ Lead User Theory +={Lead users:theory of+3} + +A second major finding of empirical research into innovation by users is that most user-developed products and product modifications (and the most commercially attractive ones) are developed by users with "lead user" characteristics. Recall from chapter 1 that lead users are defined as members of a user population having two distinguishing characteristics: (1) They are at the leading edge of an important market trend(s), and so are currently experiencing needs that will later be experienced by many users in that market. (2) They anticipate relatively high benefits from obtaining a solution to their needs, and so may innovate. +={Economic benefit, expectations of by lead users+1;Lead users:economic benefit, expectations of+1} + +The theory that led to defining "lead users" in terms of these two characteristics was derived as follows (von Hippel 1986). First, the "ahead on an important market trend" variable was included because of its assumed effect on the commercial attractiveness of innovations developed by users residing at a leading-edge position in a market. Market needs are not static---they evolve, and often they are driven by important underlying trends. If people are distributed with respect to such trends as diffusion theory indicates, then people at the leading edges of important trends will be experiencing needs today (or this year) that the bulk of the market will experience tomorrow (or next year). And, if users develop and modify products to satisfy their own needs, then the innovations that lead users develop should later be attractive to many. 
The expected benefits variable and its link to innovation likelihood was derived from studies of industrial product and process innovations. These showed that the greater the benefit an entity expects to obtain from a needed innovation, the greater will be that entity's investment in obtaining a solution, where a solution is an innovation either developed or purchased (Schmookler 1966; Mansfield 1968). +={Mansfield, E.;Schmookler, J.} + +Empirical studies to date have confirmed lead user theory. Morrison, Roberts, and Midgely (2004) studied the characteristics of innovating and non-innovating users of computerized library information systems in a sample of Australian libraries. They found that the two lead user characteristics were distributed in a continuous, unimodal manner in that sample. They also found that the two characteristics of lead users and the actual development of innovations by users were highly correlated. Franke and von Hippel (2003b) confirmed these findings in a study of innovating and non-innovating users of Apache web server software. They also found that the commercial attractiveness of innovations developed by users increased along with the strength of those users' lead user characteristics. +={Franke, N.;Midgely, David;Morrison, Pamela+19;Roberts, J.;Apache web server software;Lead users:Apache web server software and|library information search system and;Library information search system} + +!_ Evidence of Innovation by Lead Users + +Several studies have found that user innovation is largely the province of users that have lead user characteristics, and that products lead users develop often form the basis for commercial products. These general findings appear robust: the studies have used a variety of techniques and have addressed a variety of markets and innovator types. Brief reviews of four studies will convey the essence of what has been found. 
+ +!_ Innovation in Industrial Product User Firms + +In the first empirical study of lead users' role in innovation, Urban and von Hippel (1988) studied user innovation activity related to a type of software used to design printed circuit boards. A major market trend to which printed circuit computer-aided design software (PC-CAD) must respond is the steady movement toward packing electronic circuitry more densely onto circuit boards. Higher density means one that can shrink boards in overall size and that enables the circuits they contain to operate faster---both strongly desired attributes. Designing a board at the leading edge of what is technically attainable in density at any particular time is a very demanding task. It involves some combination of learning to make the printed circuit wires narrower, learning how to add more layers of circuitry to a board, and using smaller electronic components. +={Urban, G.+1;Lead users:printed circuit CAD software and+3;Lead users:printed circuit CAD software and+3;Printed circuit CAD software+3} + +To explore the link between user innovation and needs at the leading edge of the density trend, Urban and von Hippel collected a sample of 138 user-firm employees who had attended a trade show on the topic of PC-CAD. To learn the position of each firm on the density trend, they asked questions about the density of the boards that each PC-CAD user firm was currently producing. To learn about each user's likely expected benefits from improvements to PC-CAD, they asked questions about how satisfied each respondent was with their firm's present PC-CAD capabilities. To learn about users' innovation activities, they asked questions about whether each firm had modified or built its own PC-CAD software for its own in-house use. + +Users' responses were cluster analyzed, and clear lead user (n = 38) and non-lead-user (n = 98) clusters were found. 
Users in the lead user cluster were those that made the densest boards on average and that also were dissatisfied with their PC-CAD capabilities. In other words, they were at the leading edge of an important market trend, and they had a high incentive to innovate to improve their capabilities. Strikingly, 87 percent of users in the lead user cluster reported either developing or modifying the PC-CAD software that they used. In contrast, only 1 percent of non-lead users reported this type of innovation. Clearly, in this case user innovation was very strongly concentrated in the lead user segment of the user population. A discriminant analysis indicated that "build own system" was the most important indicator of membership in the lead user cluster. The discriminant analysis had 95.6 percent correct classification of cluster membership. + +The commercial attractiveness of PC-CAD solutions developed by lead users was high. This was tested by determining whether lead users and more ordinary users preferred a new PC-CAD system concept containing features developed by lead users over the best commercial PC-CAD system available at the time of the study (as determined by a large PC-CAD system manufacturer's competitive analysis) and two additional concepts. The concept containing lead user features was significantly preferred at even twice the price (p < 0.01). +={Lead users:commercial attractiveness of} + +!_ Innovation in Libraries +={Lead users:library information search system and+11;Library information search system+11} + +Morrison, Roberts, and von Hippel (2000) explored user modifications made by Australian libraries to computerized information search systems called Online Public Access systems ("OPACs"). Libraries might not seem the most likely spot for technological innovators to lurk. However, computer technologies and the Internet have had a major effect on how libraries are run, and many libraries now have in-house programming expertise. 
Computerized search methods for libraries were initially developed by advanced and technically sophisticated user institutions. Development began in the United States in the 1970s with work by major universities and the Library of Congress, with support provided by grants from the federal government (Tedd 1994). Until roughly 1978, the only such systems extant were those that had been developed by libraries for their own use. In the late 1970s, the first commercial providers of computerized search systems for libraries appeared in the United States, and by 1985 there were at least 48 OPAC vendors in the United States alone (Matthews 1985). In Australia (site of the study sample), OPAC adoption began about 8 years later than in the United States (Tedd 1994). +={Tedd, L.;Roberts, J.+3} + +Morrison, Roberts, and I obtained responses from 102 Australian libraries that were users of OPACs. We found that 26 percent of these had in fact modified their OPAC hardware or software far beyond the user-adjustment capabilities provided by the system manufacturers. The types of innovations that the libraries developed varied widely according to local needs. For example, the library that modified its OPAC to "add book retrieval instructions for staff and patrons" (table 2.2) did so because its collection of books was distributed in a complex way across a number of buildings--- making it difficult for staff and patrons to find books without precise directions. There was little duplication of innovations except in the case of adding Internet search capabilities to OPACs. In that unusual case, nine libraries went ahead and did the programming needed to add this important feature in advance of its being offered by the manufacturers of their systems. + +!_ Table 2.2 +OPAC modifications created by users served a wide variety of functions. 
+ +table{~h c2; 50; 50; + +Improved library management +Improved information-search capabilities + +Add library patron summary statistics +Integrate images in records (2) + +Add library identifiers +Combined menu/command searches + +Add location records for physical audit +Add title sorting and short title listing + +Add book retrieval instructions for staff and patrons +Add fast access key commands + +Add CD ROM System backup +Add multilingual search formats <:br>Add key word searches (2) + +Add book access control based on copyright +Add topic linking and subject access + +Patrons can check their status via OPAC +Add prior search recall feature + +Patrons can reserve books via OPAC (2) +Add search "navigation aids" + +Remote access to OPAC by different systems +Add different hierarchical searches + +Add graduated system access via password +Access to other libraries' catalogs (2) + +Add interfaces to other in-house IT systems +Add or customize web interface (9) + + Word processing and correspondence (2) + Hot links for topics + + Umbrella for local information collection (2) + Extended searches + + Local systems adaptation + Hot links for source material + +}table + +Source of data: Morrison et al. 2000, table 1. Number of users (if more than one) developing functionally similar innovations is shown in parentheses after description of innovation. + +The libraries in the sample were asked to rank themselves on a number of characteristics, including "leading edge status" (LES). (Leading edge status, a construct developed by Morrison, is related to and highly correlated with the lead user construct (in this sample, ρ ,{(LES, CLU)}, = 0.904, /{p}/ = 0.000). ~{ LES contains four types of measures. Three ("benefits recognized early," "high benefits expected," and "direct elicitation of the construct") contain the core components of the lead user construct. 
The fourth ("applications generation") is a measure of a number of innovation-related activities in which users might engage: they "suggest new applications," they "pioneer those applications," and (because they have needs or problems earlier than their peers) they may be "used as a test site" (Morrison, Midgely, and Roberts 2004). }~ Self-evaluation bias was checked for by asking respondents to name other libraries they regarded as having the characteristics of lead users. Self-evaluations and evaluations by others did not differ significantly. +={Midgely, David;Morrison, Pamela;Roberts, J.} + +Libraries that had modified their OPAC systems were found to have significantly higher LES---that is, to be lead users. They were also found to have significantly higher incentives to make modifications than non-innovators, better in-house technical skills, and fewer "external resources" (for example, they found it difficult to find outside vendors to develop the modifications they wanted for them). Application of these four variables in a logit model classified libraries into innovator and non-innovator categories with an accuracy of 88 percent (table 2.3). + +!_ Table 2.3 +Factors associated with innovating in libraries (logit model). χ^{2}^/,{4}, = 33.85; ρ^{2}^ = 0.40; classification rate = 87.78%. + +table{~h c3; 40; 30; 30; + +~ +Coefficient +Standard error + +Leading-edge status +1.862 +0.601 + +Lack of incentive to modify +--0.845 +0.436 + +Lack of in-house technology skills +--1.069 +0.412 + +Lack of external resources +0.695 +0.456 + +Constant +--2.593 +0.556 + +}table + +Source: Morrison et al. 2000, table 6. + +The commercial value of user-developed innovations in the library OPAC sample was assessed in a relatively informal way. Two development managers employed by the Australian branches of two large OPAC manufacturers were asked to evaluate the commercial value of each user innovation in the sample. 
They were asked two questions about each: (1) "How important commercially to your firm is the functionality added to OPACs by this user-developed modification?" (2) "How novel was the information contained in the user innovation to your firm at the time that innovation was developed?" Responses from both managers indicated that about 70 percent (25 out of 39) of the user modifications provided functionality improvements of at least "medium" commercial importance to OPACs---and in fact many of the functions were eventually incorporated in the OPACs the manufacturers sold. However, the managers also felt that their firms generally already knew about the lead users' needs when the users developed their solutions, and that the innovations the users developed provided novel information to their company only in 10--20 percent of the cases. (Even when manufacturers learn about lead users' needs early, they may not think it profitable to develop their own solution for an "emerging" need until years later. I will develop this point in chapter 4.) +={Lead users:commercial attractiveness of;Manufacturers:innovation and|lead users and} + +!_ "Consumer" Innovation in Sports Communities +={Franke, N.+11;Shah, S.+11;Innovation:and sporting equipment+11;Lead users:manufacturers and+11;Sporting equipment:lead users and+11} + +% check manufacturers ref, see previous paragraph + +Franke and Shah (2003) studied user innovation in four communities of sports enthusiasts. The communities, all located in Germany, were focused on four very different sports. +={Manufacturers:innovation and|lead users and} + +One community was devoted to canyoning, a new sport popular in the Alps. Canyoning combines mountain climbing, abseiling (rappelling), and swimming in canyons. Members do things like rappel down the middle of an active waterfall into a canyon below. Canyoning requires significant skill and involves physical risk. 
It is also a sport in rapid evolution as participants try new challenges and explore the edges of what is both achievable and fun. + +The second community studied was devoted to sailplaning. Sailplaning or gliding, a more mature sport than canyoning, involves flying in a closed, engineless glider carrying one or two people. A powered plane tows the glider to a desired altitude by means of a rope; then the rope is dropped and the engineless glider flies on its own, using thermal updrafts in the atmosphere to gain altitude as possible. The sailplaning community studied by Franke and Shah consisted of students of technical universities in Germany who shared an interest in sailplaning and in building their own sailplanes. + +Boardercross was the focus of the third community. In this sport, six snowboarders compete simultaneously in a downhill race. Racetracks vary, but each is likely to incorporate tunnels, steep curves, water holes, and jumps. The informal community studied consisted of semi-professional athletes from all over the world who met in as many as ten competitions a year in Europe, in North America, and in Japan. + +The fourth community studied was a group of semi-professional cyclists with various significant handicaps, such as cerebral palsy or an amputated limb. Such individuals must often design or make improvements to their equipment to accommodate their particular disabilities. These athletes knew each other well from national and international competitions, training sessions, and seminars sponsored by the Deutscher Sportbund (German National Sports Council). + +A total of 197 respondents (a response rate of 37.8 percent) answered a questionnaire about innovation activities in their communities. Thirty-two percent reported that they had developed or modified equipment they used for their sport. 
The rate of innovation varied among the sports, the high being 41 percent of the sailplane enthusiasts reporting innovating and the low being 18 percent of the boardercross snowboarders reporting. (The complexity of the equipment used in the various sports probably had something to do with this variation: a sailplane has many more components than a snowboard.) + +The innovations developed varied a great deal. In the sailplane community, users developed innovations ranging from a rocket-assisted emergency ejection system to improvements in cockpit ventilation. Snowboarders invented such things as improved boots and bindings. Canyoners' inventions included very specialized solutions, such as a way to cut loose a trapped rope by using a chemical etchant. With respect to commercial potential, +={Lead users:commercial attractiveness of} + +Franke and Shah found that 23 percent of the user-developed innovations reported were or soon would be produced for sale by a manufacturer. Franke and Shah found that users who innovated were significantly higher on measures of the two lead user characteristics than users who did not innovate (table 2.4). They also found that the innovators spent more time in sporting and community-related activities and felt they had a more central role in the community. + +!_ Table 2.4 +Factors associated with innovation in sports communities. + +table{~h c4; 55; 15; 15; 15; + +~ +Innovators^{a}^ +Non-innovators^{b}^ +Significance of difference^{c}^ + +Time in community +~ +~ +~ + +Years as a community member +4.46 +3.17 +p < 0.01 + +Days per year spent with community members +43.07 +32.73 +p < 0.05 + +Days per year spent participating in the sport +72.48 +68.71 +not significant + +Role in community^{d}^ +~ +~ +~ + +"I am a very active member of the community." +2.85 +3.82 +p < 0.01 + + +"I get together with members of the community for activities that are not related to the sport (movies, dinner parties, etc.)." 
+3.39 +4.14 +p < 0.05 + +"The community takes my opinion into account when making decisions" +2.89 +3.61 +p < 0.05 + +Lead user characteristic 1: being ahead of the trend^{d}^ +~ +~ +~ + +"I usually find out about new products and solutions earlier than others." +2.71 +4.03 +p < 0.001 + +"I have benefited significantly by the early adoption and use of new products." +3.58 +4.34 +p < 0.01 + +"I have tested prototype versions of new products for manufacturers." +4.94 +5.65 +p < 0.05 + +"In my sport I am regarded as being on the "cutting edge." +4.56 +5.38 +p < 0.01 + + +"I improved and developed new techniques in boardercrossing." +4.29 +5.84 +p < 0.001 + +Lead user characteristic 2: high benefit from innovation^{d}^ +~ +~ +~ + +"I have new needs which are not satisfied by existing products." +3.27 +4.38 +p < 0.001 + + +"I am dissatisfied with the existing equipment." +3.90 +5.13 +p < 0.001 + +}table + +Source: Franke and Shah 2003, table 3.<:br> +a. All values are means; n = 60.<:br> +b. All values are means; n = 129.<:br> +c. Two-tailed t-tests for independent samples.<:br> +d. Rated on seven-point scale, with 1 = very accurate and 7 = not accurate at all. Two-tailed t-tests for independent samples. + +!_ Innovation among Hospital Surgeons +={Surgical equipment+4;Lead users:surgical equipment and+4} + +Lüthje (2003) explored innovations developed by surgeons working at university clinics in Germany. Ten such clinics were chosen randomly, and 262 surgeons responded to Lüthje's questionnaire---a response rate of 32.6 percent. Of the university surgeons responding, 22 percent reported developing or improving some item(s) of medical equipment for use in their own practices. Using a logit model to determine the influence of user characteristics on innovation activity, Lüthje found that innovating surgeons tended to be lead users (p < 0.01). 
He also found that solutions to problems encountered in their own surgical practices were the primary benefit that the innovating surgeons expected to obtain from the solutions they developed (p < 0.01). In addition, he found that the level of technical knowledge the surgeon held was significantly correlated with innovation (p < 0.05). Also, perhaps as one might expect in the field of medicine, the "contextual barrier" of concerns about legal problems and liability risks was found to have a strongly significant negative correlation with the likelihood of user invention by surgeons (p < 0.01). +={Lüthje, C.+1;Sticky information:toolkits and} + +With respect to the commercial value of the innovations the lead user surgeons had developed, Lüthje reported that 48 percent of the innovations developed by his lead user respondents were or soon would be marketed by manufacturers of medical equipment. + +!_ Discussion + +The studies reviewed in this chapter all found that user innovations in general and commercially attractive ones in particular tended to be developed by lead users. These studies were set in a range of fields, but all were focused on hardware innovations or on information innovations such as new software. It is therefore important to point out that, in many fields, innovation in techniques is at least as important as equipment innovation. For example, many novel surgical operations are performed with standard equipment (such as scalpels), and many novel innovations in snowboarding are based on existing, unmodified equipment. Technique-only innovations are also likely to be the work of lead users, and indeed many of the equipment innovations documented in the studies reviewed here involved innovations in technique as well as innovations in equipment. 
+={Lead users:commercial attractiveness of+1;Users:innovation process and+1} + +Despite the strength of the findings, many interesting puzzles remain that can be addressed by the further development of lead user theory. For example, empirical studies of innovation by lead users are unlikely to have sampled the world's foremost lead users. Thus, in effect, the studies reviewed here determined lead users to be those highest on lead user characteristics that were within their samples. Perhaps other samples could have been obtained in each of the fields studied containing users that were even more "leading edge" with respect to relevant market trends. If so, why were the samples of moderately leading-edge users showing user innovation if user innovation is concentrated among "extreme" lead users? There are at least three possible explanations. First, most of the studies of user innovation probably included users reasonably close to the global leading edge in their samples. Had the "top" users been included, perhaps the result would have been that still more attractive user innovations would have been found. Second, it may be that the needs of local user communities differ, and so local lead users really may be the world's lead users with respect to their particular needs. Third, even if a sample contains lead users that are not near the global top with respect to lead users' characteristics, local lead users might still have reasons to (re)develop innovations locally. For example, it might be cheaper, faster, more interesting, or more enjoyable to innovate than to search for a similar innovation that a "global top" lead user might already have developed. 
+={Economic benefit, expectations of by lead users;Innovation process;Lead users:economic benefit, expectations of;Local information;Users:low-cost innovation niches of} + +1~ 3 Why Many Users Want Custom Products +={Custom products:heterogeneity of user needs and+42|users and+42;User need+42;Users:custom products and+42|innovation and+42|needs of} + +The high rates of user innovation documented in chapter 2 suggest that many users may want custom products. Why should this be so? I will argue that it is because many users have needs that differ in detail, and many also have both sufficient willingness to pay and sufficient resources to obtain a custom product that is just right for their individual needs. In this chapter, I first present the case for heterogeneity of user needs. I then review a study that explores users' heterogeneity of need and willingness to pay for product customization. + +!_ Heterogeneity of User Needs + +If many individual users or user firms want something different in a product type, it is said that heterogeneity of user need for that product type is high. If users' needs are highly heterogeneous, only small numbers of users will tend to want exactly the same thing. In such a case it is unlikely that mass-produced products will precisely suit the needs of many users. Mass manufacturers tend to want to build products that will appeal to more users rather than fewer, so as to spread their fixed costs of development and production. If many users want something different, and if they have adequate interest and resources to get exactly the product they need, they will be driven either to develop it for themselves or to pay a custom manufacturer to develop it for them. +={Manufacturers:custom products and|expectations of economic benefit by|innovation and} + +Are users' needs for new products (and services) often highly heterogeneous? A test of reason suggests that they are. 
An individual's or a firm's need for many products depends on detailed considerations regarding the user's initial state and resources, on the pathway the user must traverse to get from the initial state to the preferred state, and on detailed considerations regarding their preferred end state as well. These are likely to be different for each individual user and for each user firm at some level of detail. This, in turn, suggests that needs for many new products and services that are precisely right for each user will differ: that needs for those products will be highly heterogeneous. + +Suppose, for example, that you decide you need a new item of household furnishing. Your house is already furnished with hundreds of items, big and small, and the new item must "fit in" properly. In addition, your precise needs for the new item are likely to be affected by your living situation, your resources, and your preferences. For example: "We need a new couch that Uncle Bill will like, that the kids can jump on, that matches the wallpaper I adore, that reflects my love of coral reefs and overall good taste, and that we can afford." Many of these specific constraints are not results of current whim and are not easy to change. Perhaps you can change the wallpaper, but you are less likely to change Uncle Bill, your kids, your established tastes with respect to a living environment, or your resource constraints. + +The net result is that the most desired product characteristics might be specific to each individual or firm. Of course, many will be willing to satisfice---make compromises---on many items because of limits on the money or time they have available to get exactly what they want. Thus, a serious mountain biker may be willing to simply buy almost any couch on sale even if he or she is not fully happy with it. 
On the other hand, that same biker may be totally unwilling to compromise about getting mountain biking equipment that is precisely right for his or her specific needs. In terms of industrial products, NASA may insist on getting precisely right components for the Space Shuttle if they affect mission safety, but may be willing to satisfice on other items. + +!_ Evidence from Studies of User Innovation + +Two studies of innovation by users provide indirect information on the heterogeneity of user need. They provide descriptions of the functions of the innovations developed by users in their samples. Inspection of these descriptions shows a great deal of variation and few near-duplicates. Different functionality, of course, implies that the developers of the products had different needs. In the 2000 study of user modifications of library IT systems by Morrison, Roberts, and von Hippel, discussed earlier, only 14 of 39 innovations are functionally similar to any other innovations in the sample. If one type of functionality that was repeatedly developed ("web interface") is excluded, the overlap is even lower (see table 2.2). Other responses by study participants add to this impression of high heterogeneity of need among users. Thirty percent of the respondents reported that their library IT system had been highly customized by the manufacturer during installation to meet their specific needs. In addition, 54 percent of study respondents agreed with the statement "We would like to make additional improvements to our IT system functionality that can't be made by simply adjusting the standard, customer-accessible parameters provided by the supplier." 
+={Morrison, Pamela;Roberts, J.;von Hippel, E.+34;Lead users:library information search system and;Library information search system} + +Similar moderate overlap in the characteristics of user innovations can be seen in innovation descriptions provided in the study of mountain biking by Lüthje, Herstatt, and von Hippel (2002). In that study sample, I estimate that at most 10 of 43 innovations had functionality similar to that of another sample member. This diversity makes sense: mountain biking, which outsiders might assume is a single type of athletic activity, in fact has many subspecialties. +={Herstatt, C.+7;Lüthje, C.+7;Mountain biking+7} + +As can be seen in table 3.1, the specializations of mountain bikers in the our study sample involved very different mountain biking terrains, and important variations in riding conditions and riding specializations. The innovations users developed were appropriate to their own heterogeneous riding activities and so were quite heterogeneous in function. Consider three examples drawn from our study: + +_* I ride on elevated, skinny planks and ladders, do jumps, steep technical downhills, obstacles and big drops. Solution devised: I needed sophisticated cycling armor and protective clothing. So I designed arm and leg armor, chest protection, shorts, pants and a jacket that enable me to try harder things with less fear of injury. + +_* I do back-country touring and needed a way to easily lift and carry a fully loaded mountain bike on the sides of steep hills and mountains and dangle it over cliffs as I climbed. Solution devised: I modified the top tube and the top of my seat post to provide secure attachment points for a carrying strap, then I modified a very plush and durable mountaineering sling to serve as the over-shoulder strap. 
Because the strap sits up high, I only need to bend my knees a little bit to lift the bike onto my shoulders, yet it is just high enough to keep the front wheel from hitting when I am climbing a steep hill. Eventually, I came up with a quick-release lateral strap to keep the main strap from sliding off my shoulder, but it will easily break away if I fall or land in a fast river and need to ditch my bike. + +_* When riding on ice, my bike has no traction and I slip and fall. Solution devised: I increased the traction of my tires by getting some metal studs used by the auto industry for winter tires. Then I selected some mountain biking tires with large blocks of rubber in the tread pattern, drilled a hole in the center of each block and inserted a stud in each hole. + +!_ Table 3.1 +Activity specializations of innovating mountain bikers. + +table{~h c6; 20; 13; 20; 13; 20; 14; + +Preferred terrain +Number of bikers +Outside conditions +Number of bikers +Focus on particular riding abilities +Number of bikers + +Fast downhill tracks (steep, drops, fast) +44 (39.6%) +Darkness, night riding +45 (40.5%) +Jumps, drops, stunts, obstacles +34 (30.6%) + +Technical single tracks (up and down, rocky, jumps) +68 (61.3%) +Snow, ice, cold +60 (54.1%) +Technical ability/balance +22 (19.8%) + +Smooth single tracks (hilly, rolling, speed, sand, hardpack) +13 (11.7%) +Rain, mud +53 (47.7%) +Fast descents / downhill +34 (30.6%) + +Urban and streets +9 (8.1%) +Heat +15 (13.5%) +Endurance +9 (8.1%) + +No special terrain preferred +5 (4.5%) +High altitude +10 (9.0%) +Climbing +17 (13%) + +~ +~ +No extreme outside conditions +29 (26.1%) +Sprint +3 (2.7%) + +~ +~ +~ +~ +No focus on specific riding ability +36 (32.4%) + +}table + +Source: Lüthje, Herstatt, and von Hippel 2002. This table includes the 111 users in the study sample who had ideas for improvements to mountain biking equipment. (Of these, 61 had actually gone on to build the equipment they envisioned.) 
Many of these users reported experience in more than one category of activity, so the sum in each column is higher than 111. + +!_ Evidence from Studies of Market Segmentation + +Empirical data on heterogeneity of demand for specific products and services are sparse. Those most interested in studying the matter are generally mass manufacturers of products and services for consumers---and they do not make a practice of prospecting for heterogeneity. Instead, they are interested in finding areas where users' needs are similar enough to represent profitable markets for standard products produced in large volumes. Manufacturers customarily seek such areas via market-segmentation studies that partition markets into a very few segments---perhaps only three, four, or five. Each segment identified consists of customers with relatively similar needs for a particular product (Punj and Stewart 1983; Wind 1978). For example, toothpaste manufacturers may divide their markets into segments such as boys and girls, adults interested in tooth whitening, and so on. +={Punj, G.;Stewart, D.;Wind, Y.;Custom products:market segmentation and+3;Manufacturers:innovation and;Marketing research+1} + +Since the 1970s, nearly all market-segmentation studies have been carried out by means of cluster analysis (Green 1971; Green and Schaffer 1998). After cluster analysis places each participant in the segment of the market most closely matching his needs, a measure of within-segment need variation is determined. This is the proportion of total variation that is within each cluster, and it shows how much users' needs deviate from the averages in "their" respective segments. If within-segment variation is low, users within the segment will have fairly homogeneous needs, and so may be reasonably satisfied with a standard product designed to serve all customers in their segment. If it is high, many users are likely to be dissatisfied---some seriously so. 
+={Green, P.;Schaffer, C.} + +Within-segment variation is seldom reported in published studies, but a survey of market-segmentation studies published in top-tier journals did find 15 studies reporting that statistic. These studies specified 5.5 clusters on average, and had an average remaining within-cluster variance of 46 percent (Franke and Reisinger 2003). Franke and von Hippel (2003b) found similar results in an independent sample. In that study, an average of 3.7 market segments were specified and 54 percent of total variance was left as within-segment variation after the completion of cluster analysis. These data suggest that heterogeneity of need might be very substantial among users in many product categories. ~{ Cluster analysis does not specify the "right" number of clusters---it simply segments a sample into smaller and smaller clusters until the analyst calls a halt. Determining an appropriate number of clusters within a sample can be done in different ways. Of course, it is always possible to say that "I only want to deal with three market segments, so I will stop my analysis when my sample has been segmented into three clusters." More commonly, analysts will examine the increase of squared error sums of each step, and generally will view the optimal number of clusters as having been reached when the plot shows a sudden "elbow" (Myers 1996). Since this technique does not incorporate information on remaining within-cluster heterogeneity, it can lead to solutions with a large amount of within-cluster variance. The "cubic clustering criterion" (CCC) partially addresses this concern by measuring the within-cluster homogeneity relative to the between-cluster heterogeneity. It suggests choosing the number of clusters where this value peaks (Milligan and Cooper 1985). However, this method appears to be rarely used: Ketchen and Shook (1996) found it used in only 5 of 45 segmentation studies they examined. 
}~ +={Franke, N.+20;Reisinger, H.} + +!_ A Study of Heterogeneity and Willingness To Pay + +A need for a novel product not on the market must be accompanied by adequate willingness to pay (and resources) if it is to be associated with the actual development or purchase of a custom product. What is needed to reliably establish the relationship among heterogeneity of demand, willingness to pay, and custom product development or purchase is studies that address all three factors in the same sample. My colleague Nikolaus Franke and I conducted one such study in a population of users of web server software, a product used primarily by industrial firms (Franke and von Hippel 2003b). + +Franke and I looked in detail at users' needs for security features in Apache web server software, and at users' willingness to pay for solutions that precisely fit their needs. Apache web server software is open source software that is explicitly designed to allow modification by anyone having appropriate skills. Anyone may download open source software from the Internet and use it without charge. Users are also explicitly granted the legal right to study the software's source code, to modify the software, and to distribute modified or unmodified versions to others. (See chapter 7 for a full discussion of open source software.) +={Apache web server software+16;Custom products:Apache web server software and+16;Lead users:Apache web server software and+16;Users:and paying for innovations} + +Apache web server software is used on web server computers connected to the Internet. A web server's function is to respond to requests from Internet browsers for particular documents or content. A typical server waits for clients' requests, locates the requested resource, applies the requested method to the resource, and sends the response back to the client. Web server software began by offering relatively simple functionality. 
Over time, however, Apache and other web server software programs have evolved into the complicated front end for many of the technically demanding applications that now run on the Internet. For example, web server software is now used to handle security and authentication of users, to provide e-commerce shopping carts, and gateways to databases. In the face of strong competition from commercial competitors (including Microsoft and Sun/Netscape), the Apache web server has become the most popular web server software on the Internet, used by 67 percent of the many millions of World Wide Web sites extant in early 2004. It has also received many industry awards for excellence. + +Franke and I created a preliminary list of server security functions from published and web-based sources. The preliminary list was evaluated and corrected by experts in web server security and Apache web server software. We eventually ended up with a list of 45 security functions that some or many users might need. Solutions to some were already incorporated in the standard Apache code downloadable by users, others were available in additional modules, and a few were not yet addressed by any security module generally available to the Apache community. (Security threats can emerge quickly and become matters of great concern before a successful response is developed and offered to the general community. A recent example is site flooding, a form of attack in which vandals attempt to cause a website to fail by flooding it with a very large number of simultaneous requests for a response.) + +Users of the security functions of web server software are the webmasters employed by firms to make sure that their software is up to date and functions properly. A major portion of a webmaster's job is to ensure that the software used is secure from attacks launched by those who wish illicit access or simply want to cause the software to fail in some way. 
We collected responses to our study questions from two samples of Apache webmasters: webmasters who posted a question or an answer on a question at the Apache Usenet Forum ~{ http://groups-beta.google.com/group/comp.infosystems.www.servers.unix }~ and webmasters who subscribed to a specialized online Apache newsgroup. ~{ http://modules.apache.org/ }~ This stratified sample gave us an adequate representation of webmasters who both did and did not have the technical skills needed to modify Apache security software to better fit their needs: subscribers to apache-modules.org tend to have a higher level of technical skills on average than those posting to the Apache Usenet Forum. Data were obtained by means of an Internet-based questionnaire. + +!_ The Heterogeneity of Users' Needs + +Franke and I found the security module needs of Apache users were very heterogeneous indeed both among those that had the in-house capability to write code to modify Apache and those that did not. The calibrated coefficient of heterogeneity, H,{c},, was 0.98, indicating that there was essentially no tendency of the users to cluster beyond chance. (We defined the "heterogeneity of need" in a group as the degree to which the needs of i individuals can be satisfied with j standard products which optimally meet their needs. This means that heterogeneity of need is high when many standard products are necessary to satisfy the needs of i individuals and low when the needs can be satisfied by a few standard products. The higher the coefficient the more heterogeneous are the needs of users in a sample. If the calibrated heterogeneity coefficient H,{c}, equals 1, there is no systematic tendency of the users to cluster. If it is lower than 1, there is some tendency of the individuals to cluster. A coefficient of 0 means that the needs of all individuals are exactly the same. 
~{ To measure heterogeneity, Franke and I analyzed the extent to which j standards, varying from [1; i], meet the needs of the i individuals in our sample. Conceptually, we first locate a product in multi-dimensional need space (dimensions = 45 in the case of our present study) that minimizes the distances to each individual's needs. (This step is analogous to the Ward's method in cluster analysis that also minimizes within cluster variation; see Punj and Stewart 1983.) The "error" is then measured as the sum of squared Euclidean distances. We then repeated these steps to determine the error for two optimally positioned products, three products, and so on up to a number equaling I -- 1. The sum of squared errors for all cases is then a simple coefficient that measures how much the needs of i individuals can be satisfied with j standard products. The "coefficient of heterogeneity" just specified is sensitive both to the (average) /{distance}/ between the needs and for the /{configuration}/ of the needs: when the needs tend to form clusters the heterogeneity coefficient is lower than if they are evenly spread. To make the coefficient comparable across different populations, we calibrate it using a bootstrapping technique (Efron 1979) involving dividing the coefficient by the expected value (this value is generated by averaging the heterogeneity of many random distributions of heterogeneity of the same kind). The average random heterogeneity coefficient is then an appropriate value for calibration purposes: it assumes that there is no systematic relationship between the needs of the individuals or between the need dimensions. }~ ) +={Franke, N.;Punj, G.;Stewart, D.} + +Even this understates the heterogeneity. Responding Apache webmasters went far beyond the 45 security-related functions of web server software that we offered for their evaluation. 
In our questionnaire we offered an open question asking users to list up to four additional needs they experienced that were not covered by the standard list. Nearly 50 percent used the opportunity to add additional functions. When duplicates were eliminated, we found that 92 distinct additional security-related needs had been noted by one or more webmaster users.~{ Conceptually, it can be possible to generate "one perfect product" for everyone--- in which case heterogeneity of demand is zero---by simply creating all the features wanted by anyone (45 + 92 features in the case of this study), and incorporating them in the "one perfect product." Users could then select the features they want from a menu contained in the one perfect product to tailor it to their own tastes. Doing this is at least conceptually possible in the case of software, but less so in the case of a physical product for two reasons: (1) delivering all possible physical options to everyone who buys the product would be expensive for physical goods (while costing nothing extra in the case of information products); (2) some options are mutually exclusive (an automobile cannot be both red and green at the same time). }~ + +High heterogeneity of need in our sample suggests that there should be a high interest in obtaining modifications to Apache---and indeed, overall satisfaction with the existing version was only moderate. + +!_ Willingness to Pay for Improvements + +It is not enough to want a better-fitting custom product. One must also be willing and able to pay to get what one wants. Those in the Apache sample who did innovate were presumably willing to pay the price to do so. But how much were the users in our sample---the innovators and the non-innovators--- willing to pay /{now}/ for improvements? Estimating a user's willingness to pay (WTP) is known to be a difficult task. 
Franke and I used the contingent valuation method, in which respondents are directly asked how much they are willing to pay for a product or service (Mitchell and Carson 1989). Results obtained by that method often overestimate WTP significantly. Empirical studies that compare expressed WTP with actual cash payments on average showed actual spending behavior to be somewhat smaller than expressed WTP in the case of private purchases (such as in our case). In contrast, they generally find willingness to pay to be greatly overstated in the case of public goods such as the removal of a road from a wilderness area. ~{ The difference between actual willingness to pay and expressed willingness to pay is much lower for private goods (our case) than for public goods. In the case of private goods, Loomis et al. (1996) found the expressed willingness to pay for art prints to be twice the actual WTP. Willis and Powe (1998) found that among visitors to a castle the expressed WTP was 60 percent lower than the actual WTP. In the case of public goods, Brown et al. (1996), in a study of willingness to pay for removal of a road from a wilderness area, found the expressed WTP to be 4--6 times the actual WTP. Lindsey and Knaap (1999), in a study of WTP for a public urban greenway, found the expressed WTP to be 2-10 times the actual WTP. Neil et al. (1994) found the expressed WTP for conserving an original painting in the desert to be 9 times the actual WTP. Seip and Strand (1992) found that less than 10 percent of those who expressed interest in paying to join an environmental organization actually joined. }~ +={Carson, R.;Mitchell, R.} + +To compensate for the likely overstatement of expressed relative to actual WTP in our study, Franke and I conservatively deflated respondents' indicated willingness to pay by 80 percent. (Although the product in question was intended for private use, webmasters were talking about their willingness to spend company money, not their own money.) 
We asked each user who had indicated that he was not really satisfied with a function (i.e., whose satisfaction with the respective function was 4 or less on a 7-point scale, where 1 = not satisfied at all, and 7 = very satisfied) to estimate how much he would be willing to pay to get a very satisfactory solution regarding this function. After deflation, our sample of 137 webmasters said they were willing to pay $700,000 in aggregate to modify web server software to a point that fully satisfied them with respect to their security function needs. This amounts to an average of $5,232 total willingness to pay per respondent. This is a striking number because the price of commercial web server software similar to Apache's for one server was about $1,100 at the time of our study (source: www.sun.com, November 2001). If we assume that each webmaster was in charge of ten servers on average, this means that each webmaster was willing to pay half the price of a total server software package to get his heterogeneous needs for security features better satisfied. + +!_ Increased Satisfaction from Customization of Apache + +Recall that it takes some technical skill to modify Apache web server software by writing new code. In table 3.2, Franke and I examined only the technically skilled users in our sample who claimed the capability of making modifications to Apache web server software. For these technically skilled users, we found significantly higher satisfaction levels among those that actually did customize their software---but even the users that made modifications were not fully satisfied. + +!_ Table 3.2 +Skilled users who customized their software were more satisfied than those who did not customize. 
+ +table{~h c4; 55; 15; 15; 15; + +~ +Users who customized (n = 18) +Users who did not customize (n = 44) +Difference (one-tailed t-test) + +Satisfaction with basic web server functionality +5.5 +4.3 +0.100 + +Satisfaction with authentication of client +3.0 +1.0 +0.001 + +Satisfaction with e-commerce-related functions +1.3 +0.0 +0.023 + +Satisfaction with within-site user access control +8.5 +6.9 +0.170 + +Satisfaction with other security functions +3.9 +3.9 +0.699 + +Overall satisfaction +4.3 +2.6 +0.010 + +}table + +Source: Franke and von Hippel 2003, table 8. In this table, 45 individual functions are grouped into five general categories. The satisfaction index ranges from -21 to +21. + +One might wonder why users with the ability to modify Apache closer to their liking were not totally satisfied. The answer can be found in respondents' judgments regarding how much effort it would require to modify Apache still more to their liking. We asked all respondents who indicated dissatisfaction of level 4 or lower with a specific function of Apache how much working time it would cost them to improve the function to the point where they would judge it to be very satisfactory (to be at a satisfaction level of 7). For the whole sample and all dissatisfactions, we obtained a working time of 8,938 person-days necessary to get a very satisfactory solution. This equals $78 of incremental benefit per incremental programmer working day ($716,758 divided by 8,938 days). This is clearly below the regular wages a skilled programmer gets. Franke and I concluded from this that skilled users do not improve their respective Apache versions to the point where they are perfectly satisfied because the costs of doing so would exceed the benefits. + +!_ Discussion + +Heterogeneity of user need is likely to be high for many types of products. 
Data are still scanty, but high heterogeneity of need is a very straightforward explanation for why there is so much customization by users: many users have "custom" needs for products and services. + +Those interested can easily enhance their intuitions about heterogeneity of user need and related innovation by users. User innovation appears to be common enough so that one can find examples for oneself in a reasonably small, casual sample. Readers therefore may find it possible (and enjoyable) to do their own informal tests of the matter. My own version of such a test is to ask the students in one of my MIT classes (typically about 50 students) to think about a particular product that many use, such as a backpack. I first ask them how satisfied they are with their backpack. Initially, most will say "It's OK." But after some discussion and thinking, a few complaints will slowly begin to surface (slowly, I think, because we all take some dissatisfaction with our products as the unremarkable norm). "It doesn't fit comfortably" in this or that particular way. "When my lunch bag or thermos leaks the books and papers I am carrying get wet---there should be a waterproof partition." "I carry large drawings to school rolled up in my backpack with the ends sticking out. They are ruined if it rains and I have not taken the precaution of wrapping them in plastic." Next, I ask whether any students have modified their backpacks to better meet their needs. Interestingly enough, one or two typically have. Since backpacks are not products of very high professional or hobby interest to most users, the presence of even some user innovation to adapt to individual users' unmet needs in such small, casual samples is an interesting intuition builder with respect to the findings discussed in this chapter. 
+ +1~ 4 Users' Innovate-or-Buy Decisions +={Users:innovation and+4|innovate-or-buy decisions by+74} + +Why does a user wanting a custom product sometimes innovate for itself rather than buying from a manufacturer of custom products? There is, after all, a choice---at least it would seem so. However, if a user with the resources and willingness to pay does decide to buy, it may be surprised to discover that it is not so easy to find a manufacturer willing to make exactly what an individual user wants. Of course, we all know that mass manufacturers with businesses built around providing standard products in large numbers will be reluctant to accommodate special requests. Consumers know this too, and few will be so foolish as to contact a major soup producer like Campbell's with a request for a special, "just-right" can of soup. But what about manufacturers that specialize in custom products? Isn't it their business to respond to special requests? To understand which way the innovate-or-buy choice will go, one must consider both transaction costs and information asymmetries specific to users and manufacturers. I will talk mainly about transaction costs in this chapter and mainly about information asymmetries in chapter 5. +={Custom products:users and+3;Innovation process+3;Manufacturers:innovation and+3;Transaction costs+3;Users:innovation process and+3|and paying for innovations} + +I begin this chapter by discussing four specific and significant transaction costs that affect users' innovate-or-buy decisions. Next I review a case study that illustrates these. Then, I use a simple quantitative model to further explore when user firms will find it more cost-effective to develop a solution---a new product or service---for themselves rather than hiring a manufacturer to solve the problem for them. 
Finally, I point out that /{individual}/ users can sometimes be more inclined to innovate than one might expect because they sometimes value the /{process}/ of innovating as well as the novel product or service that is created. + +!_ Users' vs. Manufacturers' Views of Innovation Opportunities +={Agency costs+15;Manufacturers:agency costs and+15;Transaction costs:See also Agency costs;Users:agency costs and+15|transaction costs and+15} + +Three specific contributors to transaction costs---in addition to the "usual suspects," such as opportunism---often have important effects on users' decisions whether to buy a custom product or to develop it for themselves. These are (1) differences between users' and manufacturers' views regarding what constitutes a desirable solution, (2) differences in innovation quality signaling requirements between user and manufacturer innovators, and (3) differences in legal requirements placed on user and manufacturer innovators. The first two of these factors involve considerations of agency costs. When a user hires a manufacturer to develop a custom product, the user is a principal that has hired the custom manufacturer to act as its agent. When the interests of the principal and the agent are not the same, agency costs will result. Recall from chapter 1 that agency costs are (1) costs incurred to monitor the agent to ensure that it follows the interests of the principal, (2) the cost incurred by the agent to commit itself not to act against the principal's interest (the "bonding cost"), and (3) costs associated with an outcome that does not fully serve the interests of the principal (Jensen and Meckling 1976). In the specific instance of product and service development, agency considerations enter because a user's and a manufacturer's interests with respect to the development of a custom product often differ significantly. 
+={Jensen, M.;Meckling, W.} + +!_ Preferences Regarding Solutions + +Individual products and services are components of larger user solutions. A user therefore wants a product that will make the best overall tradeoff between solution quality and price. Sometimes the best overall tradeoff will result in a willingness to pay a surprisingly large amount to get a solution component precisely right. For example, an individual user may specify tennis racket functionality that will fit her specific technique and relative strengths and will be willing to pay a great deal for exactly that racket. Deviations in racket functionality would require compensating modifications in her carefully practiced and deeply ingrained hitting technique---a much more costly overall solution from the user's point of view. In contrast, a user will be much less concerned with precisely /{how}/ the desired functionality is attained. For example, tennis players will typically be unconcerned about whether a tennis racket is made from metal, carbon fiber, plastic or wood---or, for that matter, from mud---if it performs precisely as desired. And, indeed, users have quickly shifted to new types of rackets over the years as new materials promise a better fit to their particular functional requirements. + +Of course, the same thing is true in the case of products for industrial users. For example, a firm with a need for a process machine may be willing to pay a great deal for one that is precisely appropriate to the characteristics of the input materials being processed, and to the skills of employees who will operate the machine. Deviations in either matter would require compensating modifications in material supply and employee training---likely to be a much more costly overall solution from the user's point of view. 
In contrast, the user firm will be much less concerned with precisely how the desired functionality is achieved by the process machine, and will care only that it performs precisely as specified. + +Manufacturers faced with custom development requests from users make similar calculations, but theirs revolve around attempting to conserve the applicability of a low-cost (to them) solution. Manufacturers tend to specialize in and gain competitive advantage from their capabilities in one or a few specific solution types. They then seek to find as many profitable applications for those solutions types as possible. For example, a specialist in fabricating custom products from carbon fiber might find it profitable to make any kind of product---from airplane wings to tennis rackets---as long as they are made from carbon fiber. In contrast, that same manufacturer would have no competitive advantage in---and so no profit from making--- any of these same products from metal or wood. + +Specializations in solution types can be very narrow indeed. For example, thousands of manufacturers specialize in adhesive-based fastening solutions, while other thousands specialize in mechanical fastening solutions involving such things as metal screws and nails. Importantly, companies that produce products and solution types that have close functional equivalence from the user's point of view can look very different from the point of view of a solution supplier. For example, a manufacturer of standard or custom adhesives needs chemists on staff with an expertise in chemical formulation. It also needs chemistry labs and production equipment designed to mix specialized batches of chemicals on a small scale, and it needs the equipment, expertise and regulatory approvals to package that kind of product in a way that is convenient to the customer and also in line with regulatory safeguards. 
In contrast, manufacturers specializing in standard or custom metal fastening solutions need none of these things. What they need instead are mechanical design engineers, a machine shop to build product prototypes and production tooling, specialized metal-forming production equipment such as screw machines, and so on. + +Users, having an investment only in a need specification and not in a solution type, want the best functional solution to their problem, independent of solution type used. Manufacturers, in contrast, want to supply custom solutions to users that utilize their existing expertise and production capabilities. Thus, in the case of the two fastening technology alternatives just described, users will prefer whatever solution approach works best. In contrast, adhesives manufacturers will find it tremendously more attractive to create a solution involving adhesive-based fastening, and manufacturers specializing in mechanical fastening will similarly strongly prefer to offer to develop solutions involving mechanical fastening. + +The difference between users' incentives to get the best functional solution to their need and specialist manufacturers' incentives to embed a specific solution type in the product to be developed are a major source of agency costs in custom product development, because there is typically an information asymmetry between user and manufacturer with respect to what will be the best solution. Manufacturers tend to know more than users about this and to have a strong incentive to provide biased information to users in order to convince them that the solution type in which they specialize is the best one to use. Such biases will be difficult for users to detect because, again, they are less expert than the suppliers in the various solution technologies that are candidates. 
+={Information asymmetries;Users:information asymmetries of} + +Theoretically, this agency cost would disappear if it were possible to fully specify a contract (Aghion and Tirole 1994; Bessen 2004). But in product development, contracting can be problematic. Information regarding characteristics of solutions and needs is inescapably incomplete at the time of contracting---users cannot fully specify what they want in advance of trying out prototype solutions, and manufacturers are not fully sure how planned solution approaches will work out before investing in customer-specific development. +={Aghion, P.;Bessen, J.;Contracting;Tirole, J.} + +!_ Users' Expectations + +When users buy a product from manufacturers, they tend to expect a package of other services to come along with the product they receive. However, when users develop a product for themselves, some of these are not demanded or can be supplied in a less formal, less expensive way by users for themselves. This set of implicit expectations can raise the cost to a user of a custom solution bought from a manufacturer relative to a home-developed solution. +={Manufacturers:innovation and+11;Users:innovation and+11} + +Users typically expect a solution they have purchased to work correctly and reliably "right out of the box." In effect, a sharp line is drawn between product development at the manufacturer's site and routine, trouble-free usage at the purchaser's site. When the user builds a product for itself, however, both the development and the use functions are in the same organization and may explicitly be overlapped. Repeated tests and repeated repairs and improvements during early use are then more likely to be understood and tolerated as an acceptable part of the development process. + +A related difference in expectations has to do with field support for a product that has been purchased rather than developed in house. 
In the case of a purchased custom product, users expect that manufacturers will provide replacement parts and service if needed. Responding to this expectation is costly for a custom manufacturer. It must keep a record of what it has built for each particular user, and of any special parts incorporated in that user's products so that they can be built or purchased again if needed. In contrast, if a user has developed a product for itself, it has people on site who know details of its design. These employees will be capable of rebuilding or repairing or redesigning the product /{ad hoc}/ if and as the need arises. (Of course, if these knowledgeable employees leave the user firm while the product they designed is still in use, such informality can prove costly.) + +Manufacturers also must invest in indirect quality signals that may not have an effect on actual quality, but instead are designed to assure both the specific user being served and the market in general that the product being supplied is of high quality. These represent another element of agency costs that user-innovators do not incur. When users develop an innovation for themselves, they end up intimately knowing the actual quality of the solution they have developed, and knowing why and how it is appropriate to their task. As an example, an engineer building a million-dollar process machine for in-house use might feel it perfectly acceptable to install a precisely right and very cheap computer controller made and prominently labeled by Lego, a manufacturer of children's toys. (Lego provides computer controllers for some of its children's building kit products.) But if that same engineer saw a Lego controller in a million-dollar process machine his firm was purchasing from a specialist high-end manufacturer, he might not know enough about the design details to know that the Lego controller was precisely right for the application. 
In that case, the engineer and his managers might well regard the seemingly inappropriate brand name as an indirect signal of bad quality. + +Manufacturers are often so concerned about a reputation for quality that they refuse to take shortcuts that a customer specifically requests and that might make sense for a particular customer, lest others get wind of what was done and take it as a negative signal about the general quality of the firm's products. For example, you may say to a maker of luxury custom cars: "I want to have a custom car of your brand in my driveway---my friends will admire it. But I only plan to drive it to the grocery store once in a while, so I only want a cheap little engine. A luxury exterior combined with cheap parts is the best solution for me in this application---just slap something together and keep the price low." The maker is likely to respond: "We understand your need, but we cannot be associated with any product of low quality. Someone else may look under the hood some day, and that would damage our reputation as a maker of fine cars. You must look elsewhere, or decide you are willing to pay the price to keep one of our fine machines idle on your driveway." + +!_ Differing Legal and Regulatory Requirements + +Users that innovate do not generally face legal risk if the product they develop fails and causes costs to themselves but not to others. In contrast, manufacturers that develop and sell new products are regarded under US law as also providing an implied warranty of "fitness for the intended use." If a product does not meet this criterion, and if a different, written warranty is not in place, manufacturers can be found liable for negligence with respect to providing a defective design and failure to warn buyers (Barnes and Ulin 1984). This simple difference can cause a large difference in exposure to liability by innovators and so can drive up the costs of manufacturer-provided solutions relative to user-provided ones. 
+={Barnes, B.;Ulin, D.;Transaction costs+51;Users:transaction costs and+23} + +For example, a user firm that builds a novel process controller to improve its plant operations must pay its own actual costs if the self-built controller fails and ruins expensive materials being processed. On the other hand, if a controller manufacturer designed the novel controller product and sold it to customers, and a failure then occurred and could be traced back to a fault in the design, the controller manufacturer is potentially liable for actual user costs and punitive damages. It may also incur significant reputational losses if the unhappy user broadcasts its complaints. The logical response of a controller manufacturer to this higher risk is to charge more and/or to be much more careful with respect to running exhaustive, expensive, and lengthy tests before releasing a new product. The resulting increase in cost and delay for obtaining a manufacturer-developed product can tend to tip users toward building their own, in-house solutions. +={Custom products:manufacturers and+2;Economic benefit, expectations of by lead users:by manufacturers+4|by users+7;Manufacturers:expectations of economic benefit by+4} + +!_ Net Result + +A net result of the foregoing considerations is that manufacturers often find that developing a custom product for only one or a few users will be unprofitable. In such cases, the transaction costs involved can make it cheaper for users with appropriate capabilities to develop the product for themselves. In larger markets, in contrast, fixed transaction costs will be spread over many customers, and the economies of scale obtainable by producing for the whole market may be substantial. In that case, it will likely be cheaper for users to buy than to innovate. As a result, manufacturers, when contacted by a user with a very specific request, will be keenly interested in how many others are likely to want this solution or elements of it. 
If the answer is "few," a custom manufacturer will be unlikely to accept the project. + +Of course, manufacturers have an incentive to /{make}/ markets attractive from their point of view. This can be done by deviating from precisely serving the needs of a specific custom client in order to create a solution that will be "good enough" for that client but at the same time of more interest to others. Manufacturers may do this openly by arranging meetings among custom buyers with similar needs, and then urging the group to come up with a common solution that all will find acceptable. "After all," as the representative will say, "it is clear that we cannot make a special product to suit each user, so all of you must be prepared to make really difficult compromises!" More covertly, manufacturers may simply ignore some of the specific requests of the specific user client and make something that they expect to be a more general solution instead. + +The contrasting incentives of users and manufacturers with respect to generality of need being served---and also with respect to the solution choice issue discussed earlier---can result in a very frustrating and cloudy interaction in which each party hides its best information and attempts to manipulate others to its own advantage. With respect to generality of need, sophisticated users understand custom suppliers' preference for a larger market and attempt to argue convincingly that "everyone will want precisely what I am asking you for." Manufacturers, in turn, know users have this incentive and so will generally prefer to develop custom products for which they themselves have a reasonable understanding of demand. Users are also aware of manufacturers' strong preference for only producing products that embody their existing solution expertise. 
To guard against the possibility that this incentive will produce biased advice, they may attempt to shop around among a number of suppliers offering different solution types and/or develop internal expertise on solution possibilities and/or attempt to write better contracts. All these attempts to induce and guard against bias involve agency costs. +={Custom products:manufacturers and} + +!_ An Illustrative Case + +A case study by Sarah Slaughter (1993) illustrates the impact of some of the transaction costs discussed above related to users' innovate-or-buy decisions. Slaughter studied patterns of innovation in stressed-skin panels, which are used in some housing construction. The aspects of the panels studied were related to installation, and so the users of these features were home builders rather than home owners. When Slaughter contrasted users' costs of innovating versus buying, she found that it was always much cheaper for the builder to develop a solution for itself at a construction site than to ask a panel manufacturer to do so. +={Slaughter, S.+16;Stressed-skin panels+16} + +A stressed-skin panel can be visualized as a large 4-by-8-foot sandwich consisting of two panels made of plywood with a layer of plastic foam glued in between. The foam, about 4 inches thick, strongly bonds the two panels together and also acts as a layer of thermal insulation. In 1989, manufacturing of stressed-skin panels was a relatively concentrated industry; the four largest manufacturers collectively having a 77 percent share of the market. The user industry was much less concentrated: the four largest constructors of panelized housing together had only 1 percent of the market for such housing in 1989. + +In housing construction, stressed-skin panels are generally attached to strong timber frames to form the outer shell of a house and to resist shear loads (such as the force of the wind). To use the panels in this way, a number of subsidiary inventions are required. 
For example, one must find a practical, long-lasting way to attach panels to each other and to the floors, the roof, and the frame. Also, one has to find a new way to run pipes and wires from place to place because there are no empty spaces in the walls to put them---panel interiors are filled with foam. + +Stressed-skin panels were introduced into housing construction after World War II. From then till 1989, the time of Slaughter's study, 34 innovations were made in 12 functionally important areas to create a complete building system for this type of construction. Slaughter studied the history of each of these innovations and found that 82 percent had been developed by users of the stressed-skin panels---residential builders---and only 18 percent by manufacturers of stressed-skin panels. Sometimes more than one user developed and implemented different approaches to the same functional problem (table 4.1). Builders freely revealed their innovations rather than protecting them for proprietary advantage. They were passed from builder to builder by word of mouth, published in trade magazines, and diffused widely. All were replicated at building sites for years before any commercial panel manufacturer developed and sold a solution to accomplish the same function. + +Histories of the user-developed improvements to stressed-skin panel construction showed that the user-innovator construction firms did not engage in planned R&D projects. Instead, each innovation was an immediate response to a problem encountered in the course of a construction project. Once a problem was encountered, the innovating builder typically developed and fabricated a solution at great speed, using skills, materials, and equipment on hand at the construction site. Builders reported that the average time from discovery of the problem to installation of the completed solution on the site was only half a day. The total cost of each innovation, including time, equipment, and materials, averaged $153. 
+
+!_ Example: Installing Wiring in a Stressed-Skin Panel
+
+A builder was faced with the immediate problem of how to route wires through the foam interior of panels to wall switches located in the middle of the panels. He did not want cut grooves or channels through the surfaces of the panels to these locations---that would dangerously reduce the panels' structural strength. His inventive solution was to mount an electrically heated wire on the tip of a long pole and simply push the heated tip through the center insulation layer of the panel. As he pushed, the electrically heated tip quickly melted a channel through the foam plastic insulation from the edge of the panel to the desired spot. Wires were then pulled through this channel.
+
+!_ Table 4.1
+Users would have found it much more costly to get custom solutions from manufacturers. The costs of user-developed innovations in stressed-skin panels were very low.
+
+table{~h c5; 40; 17; 17; 6; 20;
+
+Function
+Average user development time (days)
+Average user development cost
+N
+Minimum cost of waiting for manufacturer to deliver
+
+Framing of openings in panels
+0.1
+$20
+1
+$1,400
+
+Structural connection between panels
+0.1
+30
+2
+$1,400
+
+Ventilation of panels on roof
+0.1
+32
+2
+$28,000
+
+Insulated connection between panels
+0.1
+41
+3
+$2,800
+
+Corner connection between panels
+0.2
+60
+1
+$2,800
+
+Installation of HVAC in panels
+0.2
+60
+2
+$2,800
+
+Installation of wiring in panels
+0.2
+79
+7
+$2,800
+
+Connection of panels to roof
+0.2
+80
+1
+$2,800
+
+Add insect repellency to panels
+0.4
+123
+3
+$70,000
+
+Connect panels to foundation
+0.5
+160
+1
+$1,400
+
+Connect panels to frames
+1.2
+377
+3
+$2,800
+
+Development of curved panels
+5.0
+1,500
+1
+$28,000
+
+Average for all innovations
+0.5
+$153
+~
+$12,367
+
+}table
+
+N represents number of innovations developed by /{users}/ to carry out each listed function. Source: Slaughter 1993, tables 4 and 5. 
Costs and times shown are averaged for all user-developed innovations in each functional category. (The six /{manufacturer}/-developed innovations in Slaughter's sample are not included in this table.) + +The builder-innovator reported that the total time to develop the innovation was only an hour, and that the total cost for time and materials equaled $40. How could it cost so little and take so little time? The builder explained that using hot wires to slice sheets of plastic foam insulation into pieces of a required length is a technique known to builders. His idea as to how to modify the slicing technique to melt channels instead came to him quickly. To test the idea, he immediately sent a worker to an electrical supply house to get some nichrome wire (a type of high-resistance wire often used as an electrical heating element), attached the wire to a tip of a pole, and tried the solution on a panel at the building site---and it worked! + +This solution was described in detail in an article in a builder's magazine and was widely imitated. A panel manufacturer's eventual response (after the user solution had spread for a number of years) was to manufacture a panel with a channel for wires pre-molded into the plastic foam interior of the panel. This solution is only sometimes satisfactory. Builders often do not want to locate switch boxes at the height of the premolded channel. Also, sometimes construction workers will install some panels upside down in error, and the preformed channels will then not be continuous between one panel and the next. In such cases, the original, user-developed solution is again resorted to. + +!_ Example: Creating a Curved Panel +={Manufacturers:transaction costs and+8} + +A builder was constructing a custom house with large, curved windows. Curved stressed-skin panels were needed to fill in the space above and below these windows, but panel manufacturers only sold flat panels at that time. 
The builder facing the problem could not simply buy standard flat panels and bend them into curved ones at the construction site---completed panels are rigid by design. So he bought plywood and plastic foam at a local building supply house and slowly bent each panel component separately over a curved frame quickly built at the construction site. He then bonded all three elements together with glue to create strong curved panels that would maintain their shape over time. + +To determine whether users' decisions to innovate rather than buy made economic sense for them, Slaughter calculated, in a very conservative way, what it would have cost users to buy a manufacturer-developed solution embodied in a manufactured panel rather than build a solution for themselves. Her estimates included only the cost of the delay a user-builder would incur while waiting for delivery of a panel incorporating a manufacturer's solution. Delay in obtaining a solution to a problem encountered at a construction site is costly for a builder, because the schedule of deliveries, subcontractors, and other activities must then be altered. For example, if installation of a panel is delayed, one must also reschedule the arrival of the subcontractor hired to run wires through it, the contractor hired to paint it, and so on. Slaughter estimated the cost of delay to a builder at $280 per crew per day of delay (Means 1989). To compute delay times, she assumed that a manufacturer would always be willing to supply the special item a user requested. She also assumed that no time elapsed while the manufacturer learned about the need, contracted to do the job, designed a solution, and obtained needed regulatory approvals. She then asked panel manufacturers to estimate how long it would take them to simply construct a panel with the solution needed and deliver it to the construction site. 
Delay times computed in this manner ranged from 5 days for some innovations to 250 days for the longest-term one and averaged 44 days.
+={Means, R.;Economic benefit, expectations of by lead users:by manufacturers+2|by users+2;Manufacturers:expectations of economic benefit by+2|innovation and+5}
+
+The conservative nature of this calculation is very clear. For example, Slaughter points out that the regulatory requirements for building components, not included, are in fact much more stringent for manufacturers than for user-builders in the field of residential construction. Manufacturers delivering products can be required to provide test data demonstrating compliance with local building codes for each locality served. Testing new products for compliance in a locality can take from a month to several years, and explicit code approval often takes several additional years. In contrast, a builder that innovates need only convince the local building inspector that what he has done meets code or performance requirements--- often a much easier task (Ehrenkrantz Group 1979; Duke 1988).
+={Duke, R.;Ehrenkrantz Group}
+
+Despite her very conservative method of calculation, Slaughter found the costs to users of obtaining a manufacturer solution to be at least 100 times the actual costs of developing a solution for themselves (table 4.1). Clearly, users' decisions to innovate rather than buy made economic sense in this case.
+
+!_ Modeling Users' Innovate-or-Buy Decisions
+
+In this section I summarize the core of the argument discussed in this chapter via a simple quantitative model developed with Carliss Baldwin. Our goal is to offer additional clarity by trading off the richness of the qualitative argument for simplicity.
+={Baldwin, C.+24}
+
+Whether a user firm should innovate or buy is a variant of a well-known problem: where one should place an activity in a supply chain. In any real-world case many complexities enter. 
In the model that follows, Baldwin and I ignore most of these and consider a simple base case focused on the impact of transaction costs on users' innovate-or-buy considerations. The model deals with manufacturing firms and user firms rather than individual users. We assume that user firms and manufacturer firms both will hire designers from the same homogeneous pool if they elect to solve a user problem. We also assume that both user firms and manufacturer firms will incur the same costs to solve a specific user problem. For example, they will have the same costs to monitor the performance of the designer employees they hire. In this way we simplify our innovate-or-buy problem to one of transaction costs only. + +If there are no transaction costs (for example, no costs to write and enforce a contract), then by Coase's theorem a user will be indifferent between making or buying a solution to its problem. But in the real world there are transaction costs, and so a user will generally prefer to either make or buy. Which, from the point of view of minimizing overall costs of obtaining a problem solution, is the better choice under any given circumstances? +={Coase, R.} + +Let V,{ij}, be the value of a solution to problem j for user i. Let N,{j}, be the number of users having problem j. Let Wh,{j}, be the cost of solving problem j, where W = hourly wage and h,{j}, = hours required to solve it. Let P,{j}, be the price charged by a manufacturer for a solution to problem j. Let T be fixed or "setup" transaction costs, such as writing a general contract for buyers of a solution to problem j. Let t be variable or "frictional" transaction costs, such as tailoring the general contract to a specific customer. + +To explore this problem we make two assumptions. First, we assume that a user firm knows its own problems and the value of a solution to itself, V,{ij},. 
Second, we assume that a manufacturer knows the number of users having each problem, N,{j},, and the value of solutions for each problem for all users, V,{ij},.
+
+These assumptions are in line with real-world incentives of users and manufacturers, although information stickiness generally prevents firms from getting full information. That is, users have a high incentive to know their own problems and the value to them of a solution. Manufacturers, in turn, have an incentive to invest in understanding the nature of problems faced by users in the target market, the number of users affected, and the value that the users would attach to getting a solution in order to determine the potential profitability of markets from their point of view.
+={Sticky information:innovation and}
+
+We first consider the user's payoff for solving a problem for itself. A user has no transaction costs in dealing with itself, so a user's payoff for solving problem j will be V,{ij}, - Wh,{j},. Therefore, a user will buy a solution from an upstream manufacturer rather than develop one for itself if and only if P,{j}, ≤ Wh,{j},.
+
+Next we consider payoffs to a manufacturer for solving problem j. In this case, transaction costs such as those discussed in earlier sections will be encountered. With respect to transaction costs assume first that t = 0 but T > 0. Then, the manufacturer's payoff for solving problem j will be N,{j}, P,{j}, - Wh,{j}, - T, which needs to be positive in order for the manufacturer to find innovation attractive:
+
+N,{j}, P,{j}, - Wh,{j}, - T > 0.
+
+But, as we saw, P,{j}, ≤ Wh,{j}, if the user is to buy, so we may substitute Wh,{j}, for P,{j}, in our inequality. Thus we obtain the following inequality as a condition for the user to buy:
+
+N,{j}, (Wh,{j},) - Wh,{j}, - T > 0,
+
+or
+
+N,{j}, > (T / Wh,{j},) + 1.
+
+In other words, Baldwin and I find that the absolute lower bound on N is greater than 1. 
This means that a single user will always prefer to solve a unique problem j for itself (except in Coase's world, where T = 0, and the user will be indifferent). If every problem is unique to a single user, users will never choose to call on upstream manufacturers for solutions. +={Coase, R.} + +Now assume that T = 0 but t > 0. Then the condition for the user to buy rather than to innovate for itself becomes + +N,{j}, (Wh,{j}, - t) - Wh,{j}, > 0, + +or equivalently (provided Wh,{j}, > t) + +N,{j}, > Wh,{j}, / (Wh,{j}, - t) > 1. + +Again, users will not call on upstream manufacturers to solve problems unique to one user. + +The findings from the simplified model, then, are the following: Problems unique to one user will always be solved efficiently by users hiring designers to work for them in house. In contrast, problems affecting more than a moderate number of users, n, which is a function of the transaction costs, will be efficiently solved by the manufacturer hiring designers to develop the needed new product or service and then selling that solution to all users affected by the problem. However, given sufficient levels of T and/or of t, problems affecting more than one but fewer than n users will not be solved by a manufacturer, and so there will be a market failure: Assuming an institutional framework consisting only of independent users and manufacturers, multiple users will have to solve the same problem independently. + +As illustration, suppose that t = 0.25Wh,{j}, and T = 10Wh,{j},. Then, combining the two expressions and solving for n yields + +n = (11Wh,{j}, /0.75Wh,{j},) = 14.66. + +The condition for the user to buy the innovation rather than innovate itself becomes N,{j}, ≥ 15. For a number of users less than 15 but greater than 1, there will be a wasteful multiplication of user effort: several users will invest in developing the same innovation independently. 
+ +In a world that consists entirely of manufacturers and of users that do not share the innovations they develop, the type of wasteful duplicative innovation investment by users just described probably will occur often. As was discussed earlier in this chapter, and as was illustrated by Slaughter's study, substantial transaction costs might well be the norm. In addition, low numbers of users having the same need---situations where N,{j}, is low---might also be the norm in the case of functionally novel innovations. Functionally novel innovations, as I will show later, tend to be developed by lead users, and lead users are by definition at the leading (low-N,{j},) edge of markets. +={Slaughter, S.;Stressed-skin panels} + +When the type of market failure discussed above does occur, users will have an incentive to search for institutional forms with a lower T and/or a lower t than is associated with assignment of the problem to an upstream manufacturer. One such institutional form involves interdependent innovation development among multiple users (for example, the institutional form used successfully in open source software projects that I will discuss in chapter 7). Baldwin and Clark (2003) show how this form can work to solve the problem of wasteful user innovation investments that were identified in our model. They show that, given modularity in the software's architecture, it will pay for users participating in open source software projects to generate and freely reveal some components of the needed innovation, benefiting from the fact that other users are likely to develop and reveal other components of that innovation. At the limit, the wasteful duplication of users' innovative efforts noted above will be eliminated; each innovation component will have been developed by only one user, but will be shared by many. 
+={Clark, K.} + +!_ Benefiting from the Innovation Process +={Innovation process+4;Users:innovation process and+4} + +Some individual users (not user firms) may decide to innovate for themselves rather than buy even if a traditional accounting evaluation would show that they had made a major investment in time and materials for an apparently minor reward in product functionality. The reason is that individual users may gain major rewards from the process of innovating, in addition to rewards from the product being developed. Make-or-buy evaluations typically include factors such as the time and materials that must be invested to develop a solution. These costs are then compared with the likely benefits produced by the project's "output"---the new product or service created---to determine whether the project is worth doing. This was the type of comparison made by Slaughter, for example, in assessing whether it would be better for the users to make or to buy the stressed-skin panel innovations in her sample. However, in the case of individual user-innovators, this type of assessment can provide too narrow a perspective on what actually constitutes valuable project output. Specifically, there is evidence that individuals sometimes greatly prize benefits derived from their participation in the process of innovation. The process, they say, can produce learning and enjoyment that is of high value to them. +={Slaughter, S.;Stressed-skin panels} + +In the introductory chapter, I pointed out that some recreational activities, such as solving crossword puzzles, are clearly engaged in for process rewards only: very few individuals value the end "product" of a completed puzzle. But process rewards have also been found to be important for innovators that are producing outputs that they and others do value (Hertel, Niedner, and Herrmann 2003; Lakhani and Wolf 2005). 
Lakhani and Wolf studied a sample of individuals (n = 684, response rate = 34 percent) who had written new software code and contributed it to an open source project. They asked the programmers to list their three most important reasons for doing this. Fifty-eight percent of respondents said that an important motivation for writing their code was that they had a work need (33 percent), or a non-work need (30 percent) or both (5 percent) for the code itself. That is, they valued the project's "output" as this is traditionally viewed. However, 45 percent said that one of their top three reasons for writing code was intellectual stimulation, and 41 percent said one of their top three reasons was to improve their own programming skills (Lakhani and Wolf 2005, table 6). Elaborating on these responses, 61 percent of respondents said that their participation in the open source project was their most creative experience or was as creative as their most creative experience. Also, more than 60 percent said that "if there were one more hour in the day" they would always or often dedicate it to programming. +={Herrmann, S.;Hertel, G.;Lakhani, K.;Niedner, S.;Wolf, B.} + +Csikszentmihalyi (1975, 1990, 1996) systematically studied the characteristics of tasks that individuals find intrinsically rewarding, such as rock climbing. He found that a level of challenge somewhere between boredom and fear is important, and also that the experience of "flow" gained when one is fully engaged in a task is intrinsically rewarding. Amabile (1996) proposes that intrinsic motivation is a key determining factor in creativity. She defines a creative task as one that is heuristic in nature (with no predetermined path to solution), and defines a creative outcome as a novel and appropriate (useful) response to such a task. Both conditions certainly can apply to the task of developing a product or a service. 
+
+={Amabile, T.;Csikszentmihalyi, M.}
+
+In sum, to the extent that individual user-innovators benefit from the process of developing or modifying a product as well as from the product actually developed, they are likely to innovate even when the benefits expected from the product itself are relatively low. (Employees of a firm may wish to experience this type of intrinsic reward in their work as well, but managers and commercial constraints may give them less of an opportunity to do so. Indeed, "control over my own work" is cited by many programmers as a reason that they enjoy creating code as volunteers on open source projects more than they enjoy coding for their employers for pay.)
+
+1~ 5 Users' Low-Cost Innovation Niches
+={Users:innovation and+50|low-cost innovation niches of+50}
+
+!_ The Problem-Solving Process
+={Trial-and-error problem solving+11}
+
+Product and service development is at its core a problem-solving process. Research into the nature of problem solving shows it to consist of trial and error, directed by some amount of insight as to the direction in which a solution might lie (Baron 1988). Trial and error has also been found to be prominent in the problem-solving work of product and process development (Marples 1961; Allen 1966; von Hippel and Tyre 1995; Thomke 1998, 2003).
+={Allen, T.;Baron, J.;Marples, D.;Thomke, S.;Tyre, M.;von Hippel, E.}
+
+Trial-and-error problem solving can be envisioned as a four-phase cycle that is typically repeated many times during the development of a new product or service. Problem solvers first conceive of a problem and a related solution based on their best knowledge and insight. Next, they build a physical or virtual prototype of both the possible solution they have envisioned and the intended use environment. Third, they run the experiment---that is, they operate their prototyped solution and see what happens. 
Fourth and finally, they analyze the result to understand what happened in the trial and to assess the "error information" that they gained. (In the trial-and-error formulation of the learning process, error is the new information or learning derived from an experiment by an experimenter: it is the aspect(s) of the outcome that the experimenter did not predict.) Developers then use the new learning to modify and improve the solution under development before building and running a new trial (figure 5.1). + +Trial-and-error experimentation can be informal or formal; the underlying principles are the same. As an example on the informal side, consider a user experiencing a need and then developing what eventually turns out to be a new product: the skateboard. In phase 1 of the cycle, the user combines need and solution information into a product idea: "I am bored with roller skating. How can I get down this hill in a more exciting way? Maybe it would be fun to put my skates' wheels under a board and ride down on that." In phase 2, the user builds a prototype by taking his skates apart and hammering the wheels onto the underside of a board. In phase 3, he runs the experiment by climbing onto the board and heading down the hill. In phase 4, he picks himself up from an inaugural crash and thinks about the error information he has gained: "It is harder to stay on this thing than I thought. What went wrong, and how can I improve things before my next run down the hill?" + +% (2) BUILD +% (3) RUN +% (4) ANALYZE +% (1) DESIGN +% DONE +% DESIGN REQUIREMENTS +% DESIGN ACTIVITY +% Changes in +% exogenous +% information +% Use learning from previous +% cycle(s) to conceive and design +% an improved solution. +% . +% Develop models and/or build +% prototypes to be used in +% running experiments. +% . +% Test model/prototype in real +% or simulated use environment. +% . +% Analyze findings from +% previous step and learn. +% . 
+% Figure 5.1 +% The trial-and-error cycle of product development. + +{di_evh_f5-1.png}image + +!_ Figure 5.1 +The trial-and-error cycle of product development. + +As an example of more formal experimentation, consider a product-development engineer working in a laboratory to improve the performance of an automobile engine. In phase 1, need and solution information are again combined into a design idea: "I need to improve engine fuel efficiency. I think that a more even expansion of the flame in the cylinders is a possible solution direction, and I think that changing the shape of the spark plug electrodes will improve this." In phase 2, the engineer builds a spark plug incorporating her new idea. In phase 3, she inserts the new spark plug into a lab test engine equipped with the elaborate instrumentation needed to measure the very rapid propagation of a flame in the cylinders of an auto engine and runs the test. In phase 4, she feeds the data into a computer and analyzes the results. She asks: "Did the change in spark plug design change the flame front as expected? Did it change fuel efficiency? How can I use what I have learned from this trial to improve things for the next one?" + +In addition to the difference in formality, there is another important difference between these two examples. In the first example, the skateboard user was conducting trial and error with a full prototype of the intended product in a real use environment---his own. In the second example, the experimental spark plug might have been a full prototype of a real product, but it probably consisted only of that portion of a real spark plug that actually extends into a combustion chamber. Also, only /{aspects}/ of the use environment were involved in the lab experiment. That is, the test engine was not a real auto engine, and it was not being operated in a real car traveling over real roads. 
+ +Experimentation is often carried out using simplified versions---models--- of the product being designed and its intended use environment. These models can be physical (as in the example just given), or they can be virtual (as in the case of thought experiments or computer simulations). In a computer simulation, both the product and the environment are represented in digital form, and their interaction is tested entirely within a computer. For example, one might make a digital model of an automobile and a crash barrier. One could then use a computer to simulate the crash of the model car into the model barrier. One would analyze the results by calculating the effects of that crash on the structure of the car. + +The value of using models rather than the real thing in experimentation is twofold. First, it can reduce the cost of an experiment---it can be much cheaper to crash a simulated BMW than a real one. Second, it can make experimental results clearer by making them simpler or otherwise different than real life. If one is trying to test the effect of a small change on car safety, for example, it can be helpful to remove everything not related to that change from the experiment. For example, if one is testing the way a particular wheel suspension structure deforms in a crash, one does not have to know (or spend time computing) how a taillight lens will react in the crash. Also, in a real crash things happen only once and happen very fast. In a virtual crash executed by computer, on the other hand, one can repeat the crash sequence over and over, and can stretch time out or compress it exactly as one likes to better understand what is happening (Thomke 2003). +={Thomke, S.+2} + +Users and others experimenting with real prototypes in real use environments can also modify things to make tests simpler and clearer. 
A restaurant chef, for example, can make slight variations in just a small part of a recipe each time a customer calls for it, in order to better understand what is happening and make improvements. Similarly, a process machine user can experiment with only a small portion of machine functioning over and over to test changes and detect errors. + +Sometimes designers will test a real experimental object in a real experimental context only after experimenting with several generations of models that isolate different aspects of the real and/or encompass increasing amounts of the complexity of the real. Developers of pharmaceuticals, for example, might begin by testing a candidate drug molecule against just the purified enzyme or receptor it is intended to affect, then test it again and again against successively more complex models of the human organism (tissue cultures, animal models, etc.) before finally seeking to test its effect on real human patients during clinical trials (Thomke, von Hippel, and Franke 1998). +={Franke, N.;von Hippel, E.} + +!_ Sticky Information +={Sticky information+11:innovation and+11} + +Any experiment is only as accurate as the information that is used as inputs. If inputs are not accurate, outcomes will not be accurate: "garbage in, garbage out." + +The goal of product development and service development is to create a solution that will satisfy needs of real users within real contexts of use. The more complete and accurate the information on these factors, the higher the fidelity of the models being tested. If information could be transferred costlessly from place to place, the quality of the information available to problem solvers would or could be independent of location. But if information is costly to transfer, things are different. User-innovators, for example, will then have better information about their needs and their use context than will manufacturers. 
After all, they create and live in that type of information in full fidelity! Manufacturer-innovators, on the other hand, must transfer that information to themselves at some cost, and are unlikely to be able to obtain it in full fidelity at any cost. However, manufacturers might well have a higher-fidelity model of the solution types in which they specialize than users have.
+={Information asymmetries+31;Local information+35;Users:information asymmetries of+31}
+
+It turns out that much information needed by product and service designers is "sticky." In any particular instance, the stickiness of a unit of information is defined as the incremental expenditure required to transfer that unit of information to a specified location in a form usable by a specified information seeker. When this expenditure is low, information stickiness is low; when it is high, stickiness is high (von Hippel 1994). That information is often sticky has been shown by studying the costs of transferring information regarding fully developed process technology from one location to another with full cooperation on both sides. Even under these favorable conditions, costs have been found to be high---leading one to conclude that the costs of transferring information during product and service development are likely to be at least as high. Teece (1977), for example, studied 26 international technology-transfer projects and found that the costs of information transfer ranged from 2 percent to 59 percent of total project costs and averaged 19 percent---a considerable fraction. Mansfield et al. (1982) also studied a number of projects involving technology transfer to overseas plants, and also found technology-transfer costs averaging about 20 percent of total project costs. Winter and Szulanski (2001) explored replication of well-known organizational routines at new sites and found the process difficult and costly. 
+
+={Mansfield, E.;Szulanski, G.;Teece, D.;Winter, S.;von Hippel, E.+63}
+
+Why is information transfer so costly? The term "stickiness" refers only to a consequence, not to a cause. Information stickiness can result from causes ranging from attributes of the information itself to access fees charged by an information owner. Consider tacitness---a lack of explicit encoding. Polanyi (1958, pp. 49--53) noted that many human skills are tacit because "the aim of a skilful performance is achieved by the observance of a set of rules which are not known as such to the person following them." For example, swimmers are probably not aware of the rules they employ to keep afloat (e.g., in exhaling, they do not completely empty their lungs), nor are medical experts generally aware of the rules they follow in order to reach a diagnosis of a disease. "Indeed," Polanyi says, "even in modern industries the indefinable knowledge is still an essential part of technology." Information that is tacit is also sticky because it cannot be transferred at low cost. As Polanyi points out, "an art which cannot be specified in detail cannot be transmitted by prescription, since no prescription for it exists. It can be passed on only by example from master to apprentice. . . ." Apprenticeship is a relatively costly mode of transfer.
+={Polanyi, M.}
+
+Another cause of information stickiness is related to absorptive capacity. A firm's or an individual's capacity to absorb new, outside technical information is largely a function of prior related knowledge (Cohen and Levinthal 1990). Thus, a firm knowing nothing about circuit design but seeking to apply an advanced technique for circuit engineering may be unable to apply it without first learning more basic information. The stickiness of the information about the advanced technique for the firm in question is therefore higher than it would be for a firm that already knows that basic information. 
(Recall that the stickiness of a unit of information is defined as the incremental expenditure required to transfer a unit of information to a specified site in a form usable by a /{specific}/ information seeker.) +={Cohen, W.;Levinthal, D.} + +Total information stickiness associated with solving a specific problem is also determined by the amount of information required by a problem solver. Sometimes a great deal is required, for two reasons. First, as Rosenberg (1976, 1982) and Nelson (1982, 1990) point out, much technological knowledge deals with the specific and the particular. Second, one does not know in advance of problem solving which particular items will be important. +={Nelson, R.;Rosenberg, N.} + +An example from a study by von Hippel and Tyre (1995) illustrates both points nicely. Tyre and I studied how and why novel production machines failed when they were first introduced into factory use. One of the machines studied was an automated machine used by a computer manufacturing firm to place large integrated circuits onto computer circuit boards. The user firm had asked an outside group to develop what was needed, and that group had developed and delivered a robot arm coupled to a machine-vision system. The arm, guided by the vision system, was designed to pick up integrated circuits and place them on a circuit board at precise locations. +={Tyre, M.+4} + +Upon being installed in the factory, the new component-placing machine failed many times as a result of its developers' lack of some bit of information about the need or use environment. For example, one day machine operators reported that the machine was malfunctioning---again---and they did not know why. Investigation traced the problem to the machine-vision system. This system used a small TV camera to locate specific metalized patterns on the surface of each circuit board being processed. 
To function, the system needed to "see" these metalized patterns clearly against the background color of the board's surface. The vision system developed by the machine-development group had functioned properly in their lab when tested with sample boards from the user factory. However, the field investigation showed that in the factory it failed when boards that were light yellow in color were being processed. + +The fact that some of the boards being processed were sometimes light yellow was a surprise to the machine developers. The factory personnel who had set the specifications for the machine knew that the boards they processed varied in color; however, they had not volunteered the information, because they did not know that the developers would be interested. Early in the machine-development process, they had simply provided samples of boards used in the factory to the machine-development group. And, as it happened, these samples were green. On the basis of the samples, developers had then (implicitly) assumed that all boards processed in the field were green. It had not occurred to them to ask users "How much variation in board color do you generally experience?" Thus, they had designed the vision system to work successfully with boards that were green. + +In the case of this field failure, the item of information needed to understand or predict this problem was known to the users and could easily have been provided to the machine developers---had the developers thought to ask and/or had users thought to volunteer it. But in the actual evolution of events this was not done. The important point is that this omission was not due to poor practice; it was due to the huge amount of information about the need and the use environment that was /{potentially}/ relevant to problem solvers. Note that the use environment and the novel machine contain many highly specific attributes that could potentially interact to cause field problems. 
Note also that the property of the board causing this particular type of failure was very narrow and specific. That is, the problem was not that the board had physical properties, nor that it had a color. The problem was precisely that some boards were yellow, and a particular shade of yellow at that. Since a circuit board, like most other components, has many attributes in addition to color (shape, size, weight, chemical composition, resonant frequency, dielectric constant, flexibility, and so on), it is likely that problem solvers seeking to learn everything they might need to know about the use and the use environment would have to collect a very large (perhaps unfeasibly large) number of very specific items of information. + +Next, consider that the information items the problem solver will actually need (of the many that exist) are contingent on the solution path taken by the engineer designing the product. In the example, the problem caused by the yellow color of the circuit board was contingent on the design solution to the component-placing problem selected by the engineer during the development process. That is, the color of the circuit boards in the user factory became an item the problem solvers needed to know only when engineers, in the course of their development of the component placer, decided to use a vision system in the component-placing machine they were designing, and the fact that the boards were yellow became relevant only when the engineers chose a video camera and lighting that could not distinguish the metalized patterns on the board against a yellow background. Clearly, it can be costly to transfer the many items of information that a product or service developer might require---even if each individual item has low stickiness---from one site to another. + +!_ How Information Asymmetries Affect User Innovation vs. 
Manufacturer Innovation +={Manufacturers:information asymmetries of+11|innovation and+25} + +An important consequence of information stickiness is that it results in information asymmetries that cannot be erased easily or cheaply. Different users and manufacturers will have different stocks of information, and may find it costly to acquire information they need but do not have. As a result, each innovator will tend to develop innovations that draw on the sticky information it already has, because that is the cheapest course of action (Arora and Gambardella 1994; von Hippel 1994). In the specific case of product development, this means that users as a class will tend to develop innovations that draw heavily on their own information about need and context of use. Similarly, manufacturers as a class will tend to develop innovations that draw heavily on the types of solution information in which they specialize. +={Arora, A.;Gambardella, A.} + +This effect is visible in studies of innovation. Riggs and von Hippel (1994) studied the types of innovations made by users and manufacturers that improved the functioning of two major types of scientific instruments. +={Riggs, W.;Scientific instruments+9;Sticky information:and scientific instruments+1} + +They found that users tended to develop innovations that enabled the instruments to do qualitatively new types of things for the first time. In contrast, manufacturers tended to develop innovations that enabled users to do the same things they had been doing, but to do them more conveniently or reliably (table 5.1). For example, users were the first to modify the instruments to enable them to image and analyze magnetic domains at sub-microscopic dimensions. In contrast, manufacturers were the first to computerize instrument adjustments to improve ease of operation. Sensitivity, resolution, and accuracy improvements fall somewhere in the middle, as the data show. 
These types of improvements can be driven by users seeking to do specific new things, or by manufacturers applying their technical expertise to improve the products along known dimensions of merit, such as accuracy. + +!_ Table 5.1 +Users tend to develop innovations that deliver novel functions. + +% Innovation developed by + +table{~h c4; 60; 15; 15; 10; + +Type of improvement provided by innovation +User +Manufacturer +n + +New functional capability +82% +18% +17 + +Sensitivity, resolution, or accuracy improvement +48% +52% +23 + +Convenience or reliability improvement +13% +87% +24 + +Total sample size +~ +~ +64 + +}table + +Source: Riggs and von Hippel 1994, table 3. + +The variation in locus of innovation for different types of innovations, seen in table 5.1 does fit our expectations from the point of view of sticky information considerations. But these findings are not controlled for profitability, and so it might be that profits for new functional capabilities are systematically smaller than profits obtainable from improvements made to existing functionality. If so, this could also explain the patterns seen. + +Ogawa (1998) took the next necessary step and conducted an empirical study that did control for profitability of innovation opportunities. He too found the sticky-information effect---this time visible in the division of labor /{within}/ product-development projects. He studied patterns in the development of a sample of 24 inventory-management innovations. All were jointly developed by a Japanese equipment manufacturer, NEC, and by a user firm, Seven-Eleven Japan (SEJ). SEJ, the leading convenience-store company in Japan, is known for its inventory management. Using innovative methods and equipment, it is able to turn over its inventory as many as 30 times a year, versus 12 times a year for competitors (Kotabe 1995). 
An example of such an innovation jointly developed by SEJ and NEC is just-in-time reordering, for which SEJ created the procedures and NEC the hand-held equipment to aid store clerks in carrying out their newly designed tasks. Equipment sales to SEJ are important to NEC: SEJ has thousands of stores in Japan. +={Kotabe, M.;Ogawa, S.+1} + +The 24 innovations studied by Ogawa varied in the amount of sticky need information each required from users (having to do with store inventory- management practices) and the amount of sticky solution information required from manufacturers (having to do with new equipment technologies). Each also varied in terms of the profit expectations of both user and manufacturer. Ogawa determined how much of the design for each was done by the user firm and how much by the manufacturer firm. Controlling for profit expectations, he found that increases in the stickiness of user information were associated with a significant increase in the amount of need-related design undertaken by the user (Kendall correlation coefficient = 0.5784, P < 0.01). Conversely he found that increased stickiness of technology-related information was associated with a significant reduction in the amount of technology design done by the user (Kendall correlation coefficient = 0.4789, P < 0.05). In other words, need-intensive tasks within product-development projects will tend to be done by users, while solution-intensive ones will tend to be done by manufacturers. + +!_ Low-Cost Innovation Niches + +Just as there are information asymmetries between users and manufacturers as classes, there are also information asymmetries among individual user firms and individuals, and among individual manufacturers as well. A study of mountain biking by Lüthje, Herstatt, and von Hippel (2002) shows that information held locally by individual user-innovators strongly affects the type of innovations they develop.
Mountain biking involves bicycling on rough terrain such as mountain trails. It may also involve various other extreme conditions, such as bicycling on snow and ice and in the dark (van der Plas and Kelly 1998). +={Kelly, C.;Lüthje, C.;Van der Plas, R.;Herstatt, C.+12;Lüthje, C.+12;Mountain biking+12;Users:innovate-or-buy decisions by+12} + +Mountain biking began in the early 1970s when some young cyclists started to use their bicycles off-road. Existing commercial bikes were not suited to this type of rough use, so early users put together their own bikes. They used strong bike frames, balloon tires, and powerful drum brakes designed for motorcycles. They called their creations "clunkers" (Penning 1998; Buenstorf 2002). +={Buenstorf, G.;Penning, C.} + +Commercial manufacture of mountain bikes began about 1975, when some of the early users of mountain bikes began to also build bikes for others. A tiny cottage industry developed, and by 1976 a half-dozen small assemblers existed in Marin County, California. In 1982, a small firm named Specialized, an importer of bikes and bike parts that supplied parts to the Marin County mountain bike assemblers, took the next step and brought the first mass-produced mountain bike to market. Major bike manufacturers then followed and started to produce mountain bikes and sell them at regular bike shops across the United States. By the mid 1980s the mountain bike was fully integrated in the mainstream bike market, and it has since grown to significant size. In 2000, about $58 billion (65 percent) of total retail sales in the US bicycle market were generated in the mountain bike category (National Sporting Goods Association 2002). + +Mountain biking enthusiasts did not stop their innovation activities after the introduction of commercially manufactured mountain bikes. 
They kept pushing mountain biking into more extreme environmental conditions, and they continued to develop new sports techniques involving mountain bikes (/{Mountain Bike}/ 1996). Thus, some began jumping their bikes from house roofs and water towers and developing other forms of acrobatics. As they did so, they steadily discovered needs for improvements to their equipment. Many responded by developing and building the improvements they needed for themselves. + +Our sample of mountain bikers came from the area that bikers call the North Shore of the Americas, ranging from British Columbia to Washington State. Expert mountain bikers told us that this was a current "hot spot" where new riding styles were being developed and where the sport was being pushed toward new limits. We used a questionnaire to collect data from members of North Shore mountain biking clubs and from contributors to the mailing lists of two North Shore online mountain biking forums. Information was obtained from 291 mountain bikers. Nineteen percent of bikers responding to the questionnaire reported developing and building a new or modified item of mountain biking equipment for their own use. The innovations users developed were appropriate to the needs associated with their own riding specialties and were heterogeneous in function. +={Custom products:heterogeneity of user needs and+6;User need+6;Users:needs of+6} + +We asked mountain bikers who had innovated about the sources of the need and solution information they had used in their problem solving. In 84.5 percent of the cases respondents strongly agreed with the statement that their need information came from /{personal needs they had frequently experienced}/ rather than from information about the needs of others. 
With respect to solution information, most strongly agreed with the statement that /{they used solution information they already had}/, rather than learning new solution information in order to develop their biking equipment innovation (table 5.2). + +!_ Table 5.2 +Innovators tended to use solution information they already had "in stock" to develop their ideas. Tabulated here are innovators' answers to the question "How did you obtain the information needed to develop your solution?" +={Lüthje, C.} + +table{~h c4; 55; 15; 15; 15; + +. +Mean +Median +Very high or high agreement + +"I had it due to my professional background." +4.22 +4 +47.5% + +"I had it from mountain biking or another hobby." +4.56 +5 +52.4% + +"I learned it to develop this idea." +2.11 +2 +16% + +}table + +Source: Lüthje et al. 2003. N = 61. Responses were rated on a seven-point scale, with 1 = not at all true and 7 = very true. + +!_ Discussion + +To the extent that users have heterogeneous and sticky need and solution information, they will have heterogeneous low-cost innovation niches. Users can be sophisticated developers within those niches, despite their reliance on their own need information and solution information that they already have in stock. On the need side, recall that user-innovators generally are lead users and generally are expert in the field or activity giving rise to their needs. With respect to solution information, user firms have specialties that may be at a world-class level. Individual users can also have high levels of solution expertise. After all, they are students or employees during the day, with training and jobs ranging from aerospace engineering to orthopedic surgery. Thus, mountain bikers might not want to /{learn}/ orthopedic surgery to improve their biking equipment, but if they already /{are}/ expert in that field they could easily draw on what they know for relevant solution information. 
Consider the following example drawn from the study of mountain biking discussed earlier: + +I'm a human movement scientist working in ergonomics and biomechanics. I used my medical experience for my design. I calculated a frame design suitable for different riding conditions (downhill, climb). I did a CAD frame design on Catia and conceived a spring or air coil that can be set to two different heights. I plan to build the bike next year. + +Users' low-cost innovation niches can be narrow because their development "labs" for such experimentation often consist largely of their individual use environment and customary activities. Consider, for example, the low-cost innovation niches of individual mountain bikers. Serious mountain bikers generally specialize in a particular type of mountain biking activity. Repeated specialized play and practice leads to improvement in related specialized skills. This, in turn, may lead to a discovery of a problem in existing mountain biking equipment and a responsive innovation. Thus, an innovating user in our mountain biking study reported the following: "When doing tricks that require me to take my feet off the bike pedals in mid-air, the pedals often spin, making it hard to put my feet back onto them accurately before landing." Such a problem is encountered only when a user has gained a high level of skill in the very specific specialty of jumping and performing tricks in mid-air. Once the problem has been encountered and recognized, however, the skilled specialist user can re-evoke the same problematic conditions at will during ordinary practice. The result is the creation of a low-cost laboratory for testing and comparing different solutions to that problem. The user is benefiting from enjoyment of his chosen activity and is developing something new via learning by doing at the same time. 
+ +In sharp contrast, if that same user decides to stray outside his chosen activity in order to develop innovations of interest to others with needs that are different from his own, the cost properly assignable to innovation will rise. To gain an equivalent-quality context for innovation, such a user must invest in developing personal skill related to the new innovation topic. Only in this way will he gain an equivalently deep understanding of the problems relevant to practitioners of that skill, and acquire a "field laboratory" appropriate to developing and testing possible solutions to those new problems. + +Of course, these same considerations apply to user firms as well as to individual users. A firm that is in the business of polishing marble floors is a user of marble polishing equipment and techniques. It will have a low-cost learning laboratory with respect to improvements in these because it can conduct trial-and-error learning in that "lab" during the course of its customary business activities. Innovation costs can be very low because innovation activities are paid for in part by rewards unrelated to the novel equipment or technique being developed. The firm is polishing while innovating---and is getting paid for that work (Foray 2004). The low-cost innovation niche of the marble polishing firm may be narrow. For example, it is unlikely to have any special advantage with respect to innovations in the polishing of wood floors, which requires different equipment and techniques. +={Foray, D.} + +1~ 6 Why Users Often Freely Reveal Their Innovations +={Free revealing of innovation information:evidence of+10|users and+50;Information commons+13;Intellectual property rights:free revealing and+50;Users:free revealing by+50} + +Products, services, and processes developed by users become more valuable to society if they are somehow diffused to others that can also benefit from them.
If user innovations are not diffused, multiple users with very similar needs will have to invest to (re)develop very similar innovations, which would be a poor use of resources from the social welfare point of view. Empirical research shows that new and modified products developed by users often do diffuse widely---and they do this by an unexpected means: user-innovators themselves often voluntarily publicly reveal what they have developed for all to examine, imitate, or modify without any payment to the innovator. + +In this chapter, I first review evidence that free revealing is frequent. Next, I discuss the case for free revealing from an innovators' perspective, and argue that it often can be the best /{practical}/ route for users to increase profit from their innovations. Finally, I discuss the implications of free revealing for innovation theory. + +!_ Evidence of Free Revealing +={Free revealing of innovation information} + +When my colleagues and I say that an innovator "freely reveals" proprietary information, we mean that all intellectual property rights to that information are voluntarily given up by that innovator and all parties are given equal access to it---the information becomes a public good (Harhoff, Henkel, and von Hippel 2003). For example, placement of non-patented information in a publicly accessible site such as a journal or public website would be free revealing as we define it. Free revealing as so defined does not mean that recipients necessarily acquire and utilize the revealed information at no cost to themselves. Recipients may, for example, have to pay for a subscription to a journal or for a field trip to an innovation site to acquire the information being freely revealed. Also, some may have to obtain complementary information or other assets in order to fully understand that information or put it to use. 
However, if the possessor of the information does not profit from any such expenditures made by its adopters, the information itself is still freely revealed, according to our definition. This definition of free revealing is rather extreme in that revealing with some small constraints, as is sometimes done, would achieve largely the same economic effect. Still, it is useful to discover that innovations are often freely revealed even in terms of this stringent definition. +={Harhoff, D.;Henkel, J.} + +Routine and intentional free revealing among profit-seeking firms was first described by Allen (1983). He noticed the phenomenon, which he called collective invention, in historical records from the nineteenth-century English iron industry. In that industry, ore was processed into iron by means of large furnaces heated to very high temperatures. Two attributes of the furnaces used had been steadily improved during the period 1850--1875: chimney height had been increased and the temperature of the combustion air pumped into the furnace during operation had been raised. These two technical changes significantly and progressively improved the energy efficiency of iron production---a very important matter for producers. Allen noted the surprising fact that employees of competing firms publicly revealed information on their furnace design improvements and related performance data in meetings of professional societies and in published material. +={Allen, R.;Free revealing of innovation information:collective invention and} + +After Allen's initial observation, a number of other authors searched for free revealing among profit-seeking firms and frequently found it. Nuvolari (2004) studied a topic and time similar to that studied by Allen and found a similar pattern of free revealing in the case of improvements made to steam engines used to pump out mines in the 1800s. 
At that time, mining activities were severely hampered by water that tended to flood into mines of any depth, and so an early and important application of steam engines was for the removal of water from mines. Nuvolari explored the technical history of steam engines used to drain copper and tin mines in England's Cornwall District. Here, patented steam engines developed by James Watt were widely deployed in the 1700s. After the expiration of the Watt patent, an engineer named Richard Trevithick developed a new type of high-pressure engine in 1812. Instead of patenting his invention, he made his design available to all for use without charge. The engine soon became the basic design used in Cornwall. Many mine engineers improved Trevithick's design further and published what they had done in a monthly journal, /{Lean's Engine Reporter}/. This journal had been founded by a group of mine managers with the explicit intention of aiding the rapid diffusion of best practices among these competing firms. +={Nuvolari, A.;Trevithick, R.;Watt, J.} + +Free revealing has also been documented in the case of more recent industrial equipment innovations developed by users. Lim (2000) reports that IBM was first to develop a process to manufacture semiconductors that incorporated copper interconnections among circuit elements instead of the traditionally used aluminum ones. After some delay, IBM revealed increasing amounts of proprietary process information to rival users and to equipment suppliers. Widespread free revealing was also found in the case of automated clinical chemistry analyzers developed by the Technicon Corporation for use in medical diagnosis. After commercial introduction of the basic analyzer, many users developed major improvements to both the analyzer and to the clinical tests processed on that equipment.
These users, generally medical personnel, freely revealed their improvements via publication, and at company-sponsored seminars (von Hippel and Finkelstein 1979). Mishina (1989) found free, or at least selective no-cost revealing in the lithographic equipment industry. He reported that innovating equipment users would sometimes reveal what they had done to machine manufacturers. Morrison, Roberts, and I, in our study of library IT search software (discussed in chapter 2 above), found that innovating users freely revealed 56 percent of the software modifications they had developed. Reasons given for not revealing the remainder had nothing to do with considerations of intellectual property protection. Rather, users who did not share said they had no convenient users' group forum for doing so, and/or they thought their innovation was too specialized to be of interest to others. +={IBM;Finkelstein, S.;Lim, K.;Mishina, K.;Morrison, Pamela;Roberts, J.;Technicon Corporation;Free revealing of innovation information:and library information search system;Lead users:library information search system and;Library information search system} + +Innovating users of sports equipment also have been found to freely reveal their new products and product modifications. Franke and Shah (2003), in their study of four communities of serious sports enthusiasts described in chapter 2, found that innovating users uniformly agreed with the statement that they shared their innovation with their entire community free of charge---and strongly disagreed with the statement that they sold their innovations (p < 0.001, t-test for dependent samples). Interestingly, two of the four communities they studied engaged in activities involving significant competition among community members. Innovators in these two communities reported high but significantly less willingness to share, as one might expect in view of the potentially higher level of competitive loss free revealing would entail. 
+={Franke, N.;Shah, S.;Free revealing of innovation information:and sports equipment;Sporting equipment:free revealing and} + +Contributors to the many open source software projects extant (more than 83,000 were listed on SourceForge.net in 2004) also routinely make the new code they have written public. Well-known open source software products include the Linux operating system software and the Apache web server computer software. Some conditions are attached to open source code licensing to ensure that the code remains available to all as an information commons. Because of these added protections, open source code does not quite fit the definition of free revealing given earlier in this chapter. (The licensing of open source software will be discussed in detail in chapter 7.) +={Linux+1;Apache web server software;Free revealing of innovation information:and open source software+1;Open source software:free revealing and+1} + +Henkel (2003) showed that free revealing is sometimes practiced by directly competing manufacturers. He studied manufacturers that were competitors and that had all built improvements and extensions to a type of software known as embedded Linux. (Such software is "embedded in" and used to operate equipment ranging from cameras to chemical plants.) He found that these manufacturers freely revealed improvements to the common software platform that they all shared and, with a lag, also revealed much of the equipment-specific code they had written. +={Henkel, J.;Free revealing of innovation information:manufacturers and;Manufacturers:free revealing and} + +!_ The Practical Case for Free Revealing +={Free revealing of innovation information:case for+2} + +The "private investment model" of innovation assumes that innovation will be supported by private investment if and as innovators can make attractive profits from doing so. 
In this model, any free revealing or uncompensated "spillover" of proprietary knowledge developed by private investment will reduce the innovator's profits. It is therefore assumed that innovators will strive to avoid spillovers of innovation-related information. From the perspective of this model, then, free revealing is a major surprise: it seems to make no sense that innovators would intentionally give away information for free that they had invested money to develop. + +In this subsection I offer an explanation for the puzzle by pointing out that free revealing is often the best /{practical}/ option available to user innovators. Harhoff, Henkel, and von Hippel (2003) found that it is in practice very difficult for most innovators to protect their innovations from direct or approximate imitation. This means that the practical choice is typically /{not}/ the one posited by the private investment model: should innovators voluntarily freely reveal their innovations, or should they protect them? Instead, the real choice facing user innovators often is whether to voluntarily freely reveal or to arrive at the same end state, perhaps with a bit of a lag, via involuntary spillovers. The practical case for voluntary free revealing is further strengthened because it can be accomplished at low cost, and often yields private benefits to the innovators. When benefits from free revealing exceed the benefits that are /{practically}/ obtainable from holding an innovation secret or licensing it, free revealing should be the preferred course of action for a profit-seeking firm or individual. +={Harhoff, D.;Henkel, J.;Free revealing of innovation information:and information diffusion+9} + +!_ Others Often Know Something Close to "Your" Secret + +Innovators seeking to protect innovations they have developed as their intellectual property must establish some kind of monopoly control over the innovation-related information. 
In practice, this can be done either by effectively hiding the information as a trade secret, or by getting effective legal protection by patents or copyrights. (Trademarks also fall under the heading of intellectual property, but we do not consider those here.) In addition, however, it must be the case that /{others}/ do not know substitute information that skirts these protections and that they /{are}/ willing to reveal. If multiple individuals or firms have substitutable information, they are likely to vary with respect to the competitive circumstances they face. A specific innovator's ability to protect "its" innovation as proprietary property will then be determined for all holders of such information by the decision of the one having the least to lose by free revealing. If one or more information holders expect no loss or even a gain from a decision to freely reveal, then the secret will probably be revealed despite other innovators' best efforts to avoid this fate. +={Intellectual property rights:copyrights and|patents and|trade secrets and} + +Commonly, firms and individuals have information that would be valuable to those seeking to imitate a particular innovation. This is because innovators and imitators seldom need access to a specific version of an innovation. Indeed, engineers seldom even want to see a solution exactly as their competitors have designed it: specific circumstances differ even among close competitors, and solutions must in any case be adapted to each adopter's precise circumstances. What an engineer does want to extract from the work of others is the principles and the general outline of a possible improvement, rather than the easily redevelopable details. This information is likely to be available from many sources. + +For example, suppose you are a system developer at a bank and you are tasked with improving in-house software for checking customers' credit online. 
On the face of it, it might seem that you would gain most by studying the details of the systems that competing banks have developed to handle that same task. It is certainly true that competing banks may face market conditions very similar to your bank, and they may well not want to reveal the valuable innovations they have developed to a competitor. However, the situation is still by no means bleak for an imitator. There are also many non-bank users of online credit checking systems in the world---probably millions. Some will have innovated and be willing to reveal what they have done, and some of these will have the information you need. The likelihood that the information you seek will be freely revealed by some individual or firm is further enhanced by the fact that your search for novel basic improvements may profitably extend far beyond the specific application of online credit checking. Other fields will also have information on components of the solution you need. For example, many applications in addition to online credit checking use software components designed to determine whether persons seeking information are authorized to receive it. Any can potentially be a provider of information for this element of your improved system. + +A finding by Lakhani and von Hippel (2003) illustrates the possibility that many firms and individuals may have similar information. Lakhani and von Hippel studied Apache help-line websites. These sites enable users having problems with Apache software to post questions, and others to respond with answers. The authors asked those who provided answers how many other help-line participants they thought also knew a solution to specific and often obscure problems they had answered on the Apache online forum. Information providers generally were of the opinion that some or many other help-line participants also knew a solution, and could have provided an answer if they themselves had not done so (table 6.1). 
+={Lakhani, K.;Apache web server software+2}
+
+!_ Table 6.1
+Even very specialized information is often widely known. Tabulated here are answers to a question asked of help-line information providers: "How many others do you think knew the answer to the question you answered?"
+={Lakhani, K.}
+
+table{~h c3; 40; 30; 30;
+
+~
+Frequent providers (n = 21)
+
+Other providers (n = 67)
+
+Many
+38%
+61%
+
+A few with good Apache knowledge
+38%
+18%
+
+A few with specific problem experience
+24%
+21%
+
+}table
+
+Source: Lakhani and von Hippel 2003, table 10.
+
+Even in the unlikely event that a secret is held by one individual, that information holder will not find it easy to keep a secret for long. Mansfield (1985) studied 100 American firms and found that "information concerning development decisions is generally in the hands of rivals within about 12 to 18 months, on the average, and information concerning the detailed nature and operation of a new product or process generally leaks out within about a year." This observation is supported by Allen's previously mentioned study of free revealing in the nineteenth-century English iron industry. Allen (1983, p. 17) notes that developers of improved blast furnace designs were unlikely to be able to keep their valuable innovations secret because "in the case of blast furnaces and steelworks, the construction would have been done by contractors who would know the design." Also, "the designs themselves were often created by consulting engineers who shifted from firm to firm."
+={Allen, R.;Mansfield, E.+4;Free revealing of innovation information:evidence of}
+
+!_ Low Ability to Profit from Patenting
+={Free revealing of innovation information:patent protection and+12;Intellectual property rights:patents and+12}
+
+Next, suppose that a single user-innovator is the only holder of a particular unit of innovation-related information, and that for some reason there are no easy substitutes. 
That user actually does have a real choice with respect to disposing of its intellectual property: it can keep the innovation secret and profit from in-house use only, it can license it, or it can choose to freely reveal the innovation. We have just seen that the practical likelihood of keeping a secret is low, especially when there are multiple potential providers of very similar secrets. But if one legally protects an innovation by means of a patent or a copyright, one need not keep an innovation secret in order to control it. Thus, a firm or an individual that freely reveals is forgoing any chance to get a profit via licensing of intellectual property for a fee. What, in practical terms, is the likelihood of succeeding at this and so of forgoing profit by choosing to freely reveal? +={Intellectual property rights:copyrights and+1|patents and+4} + +In most subject matters, the relevant form of legal protection for intellectual property is the patent, generally the "utility" patent. (The notable exception is the software industry, where material to be licensed is often protected by copyright.) In the United States, utility patents may be granted for inventions related to composition of matter and/or a method and/or a use. They may not be granted for ideas per se, mathematical formulas, laws of nature, and anything repugnant to morals and public policy. Within subject matters potentially protectable by patent, protection will be granted only when the intellectual property claimed meets additional criteria of usefulness, novelty, and non-obviousness to those skilled in the relevant art. (The tests for whether these criteria have been met are based on judgement. When a low threshold is used, patents are easier to get, and vice-versa (Hall and Harhoff 2004).) +={Hall, B.;Harhoff, D.} + +The real-world value of patent protection has been studied for more than 40 years. 
Various researchers have found that, with a few exceptions, innovators do /{not}/ think that patents are very useful either for excluding imitators or for capturing royalties in most industries. (Fields generally cited as exceptions are pharmaceuticals, chemicals, and chemical processes, where patents do enable markets for technical information (Arora et al. 2001).) Most respondents also say that the availability of patent protection does not induce them to invest more in research and development than they would if patent protection did not exist. Taylor and Silberston (1973) reported that 24 of 32 firms said that only 5 percent or less of their R&D expenditures were dependent on the availability of patent protection. Levin et al. (1987) surveyed 650 R&D executives in 130 different industries and found that all except respondents from the chemical and pharmaceutical industries judged patents to be "relatively ineffective." Similar findings have been reported by Mansfield (1968, 1985), by Cohen et al. (2000, 2002), by Arundel (2001), and by Sattler (2003). +={Arora, A.;Arundel, A.;Cohen, W.;Gambardella, A.;Levin, R.;Sattler, H.;Silberston, Z.;Taylor, C.} + +% Slaughter, S., 83--85 + +% ={Fosfuri, A.;Goto, A.} + +Despite recent governmental efforts to strengthen patent enforcement, a comparison of survey results indicates only a modest increase between 1983 and 1994 in large firms' evaluations of patents' effectiveness in protecting innovations or promoting innovation investments. Of course, there are notable exceptions: some firms, including IBM and TI, report significant income from the licensing of their patented technologies. + +% ={IBM} + +Obtaining a patent typically costs thousands of dollars, and it can take years (Harhoff, Henkel, and von Hippel 2003). This makes patents especially impractical for many individual user-innovators, and also for small and medium-size firms of limited means. 
As a stark example, it is hard to imagine that an individual user who has developed an innovation in sports equipment would find it appealing to invest in a patent and in follow-on efforts to find a licensee and enforce payment. The few that do attempt this, as Shah (2000) has shown, seldom gain any return from licensees as payment for their time and expenditures. +={Harhoff, D.;Henkel, J.;Shah, S.;Intellectual property rights:licensing of+1} + +Copyright is a low-cost and immediate form of legal protection that applies to original writings and images ranging from software code to movies. Authors do not have to apply for copyright protection; it "follows the author's pen across the page." Licensing of copyrighted works is common, and it is widely practiced by commercial software firms. When one buys a copy of a non-custom software product, one is typically buying only a license to use the software, not buying the intellectual property itself. However, copyright protection is also limited in an important way. Only the specific original writing itself is protected, not the underlying invention or ideas. As a consequence, copyright protections can be circumvented. For example, those who wish to imitate the function of a copyrighted software program can do so by writing new software code to implement that function. +={Intellectual property rights:copyrights and;Free revealing of innovation information:copyright protection and} + + +Given the above, we may conclude that in practice little profit is being sacrificed by many user-innovator firms or individuals that choose to forgo the possibility of legally protecting their innovations in favor of free revealing. 
+ +!_ Positive Incentives for Free Revealing +={Free revealing of innovation information:incentives for+7;Information commons+7} + +As was noted earlier, when we say that an innovator "freely reveals" proprietary information we mean that all existing and potential intellectual property rights to that information are voluntarily given up by that innovator and that all interested parties are given access to it---the information becomes a public good. These conditions can often be met at a very low cost. For example, an innovator can simply post information about the innovation on a website without publicity, so those potentially interested must discover it. Or a firm that has developed a novel process machine can agree to give a factory tour to any firm or individual that thinks to ask for one, without attempting to publicize the invention or the availability of such tours in any way. However, it is clear that many innovators go beyond basic, low-cost forms of free revealing. They spend significant money and time to ensure that their innovations are seen in a favorable light, and that information about them is effectively and widely diffused. Writers of computer code may work hard to eliminate all bugs and to document their code in a way that is very easy for potential adopters to understand before freely revealing it. Plant owners may repaint their plant, announce the availability of tours at a general industry meeting, and then provide a free lunch for their visitors. + +Innovators' /{active}/ efforts to diffuse information about their innovations suggest that there are positive, private rewards to be obtained from free revealing. A number of authors have considered what these might be. Allen (1983) proposed that reputation gained for a firm or for its managers might offset a reduction in profits for the firm caused by free revealing. 
Raymond (1999) and Lerner and Tirole (2002) elaborated on this idea when explaining free revealing by contributors to open source software development projects. Free revealing of high-quality code, they noted, can increase a programmer's reputation with his peers. This benefit can lead to other benefits, such as an increase in the programmer's value on the job market. Allen has argued that free revealing might have effects that actually increase a firm's profits if the revealed innovation is to some degree specific to assets owned by the innovator (see also Hirschleifer 1971). +={Allen, R.+1;Hirschleifer, J.;Lerner, J.;Raymond, E.;Tirole, J.;Free revealing of innovation information:and open source software;Open source software:free revealing and} + +Free revealing may also increase an innovator's profit in other ways. When an innovating user freely reveals an innovation, the direct result is to increase the diffusion of that innovation relative to what it would be if the innovation were either licensed at a fee or held secret. The innovating user may then benefit from the increase in diffusion via a number of effects. Among these are network effects. (The classic illustration of a network effect is that the value of each telephone goes up as more are sold, because the value of a phone is strongly affected by the number of others who can be contacted in the network.) In addition, and very importantly, an innovation that is freely revealed and adopted by others can become an informal standard that may preempt the development and/or commercialization of other versions of the innovation. If, as Allen suggested, the innovation that is revealed is designed in a way that is especially appropriate to conditions unique to the innovator, this can result in creating a permanent source of advantage for that innovator. + +Being first to reveal a certain type of innovation increases a user firm's chances of having its innovation widely adopted, other things being equal. 
This may induce innovators to race to reveal first. Firms engaged in a patent race may disclose information voluntarily if the profits from success do not go only to the winner of the race. If being second quickly is preferable to being first relatively late, there will be an incentive for voluntary revealing in order to accelerate the race (de Fraja 1993).
+={de Fraja, G.}
+
+Incentives to freely reveal have been most deeply explored in the specific case of open source software projects. Students of the open source software development process report that innovating users have a number of motives for freely revealing their code to open source project managers and open source code users in general. If they freely reveal, others can debug and improve upon the modules they have contributed, to everyone's benefit. They are also motivated to have their improvement incorporated into the standard version of the open source software that is generally distributed by the volunteer open source user organization, because it will then be updated and maintained without further effort on the innovator's part. This volunteer organization is the functional equivalent of a manufacturer with respect to inducing manufacturer improvements, because a user-developed improvement will be assured of inclusion in new "official" software releases only if it is approved and adopted by the coordinating user group. Innovating users also report being motivated to freely reveal their code under a free or open source license by a number of additional factors. These include giving support to open code and "giving back" to those whose freely revealed code has been of value to them (Lakhani and Wolf 2005).
+={Lakhani, K.;Wolf, B.;Free revealing of innovation information:and open source software+4;Open source software:free revealing and+4}
+
+By freely revealing information about an innovative product or process, a user makes it possible for manufacturers to learn about that innovation. 
Manufacturers may then improve upon it and/or offer it at a price lower than users' in-house production costs (Harhoff et al. 2003). When the improved version is offered for sale to the general market, the original user-innovator (and other users) can buy it and gain from in-house use of the improvements. For example, consider that manufacturers often convert user-developed innovations ("home-builts") into a much more robust and reliable form when preparing them for sale on the commercial market. Also, manufacturers offer related services, such as field maintenance and repair programs, that innovating users must otherwise provide for themselves. 
+={Harhoff, D.+1}
+
+A variation of this argument applies to the free revealing among competing manufacturers documented by Henkel (2003). Competing developers of embedded Linux systems were creating software that was specifically designed to run the hardware products of their specific clients. Each manufacturer could freely reveal this equipment-specific code without fear of direct competitive repercussions: it was applicable mainly to specific products made by a manufacturer's client, and it was less valuable to others. At the same time, all would jointly benefit from free revealing of improvements to the underlying embedded Linux code base, upon which they all build their proprietary products. After all, the competitive advantages of all their products depended on this code base's being equal to or better than the proprietary software code used by other manufacturers of similar products. Additionally, Linux software was a complement to hardware that many of the manufacturers in Henkel's sample also sold. Improved Linux software would likely increase sales of their complementary hardware products. (Complement suppliers' incentives to innovate have been modeled by Harhoff (1996).) 
+={Linux;Henkel, J.} + +!_ Free Revealing and Reuse +={Free revealing of innovation information+2} + +Of course, free revealing is of value only if others (re)use what has been revealed. It can be difficult to track what visitors to an information commons take away and reuse, and there is as yet very little empirical information on this important matter. Valuable forms of reuse range from the gaining of general ideas of development paths to pursue or avoid to the adoption of specific designs. For example, those who download software code from an open source project repository can use it to learn about approaches to solving a particular software problem and/or they may reuse portions of the downloaded code by inserting it directly into a software program of their own. Von Krogh et al. (2004) studied the latter type of code reuse in open source software and found it very extensive. Indeed, they report that /{most}/ of the lines of software code in the projects they studied were taken from the commons of other open source software projects and software libraries and reused. +={von Krogh, G.+10} + +% Spaeth, S., 88,? + +In the case of academic publications, we see evidence that free revealing does increase reuse---a matter of great importance to academics. A citation is an indicator that information contained in an article has been reused: the article has been read by the citing author and found useful enough to draw to readers' attention. Recent empirical studies are finding that articles to which readers have open access---articles available for free download from an author's website, for example---are cited significantly more often than are equivalent articles that are available only from libraries or from publishers' fee-based websites. Antelman (2004) finds an increase in citations ranging from 45 percent in philosophy to 91 percent in mathematics. 
She notes that "scholars in diverse disciplines are adopting open-access practices at a surprisingly high rate and are being rewarded for it, as reflected in [citations]." +={Antelman, K.} + +!_ Implications for Theory + +We have seen that in practice free revealing may often be the best practical course of action for innovators. How can we tie these observations back to theory, and perhaps improve theory as a result? At present there are two major models that characterize how innovation gets rewarded. The private investment model is based on the assumption that innovation will be supported by private investors expecting to make a profit. To encourage private investment in innovation, society grants innovators some limited rights to the innovations they generate via patents, copyrights, and trade secrecy laws. These rights are intended to assist innovators in getting private returns from their innovation-related investments. At the same time, the monopoly control that society grants to innovators and the private profits they reap create a loss to society relative to the free and unfettered use by all of the knowledge that the innovators have created. Society elects to suffer this social loss in order to increase innovators' incentives to invest in the creation of new knowledge (Arrow 1962; Dam 1995). +={Arrow, K.;Dam, K.;Intellectual property rights:copyrights and|trade secrets and;Free revealing of innovation information:copyright protection and|patents and|trade secrecy and} + +The second major model for inducing innovation is termed the collective action model. It applies to the provision of public goods, where a public good is defined by its non-excludability and non-rivalry: if any user consumes it, it cannot be feasibly withheld from other users, and all consume it on the same terms (Olson 1967). 
The collective action model assumes that innovators are /{required}/ to relinquish control of knowledge or other assets they have developed to a project and so make them a public good. This requirement enables collective action projects to avoid the social loss associated with the restricted access to knowledge of the private investment model. At the same time, it creates problems with respect to recruiting and motivating potential contributors. Since contributions to a collective action project are a public good, users of that good have the option of waiting for others to contribute and then free riding on what they have done (Olson 1967). +={Olson, M.+1;Free revealing of innovation information:collective action model for+6;Social welfare:free revealing and} + +The literature on collective action deals with the problem of recruiting contributors to a task in a number of ways. Oliver and Marwell (1988) and Taylor and Singleton (1993) predict that the description of a project's goals and the nature of recruiting efforts should matter a great deal. Other researchers argue that the creation and deployment of selective incentives for contributors is essential to the success of collective action projects. For example, projects may grant special credentials to especially productive project members (Friedman and McAdam 1992; Oliver 1980). The importance of selective incentives suggests that small groups will be most successful at executing collective action projects. In small groups, selective incentives can be carefully tailored for each group member and individual contributions can be more effectively monitored (Olson 1967; Ostrom 1998). +={Friedman, D.;Marwell, G.;McAdam, D.;Oliver, P.;Ostrom, E.;Singleton, S.;Taylor, M.} + +Interestingly, successful open source software projects do not appear to follow any of the guidelines for successful collective action projects just described. 
With respect to project recruitment, goal statements provided by successful open source software projects vary from technical and narrow to ideological and broad, and from precise to vague and emergent (for examples, see goal statements posted by projects hosted on Sourceforge.net).~{ As a specific example of a project with an emergent goal, consider the beginnings of the Linux open source software project. In 1991, Linus Torvalds, a student in Finland, wanted a Unix operating system that could be run on his PC, which was equipped with a 386 processor. Minix was the only software available at that time but it was commercial, closed source, and it traded at US$150. Torvalds found this too expensive, and started development of a Posix-compatible operating system, later known as Linux. Torvalds did not immediately publicize a very broad and ambitious goal, nor did he attempt to recruit contributors. He simply expressed his private motivation in a message he posted on July 3, 1991, to the USENET newsgroup comp.os.minix (Wayner 2000): /{Hello netlanders, Due to a project I'm working on (in minix), I'm interested in the posix standard definition.}/ [Posix is a standard for UNIX designers. A software using POSIX is compatible with other UNIX-based software.] /{Could somebody please point me to a (preferably) machine-readable format of the latest posix-rules? Ftp-sites would be nice.}/ In response, Torvalds got several return messages with Posix rules and people expressing a general interest in the project. By the early 1992, several skilled programmers contributed to Linux and the number of users increased by the day. Today, Linux is the largest open source development project extant in terms of number of developers. }~ Further, such projects may engage in no active recruiting beyond simply posting their intended goals and access address on a general public website customarily used for this purpose (for examples, see the Freshmeat.net website). 
Also, projects have shown by example that they can be successful even if large groups---perhaps thousands---of contributors are involved. Finally, open source software projects seem to expend no effort to discourage free riding. Anyone is free to download code or seek help from project websites, and no apparent form of moral pressure is applied to make a compensating contribution (e.g., "If you benefit from this code, please also contribute . . ."). +={Free revealing of innovation information:open source software and} + +What can explain these deviations from expected practice? What, in other words, can explain free revealing of privately funded innovations and enthusiastic participation in projects to produce a public good? From the theoretical perspective, Georg von Krogh and I think the answer involves revisiting and easing some of the basic assumptions and constraints conventionally applied to the private investment and collective action models of innovation. Both, in an effort to offer "clean" and simple models for research, have excluded from consideration a very rich and fertile middle ground where incentives for private investment and collective action can coexist, and where a "private-collective" innovation model can flourish. More specifically, a private-collective model of innovation occupies the middle ground between the private investment model and the collective action model by: +={Free revealing of innovation information:private-collective model for+3;Private-collective model+3;Social welfare:private-collective model and+3} + +_* Eliminating the assumption in private investment models that free revealing of innovations developed with private funds will represent a loss of private profit for the innovator and so will not be engaged in voluntarily. Instead the private-collective model proposes that under common conditions free revealing of proprietary innovations may increase rather than decrease innovators' private profit. 
+ +_* Eliminating the assumption in collective action models that a free rider obtains benefits from the completed public good that are equal to those a contributor obtains. Instead, the private-collective model proposes that contributors to a public good can /{inherently}/ obtain greater private benefits than free riders. These provide incentives for participation in collective action projects that need not be managed by project personnel (von Hippel and von Krogh 2003). +={von Hippel, E.} + +In summation: Innovations developed at private cost are often revealed freely, and this behavior makes economic sense for participants under commonly encountered conditions. A private-collective model of innovation incentives can explain why and when knowledge created by private funding may be offered freely to all. When the conditions are met, society appears to have the best of both worlds---new knowledge is created by private funding and then freely revealed to all. + +1~ 7 Innovation Communities +={Innovation communities+56:innovation and+56} + +It is now clear that users often innovate, and that they often freely reveal their innovations. But what about informal cooperation among users? What about /{organized}/ cooperation in development of innovations and other matters? The answer is that both flourish among user-innovators. Informal user-to-user cooperation, such as assisting others to innovate, is common. Organized cooperation in which users interact within communities, is also common. Innovation communities are often stocked with useful tools and infrastructure that increase the speed and effectiveness with which users can develop and test and diffuse their innovations. + +In this chapter, I first show that user innovation is a widely distributed process and so can be usefully drawn together by innovation communities. I next explore the valuable functions such communities can provide. 
I illustrate with a discussion of free and open source software projects, a very successful form of innovation community in the field of software development. Finally, I point out that innovation communities are by no means restricted to the development of information products such as software, and illustrate with the case of a user innovation community specializing in the development of techniques and equipment used in the sport of kitesurfing. +={Free software;Kitesurfing;Open source software:innovation communities and} + +!_ User Innovation Is Widely Distributed +={Users:innovation and+8;Innovation:distributed process of+8} + +When users' needs are heterogeneous and when the information drawn on by innovators is sticky, it is likely that product-development activities will be widely distributed among users, rather than produced by just a few prolific user-innovators. It should also be the case that different users will tend to develop different innovations. As was shown in chapter 5, individual users and user firms tend to develop innovations that serve their particular needs, and that fall within their individual "low-cost innovation niches." For example, a mountain biker who specializes in jumping from high platforms and who is also an orthopedic surgeon will tend to develop innovations that draw on both of these types of information: he might create a seat suspension that reduces shock to bikers' spines upon landing from a jump. Another mountain biker specializing in the same activity but with a different background---say aeronautical engineering---is likely to draw on this different information to come up with a different innovation. From the perspective of Fleming (2001), who has studied innovations as consisting of novel combinations of pre-existing elements, such innovators are using their membership in two distinct communities to combine previously disparate elements. 
Baldwin and Clark (2003) and Henkel (2004a) explore this type of situation in theoretical terms. +={Baldwin, C.;Clark, K.;Fleming, L.;Henkel, J.;User need;Custom products:heterogeneity of user needs and+3;Sticky information:innovation and+3;Users:low-cost innovation niches of+3;Scientific instruments;Sporting equipment:innovation communities and;Mountain biking;Local information+1;Users:innovate-or-buy decisions by} + +The underlying logic echoes that offered by Eric Raymond regarding "Linus's Law" in software debugging. In software, discovering and repairing subtle code errors or bugs can be very costly (Brooks 1979). However, Raymond argued, the same task can be greatly reduced in cost and also made faster and more effective when it is opened up to a large community of software users that each may have the information needed to identify and fix some bugs. Under these conditions, Raymond says, "given a large enough beta tester and co-developer base, almost every problem will be characterized quickly and the fix obvious to someone. Or, less formally, `given enough eyeballs, all bugs are shallow."' He explains: "More users find more bugs because adding more users adds more ways of stressing the program. . . . Each [user] approaches the task of bug characterization with a slightly different perceptual set and analytical toolkit, a different angle on the problem. So adding more beta-testers . . . increases the probability that someone's toolkit will be matched to the problem in such a way that the bug is shallow to /{that person}/." (1999, pp. 41--44) +={Brooks, F.;Raymond, E.;Free software;Open source software:innovation communities and;Innovation communities:and sources of innovation+2} + +The analogy to distributed user innovation is, of course, that each user has a different set of innovation-related needs and other assets in place which makes a particular type of innovation low-cost ("shallow") to /{that user}/. 
The assets of /{some}/ user will then generally be found to be a just-right fit to many innovation development problems. (Note that this argument does not mean that /{all}/ innovations will be cheaply done by users, or even done by users at all. In essence, users will find it cheaper to innovate when manufacturers' economies of scale with respect to product development are more than offset by the greater scope of innovation assets held by the collectivity of individual users.) + +Available data support these expectations. In chapter 2 we saw evidence that users tended to develop very different innovations. To test whether commercially important innovations are developed by just a few users or by many, I turn to studies documenting the functional sources of important innovations later commercialized. As is evident in table 7.1, most of the important innovations attributed to users in these studies were done by /{different}/ users. In other words, user innovation does tend to be widely distributed in a world characterized by users with heterogeneous needs and heterogeneous stocks of sticky information. +={Innovation:functional sources of} + +!_ Table 7.1 +User innovation is widely distributed, with few users developing more than one major innovation. NA: data not available. +={Riggs, W.;Shah, S.;von Hippel, E.;Scientific instruments;Sporting equipment:innovation communities and} + +Number of users developing this number of major innovations + +table{~h c7; 30; 10; 10; 10; 10; 10; 20; + +~ +1 +2 +3 +6 +NA +Sample (n) + +Scientific Instruments^{a}^ +28 +0 +1 +0 +1 +32 + +Scientific Instruments^{b}^ +20 +1 +0 +1 +0 +28 + +Process equipment^{c}^ +19 +1 +0 +0 +8 +29 + +Sports equipment^{d}^ +7 +0 +0 +0 +0 +7 + +}table + +a. Source: von Hippel 1988, appendix: GC, TEM, NMR Innovations.<:br> +b. Source: Riggs and von Hippel, Esca and AES.<:br> +c. Source: von Hippel 1988, appendix: Semiconductor and pultrusion process equipment innovations.<:br> +d. 
Source: Shah 2000, appendix A: skateboarding, snowboarding, and windsurfing innovations. + +!_ Innovation Communities +={Innovation communities:and sources of innovation} + +User-innovators may be generally willing to freely reveal their information. However, as we have seen, they may be widely distributed and each may have only one or a few innovations to offer. The practical value of the "freely revealed innovation commons" these users collectively offer will be increased if their information is somehow made conveniently accessible. This is one of the important functions of "innovation communities." +={Information commons+3} + +I define "innovation communities" as meaning nodes consisting of individuals or firms interconnected by information transfer links which may involve face-to-face, electronic, or other communication. These can, but need not, exist within the boundaries of a membership group. They often do, but need not, incorporate the qualities of communities for participants, where "communities" is defined as meaning "networks of interpersonal ties that provide sociability, support, information, a sense of belonging, and social identity" (Wellman et al. 2002, p. 4).~{ When they do not incorporate these qualities, they would be more properly referred to as networks---but communities is the term commonly used, and I follow that practice here. }~ +={Wellman, B.} + +Innovation communities can have users and/or manufacturers as members and contributors. They can flourish when at least some innovate and voluntarily reveal their innovations, and when others find the information revealed to be of interest. In previous chapters, we saw that these conditions do commonly exist with respect to user-developed innovations: users innovate in many fields, users often freely reveal, and the information revealed is often used by manufacturers to create commercial products---a clear indication many users, too, find this information of interest.
+ +Innovation communities are often specialized, serving as collection points and repositories for information related to narrow categories of innovations. They may consist only of information repositories or directories in the form of physical or virtual publications. For example, userinnovation.mit.edu is a specialized website where researchers can post articles on their findings and ideas related to innovation by users. Contributors and non-contributors can freely access and browse the site as a convenient way to find such information. + +Innovation communities also can offer additional important functions to participants. Chat rooms and email lists with public postings can be provided so that contributors can exchange ideas and provide mutual assistance. Tools to help users develop, evaluate, and integrate their work can also be provided to community members---and such tools are often developed by community members themselves. + +All the community functionality just mentioned and more is visible in communities that develop free and open source software programs. The emergence of this particular type of innovation community has also done a great deal to bring the general phenomenon to academic and public notice, and so I will describe them in some detail. I first discuss the history and nature of free and open source software itself (the product). Next I outline key characteristics of the free and open source software development projects typically used to create and maintain such software (the community-based development process). +={Free software+10;Innovation communities:open source software and+22;Open source software:innovation communities and+22} + +!_ Open Source Software +={Open source software:innovation and+21} + +In the early days of computer programming, commercial "packaged" software was a rarity---if you wanted a particular program for a particular purpose, you typically wrote the code yourself or hired someone to write it for you. 
Much of the software of the 1960s and the 1970s was developed in academic and corporate laboratories by scientists and engineers. These individuals found it a normal part of their research culture to freely give and exchange software they had written, to modify and build on one another's software, and to freely share their modifications. This communal behavior became a central feature of "hacker culture." (In communities of open source programmers, "hacker" is a positive term that is applied to talented and dedicated programmers.~{ !{hacker}! n. [originally, someone who makes furniture with an axe] 1. A person who enjoys exploring the details of programmable systems and how to stretch their capabilities, as opposed to most users, who prefer to learn only the minimum necessary. 2. One who programs enthusiastically (even obsessively) or who enjoys programming rather than just theorizing about programming. 3. A person capable of appreciating !{hack value}!. 4. A person who is good at programming quickly. . . . 8. [deprecated] A malicious meddler who tries to discover sensitive information by poking around. Hence /{password hacker}/, /{network hacker}/. The correct term for this sense is !{cracker}! (Raymond 1996). }~ ) +={Hackers+4;Raymond, E.} + +In 1969, the Defense Advanced Research Projects Agency, a part of the US Department of Defense, established the ARPANET, the first transcontinental high-speed computer network. This network eventually grew to link hundreds of universities, defense contractors, and research laboratories. Later succeeded by the Internet, it also allowed hackers to exchange software code and other information widely, easily, and cheaply---and also enabled them to spread hacker norms of behavior. + +The communal hacker culture was very strongly present among a group of programmers---software hackers---housed at MIT's Artificial Intelligence Laboratory in the 1960s and the 1970s (Levy 1984). 
In the 1980s this group received a major jolt when MIT licensed some of the code created by its hacker employees to a commercial firm. This firm, in accordance with normal commercial practice, then promptly restricted access to the "source code"~{ Source code is a sequence of instructions to be executed by a computer to accomplish a program's purpose. Programmers write computer software in the form of source code, and also document that source code with brief written explanations of the purpose and design of each section of their program. To convert a program into a form that can actually operate a computer, source code is translated into machine code using a software tool called a compiler. The compiling process removes program documentation and creates a binary version of the program---a sequence of computer instructions consisting only of strings of ones and zeros. Binary code is very difficult for programmers to read and interpret. Therefore, programmers or firms that wish to prevent others from understanding and modifying their code will release only binary versions of the software. In contrast, programmers or firms that wish to enable others to understand and update and modify their software will provide them with its source code. (Moerke 2000, Simon 1996). }~ of that software, and so prevented non-company personnel---including the MIT hackers who had been instrumental in developing it---from continuing to use it as a platform for further learning and development. +={Levy, S.;MIT Artificial Intelligence Laboratory+1;Innovation communities:open source software and;Open source software:innovation communities and} + +Richard Stallman, a brilliant programmer in MIT's Artificial Intelligence Laboratory, was especially distressed by the loss of access to communally developed source code. 
He also was offended by a general trend in the software world toward development of proprietary software packages and the release of software in forms that could not be studied or modified by others. Stallman viewed these practices as morally wrong impingements on the rights of software users to freely learn and create. In 1985, in response, he founded the Free Software Foundation and set about to develop and diffuse a legal mechanism that could preserve free access for all to the software developed by software hackers. Stallman's pioneering idea was to use the existing mechanism of copyright law to this end. Software authors interested in preserving the status of their software as "free" software could use their own copyright to grant licenses on terms that would guarantee a number of rights to all future users. They could do this by simply affixing a standard license to their software that conveyed these rights. The basic license developed by Stallman to implement this seminal idea was the General Public License or GPL (sometimes referred to as copyleft, in a play on the word "copyright"). Basic rights transferred to those possessing a copy of free software include the right to use it at no cost, the right to study its source code, the right to modify it, and the right to distribute modified or unmodified versions to others at no cost. Licenses conveying similar rights were developed by others, and a number of such licenses are currently used in the open source field. Free and open source software licenses do not grant users the full rights associated with free revealing as that term was defined earlier. Those who obtain the software under a license such as the GPL are restricted from certain practices. 
For example, they cannot incorporate GPL software into proprietary software that they then sell.~{ See www.gnu.org/licenses/licenses.html#GPL }~ Indeed, contributors of code to open source software projects are very concerned with enforcing such restrictions in order to ensure that their code remains accessible to all (O'Mahony 2003). +={Stallman, R.+2;Intellectual property rights:copyrights and|licensing of+1} + +The idea of free software did not immediately become mainstream, and industry was especially suspicious of it. In 1998, Bruce Perens and Eric Raymond agreed that a significant part of the problem resided in Stallman's term "free" software, which might understandably have an ominous ring to the ears of businesspeople. Accordingly, they, along with other prominent hackers, founded the open source software movement (Perens 1999). Open source software uses the licensing practices pioneered by the free software movement. It differs from that movement primarily on philosophical grounds, preferring to emphasize the practical benefits of its licensing practices over issues regarding the moral importance of granting users the freedoms offered by both free and open source software. The term "open source" is now generally used by both practitioners and scholars to refer to free or open source software, and that is the term I use in this book. +={Perens, B.;Raymond, E.} + +Open source software has emerged as a major cultural and economic phenomenon. The number of open source software projects has been growing rapidly. In mid 2004, a single major infrastructure provider and repository for open source software projects, Sourceforge.net,~{ http://www.sourceforge.net }~ hosted 83,000 projects and had more than 870,000 registered users. A significant amount of software developed by commercial firms is also being released under open source licenses. 
+ +!_ Open Source Software Development Projects + +Software can be termed "open source" independent of how or by whom it has been developed: the term denotes only the type of license under which it is made available. However, the fact that open source software is freely accessible to all has created some typical open source software development practices that differ greatly from commercial software development models---and that look very much like the "hacker culture" behaviors described above. +={Hackers+1} + +Because commercial software vendors typically wish to sell the code they develop, they sharply restrict access to the source code of their software products to firm employees and contractors. The consequence of this restriction is that only insiders have the information required to modify and improve that proprietary code further (Meyer and Lopez 1995; Young, Smith, and Grimm 1996; Conner and Prahalad 1996). In sharp contrast, all are offered free access to the source code of open source software if that code is distributed by its authors. In early hacker days, this freedom to learn and use and modify software was exercised by informal sharing and co-development of code---often by the physical sharing and exchange of computer tapes and disks on which the code was recorded. In current Internet days, rapid technological advances in computer hardware and software and networking technologies have made it much easier to create and sustain a communal development style on ever-larger scales. Also, implementing new projects is becoming progressively easier as effective project design becomes better understood, and as prepackaged infrastructural support for such projects becomes available on the Web. +={Conner, K.;Grimm, C.;Meyer, M.;Lopez, L.;Prahalad, C.;Smith, G.;Young, G.} + +Today, an open source software development project is typically initiated by an individual or a small group seeking a solution to an individual's or a firm's need. Raymond (1999, p. 
32) suggests that "every good work of software starts by scratching a developer's personal itch" and that "too often software developers spend their days grinding away for pay at programs they neither need nor love. But not in the (open source) world. . . ." A project's initiators also generally become the project's "owners" or "maintainers" who take on responsibility for project management.~{ "The owner(s) [or `maintainers'] of an open source software project are those who have the exclusive right, recognized by the community at large, to redistribute modified versions. . . . According to standard open source licenses, all parties are equal in the evolutionary game. But in practice there is a very well-recognized distinction between `official' patches [changes to the software], approved and integrated into the evolving software by the publicly recognized maintainers, and `rogue' patches by third parties. Rogue patches are unusual and generally not trusted." (Raymond 1999, p. 89) }~ Early on, this individual or group generally develops a first, rough version of the code that outlines the functionality envisioned. The source code for this initial version is then made freely available to all via downloading from an Internet website established by the project. The project founders also set up infrastructure for the project that those interested in using or further developing the code can use to seek help, provide information or provide new open source code for others to discuss and test. In the case of projects that are successful in attracting interest, others do download and use and "play with" the code---and some of these do go on to create new and modified code. Most then post what they have done on the project website for use and critique by any who are interested. New and modified code that is deemed to be of sufficient quality and of general interest by the project maintainers is then added to the authorized version of the code. 
In many projects the privilege of adding to the authorized code is restricted to only a few trusted developers. These few then serve as gatekeepers for code written by contributors who do not have such access (von Krogh and Spaeth 2002). +={Spaeth, S.;von Krogh, G.;Raymond, E.} + +Critical tools and infrastructure available to open source software project participants include email lists for specialized purposes that are open to all. Thus, there is a list where code users can report software failures ("bugs") that they encounter during field use of the software. There is also a list where those developing the code can share ideas about what would be good next steps for the project, good features to add, etc. All of these lists are open to all and are also publicly archived, so anyone can go back and learn what opinions were and are on a particular topic. Also, programmers contributing to open source software projects tend to have essential tools, such as specific software languages, in common. These are generally not specific to a single project, but are available on the web. Basic toolkits held in common by all contributors tend to greatly ease interactions. Also, open source software projects have version-control software that allows contributors to insert new code contributions into the existing project code base and test them to see if the new code causes malfunctions in existing code. If so, the tool allows easy reversion to the status quo ante. This makes "try it and see" testing much more practical, because much less is at risk if a new contribution inadvertently breaks the code. Toolkits used in open source projects have been evolved through practice and are steadily being improved by user-innovators. Individual projects can now start up using standard infrastructure sets offered by sites such as Sourceforge.net. +={Toolkits:open source software and} + +Two brief case histories will help to further convey the flavor of open source software development.
+ +!_ Apache Web Server Software +={Apache web server software+3;Innovation communities:Apache web server software and+3} + +Apache web server software is used on web server computers that host web pages and provide appropriate content as requested by Internet browsers. Such computers are a key element of the Internet-based World Wide Web infrastructure. + +The web server software that evolved into Apache was developed by University of Illinois undergraduate Rob McCool for, and while working at, the National Center for Supercomputing Applications (NCSA). The source code as developed and periodically modified by McCool was posted on the web so that users at other sites could download it, use it, modify it, and develop it further. When McCool departed NCSA in mid 1994, a small group of webmasters who had adopted his web server software for their own sites decided to take on the task of continued development. A core group of eight users gathered all documentation and bug fixes and issued a consolidated patch. This "patchy" web server software evolved over time into Apache. Extensive user feedback and modification yielded Apache 1.0, released on December 1, 1995. +={McCool, Rob} + +In 4 years, after many modifications and improvements contributed by many users, Apache became the most popular web server software on the Internet, garnering many industry awards for excellence. Despite strong competition from commercial software developers such as Microsoft and Netscape, it is currently used by over 60 percent of the world's millions of websites. Modification and updating of Apache by users and others continues, with the release of new versions being coordinated by a central group of 22 volunteers. +={Microsoft} + +!_ Fetchmail---An Internet Email Utility Program +={Fetchmail+4;Innovation communities:fetchmail and+4} + +Fetchmail is an Internet email utility program that "fetches" email from central servers to a local computer.
The open source project to develop, maintain, and improve this program was led by Eric Raymond (1999). +={Raymond, E.+3} + +Raymond first began to puzzle about the email delivery problem in 1993 because he was personally dissatisfied with then-existing solutions. "What I wanted," Raymond recalled (1999, p. 31), "was for my mail to be delivered on snark, my home system, so that I would be notified when it arrived and could handle it using all my local tools." Raymond decided to try and develop a better solution. He began by searching databases in the open source world for an existing, well-coded utility that he could use as a development base. He knew it would be efficient to build on others' related work if possible, and in the world of open source software (then generally called free software) this practice is understood and valued. Raymond explored several candidate open source programs, and settled on one in small-scale use called "popclient." He developed a number of improvements to the program and proposed them to the then maintainer of popclient. It turned out that this individual had lost interest in working further on the program, and so his response to Raymond's suggestions was to offer his role to Raymond so that he could evolve the popclient further as he chose. + +Raymond accepted the role of popclient's maintainer, and over the next months he improved the program significantly in conjunction with advice and suggestions from other users. He carefully cultivated his more active beta list of popclient users by regularly communicating with them via messages posted on a public electronic bulletin board set up for that purpose. Many responded by volunteering information on bugs they had found and perhaps fixed, and by offering improvements they had developed for their own use.
The quality of these suggestions was often high because "contributions are received not from a random sample, but from people who are interested enough to use the software, learn about how it works, attempt to find solutions to the problems they encounter, and actually produce an apparently reasonable fix. Anyone who passes all these filters is highly likely to have something useful to contribute." (ibid., p. 42) + +Eventually, Raymond arrived at an innovative design that he knew worked well because he and his beta list of co-developers had used it, tested it and improved it every day. Popclient (now renamed fetchmail) became standard software used by millions of users. Raymond continues to lead the group of volunteers that maintain and improve the software as new user needs and conditions dictate. + +!_ Development of Physical Products by Innovation Communities +={Innovation communities:physical products and+14} + +User innovation communities are by no means restricted to the development of information products like software. They also are active in the development of physical products, and in very similar ways. Just as in the case of communities devoted to information products, communities devoted to physical products can range from simple information exchange sites to sites well furnished with tools and infrastructure. Within sports, Franke and Shah's study illustrates relatively simple community infrastructure. Thus, the boardercross community they studied consisted of semi-professional athletes from all over the world who meet in up to 10 competitions a year in Europe, North America, and Japan. Franke and Shah report that community members knew one another well, and spent a considerable amount of time together. They also assisted one another in developing and modifying equipment for their sport. However, the community had no specialized sets of tools to support joint innovation development.
+={Franke, N.;Shah, S.;Innovation communities:sporting equipment and+2} + +More complex communities devoted to the development of physical products often look similar to open source software development communities in terms of tools and infrastructure. As an example, consider the recent formation of a community dedicated to the development and diffusion of information regarding novel kitesurfing equipment. Kitesurfing is a water sport in which the user stands on a special board, somewhat like a surfboard, and is pulled along by holding onto a large, steerable kite. Equipment and technique have evolved to the point that kites can be guided both with and against the wind by a skilled kitesurfer, and can lift rider and board many meters into the air for tens of seconds at a time. +={Innovation communities:kitesurfing and+1;Kitesurfing+1} + +Designing kites for kitesurfing is a sophisticated undertaking, involving low-speed aerodynamical considerations that are not yet well understood. Early kites for kitesurfing were developed and built by user-enthusiasts who were inventing both kitesurfing techniques and kitesurfing equipment interdependently. In about 2001, Saul Griffith, an MIT PhD student with a long-time interest in kitesurfing and kite development, decided that kite-surfing would benefit from better online community interaction. Accordingly, he created a site for the worldwide community of user-innovators in kitesurfing (www.zeroprestige.com). Griffith began by posting patterns for kites he had designed on the site and added helpful hints and tools for kite construction and use. Others were invited to download this information for free and to contribute their own if they wished. Soon other innovators started to post their own kite designs, improved construction advice for novices, and sophisticated design tools such as aerodynamics modeling software and rapid prototyping software. 
Some kitesurfers contributing innovations to the site had top-level technical skills; at least one was a skilled aerodynamicist employed by an aerospace firm. +={Griffith, S.;Zeroprestige.com} + +Note that physical products are information products during the design stage. In earlier days, information about an evolving design was encoded on large sheets of paper, called blueprints, that could be copied and shared. The information on blueprints could be understood and assessed by fellow designers, and could also be used by machinists to create the actual physical products represented. Today, designs for new products are commonly encoded in computer-aided design (CAD) files. These files can be created and seen as two-dimensional and three-dimensional renderings by designers. The designs they contain can also be subjected to automated analysis by various engineering tools to determine, for example, whether they can stand up to stresses to which they will be subjected. CAD files can then be downloaded to computer-controlled fabrication machinery that will actually build the component parts of the design. + +The example of the kitesurfing group's methods of sharing design information illustrates the close relationship between information and physical products. Initially, users in the group exchanged design ideas by means of simple sketches transferred over the Internet. Then group members learned that computerized cutters used by sail lofts to cut sails from large pieces of cloth are suited to cutting cloth for surfing kites. They also learned that sail lofts were interested in their business. Accordingly, innovation group members began to exchange designs in the form of CAD files compatible with sail lofts' cutting equipment. When a user was satisfied with a design, he would transmit the CAD file to a local sail loft for cutting. The pieces were then sewn together by the user or sent to a sewing facility for assembly. 
The total time required to convert an information product into a physical one was less than a week, and the total cost of a finished kite made in this way was a few hundred dollars---much less than the price of a commercial kite. +={Innovation communities:kitesurfing and;Kitesurfing} + +!_ User-to-User Assistance +={Innovation communities:user-to-user assistance and+9;Users:innovation communities and+9} + +Clearly, user innovation communities can offer sophisticated support to individual innovators in the form of tools. Users in these innovation communities also tend to behave in a collaborative manner. That is, users not only distribute and evaluate completed innovations; they also volunteer other important services, such as assisting one another in developing and applying innovations. + +Franke and Shah (2003) studied the frequency with which users in four sporting communities assisted one another with innovations, and found that such assistance was very common (table 7.2). They also found that those who assisted were significantly more likely to be innovators themselves (table 7.3). The level of satisfaction reported by those assisted was very high. Seventy-nine percent agreed strongly with the statement "If I had a similar problem I would ask the same people again." Jeppesen (2005) similarly found extensive user-to-user help being volunteered in the field of computer gaming. +={Franke, N.;Jeppesen, L.;Shah, S.;Innovation communities:sporting equipment and;Sporting equipment:innovation communities and+7|user-to-user assistance and+7} + +!_ Table 7.2 +Number of people from whom innovators received assistance. +={Franke, N.;Shah, S.} + +table{~h c3; 34; 33; 33; + +Number of people +Number of cases +Percentage + +0 +0 +0 + +1 +3 +6 + +2 +14 +26 + +3--5 +25 +47 + +6--10 +8 +15 + +> 10 +3 +6 + +Total +53 +100 + +}table + +Source: Franke and Shah 2003, table 4. + +!_ Table 7.3 +Innovators tended to be the ones assisting others with their innovations (p < 0.0001). 
+={Franke, N.;Shah, S.} + +table{~h c4; 40; 20; 20; 20; + +~ +Innovators +Non-innovators +Total + +Gave assistance +28 +13 +41 + +Did not give assistance +32 +115 +147 + +Total +60 +128 +~ + +}table + +Source: Franke and Shah 2003, table 7. + +Such helping activity is clearly important to the value contributed by innovation communities to community participants. Why people might voluntarily offer assistance is a subject of analysis. The answers are not fully in, but the mysteries lessen as the research progresses. An answer that appears to be emerging is that there are private benefits to assistance providers, just as there are for those who freely reveal innovations (Lakhani and von Hippel 2003). In other words, provision of free assistance may be explicable in terms of the private-collective model of innovation-related incentives discussed earlier. +={Lakhani, K.;Free revealing of innovation information:in information communities|private-collective model for;Private-collective model;Social welfare:private-collective model and} + +1~ 8 Adapting Policy to User Innovation +={Government policy+44:user innovation and+19;Innovation:and government policy+44;Manufacturers:innovation and+44;Users:government policy and+44|innovation and+44} + +Government policy makers generally wish to encourage activities that increase social welfare, and to discourage activities that reduce it. Therefore, it is important to ask about the social welfare effects of innovation by users. Henkel and von Hippel (2005) explored this matter and concluded that social welfare is likely to be higher in a world in which both users and manufacturers innovate than in a world in which only manufacturers innovate. 
={Henkel, J.;Government policy:manufacturer innovation and+1;Manufacturers:government policy and+1;Government policy:social welfare and+21;Social welfare:government policy and+4;Users:social welfare and+4}
Although there clearly is some recycling of knowledge from failed projects to successful ones, much of the investment in product development is highly specific. This high failure rate therefore represents a huge inefficiency in the conversion of R&D investment to useful output, and a corresponding reduction in social welfare. +={Balachandra, R;Barclay, I.;Elrod, T.;Kelman, A.;Friar, J.;Mansfield, E.+1;Poolton, J.;Redmond, W.;Wagner, S.+1} + +% Robertson, A., 108 + +Research indicates that the major reason for the commercial failure of manufacturer-developed products is poor understanding of users' needs by manufacturer-innovators. The landmark SAPPHO study showed this in a very clear and convincing way. This study was based on a sample of 31 product pairs. Members of each pair were selected to address the same function and market. (For example, one pair consisted of two "roundness meters," each developed by a separate company.) One member of each pair was a commercial success (which showed that there was a market for the product type); the other was a commercial failure. The development process for each successful and failing product was then studied in detail. The primary factor found to distinguish success from failure was that a deeper understanding of the market and the need was associated with successful projects (Achilladelis et al. 1971; Rothwell et al. 1974). A study by Mansfield and Wagner (1975) came to the same conclusion. More recent studies of information stickiness and the resulting asymmetries of information held by users and manufacturers, discussed in chapter 3, support the reasonableness of this general finding. Users are the generators of information regarding their needs. The decline in accuracy and completeness of need information after transfer from user to manufacturer is likely to be substantial because important elements of this information are likely to be sticky (von Hippel 1994; Ogawa 1998). 
+={Achilladelis, B.;Ogawa, S.;Rothwell, R.;Project SAPPHO;SAPPHO study;Sticky information:innovation and} + +Innovations developed by users can improve manufacturers' information on users' needs and so improve their new product introduction success rates. Recall from previous chapters that innovation by users is concentrated among lead users. These lead users tend, as we have seen, to develop functionally novel products and product modifications addressing their own needs at the leading edge of markets where potential sales are both small and uncertain. Manufacturers, in contrast, have poorer information on users' needs and use contexts, and will prefer to manufacture innovations for larger, more certain markets. In the short term, therefore, user innovations will tend to /{complement}/ rather than substitute for products developed by manufacturers. In the longer term, the market as a whole catches up to the needs that motivated the lead user developments, and manufacturers will begin to find production of similar innovations to be commercially attractive. At that point, innovations by lead users can provide very useful information to manufacturers that they would not otherwise have. + +As lead users develop and test their solutions in their own use environments, they learn more about the real nature of their needs. They then often freely reveal information about their innovations. Other users then may adopt the innovations, comment on them, modify and improve them, and freely reveal what they have done in turn. All of this freely revealed activity by lead users offers manufacturers a great deal of useful information about both needs embodied in solutions and about markets. Given access to a user-developed prototype, manufacturers no longer need to understand users' needs very accurately and richly. Instead they have the much easier task of replicating the function of user prototypes that users have already demonstrated are responsive to their needs. 
For example, a manufacturer seeking to commercialize a new type of surgical equipment and coming upon prototype equipment developed by surgeons need not understand precisely why the innovators want this product or even precisely how it is used; the manufacturer need only understand that many surgeons appear willing to pay for it and then reproduce the important features of the user-developed prototypes in a commercial product. +={Free revealing of innovation information:lead users and+35|users and;Surgical equipment} + +Observation of innovation by lead users and adoption by follow-on users also can give manufacturers a better understanding of the size of the potential market. Projections of product sales have been shown to be much more accurate when they are based on actual customer behavior than when they are based on potential buyers' pre-use expectations. Monitoring of field use of user-built prototypes and of their adoption by other users can give manufacturers rich data on precisely these matters and so should improve manufacturer's commercial success. In net, user innovation helps to reduce information asymmetries between users and manufacturers and so increases the efficiency of the innovation process. +={Information asymmetries;Users:information asymmetries of} + +!_ User Innovation and Provisioning Biases + +The economic literature on the impact of innovation on social welfare generally seeks to understand effects that might induce society to create too many product variations (overprovisioning) or too few (underprovisioning) from the viewpoint of net social economic income (Chamberlin 1950). Greater variety of products available for purchase is assumed to be desirable, in that it enables consumers to get more precisely what they want and/or to own a more diverse array of products. However, increased product diversity comes at a cost: smaller quantities of each product will be produced on average. 
This in turn means that development-related and production-related economies of scale are likely to be less. The basic tradeoff between variety and cost is what creates the possibility of overprovisioning or underprovisioning product variety. Innovations such as flexible manufacturing may reduce fixed costs associated with increased diversity and so shift the optimal degree of diversity upward. Nonetheless, the conflict still persists. +={Chamberlin, E.;Social welfare:innovation and+11} + +Henkel and I studied the welfare impact of adding users as a source of innovation to existing analyses of product diversity, innovation, and social welfare. Existing models uniformly contained the assumption that new products and services were supplied to the economy by manufacturers only. We found that the addition of innovation by users to these analyses largely avoids the welfare-reducing biases that had been identified. For example, consider "business stealing" (Spence 1976). This term refers to the fact that commercial manufacturers benefit by diverting business from their competitors. Since they do not take this negative externality into account, their private gain from introducing new products exceeds society's total gain, tilting the balance toward overprovision of variety. In contrast, a freely revealed user innovation may also reduce incumbents' business, but not to the innovator's benefit. Hence, innovation incentives are not socially excessive. +={Henkel, J.;Spence, M.;Free revealing of innovation information:intellectual property rights and+11|social welfare and+5;Government policy:free revealing and+10;Government policy:provisioning biases and;Intellectual property rights:free revealing and+11;Users:social welfare and+5} + +Freely revealed innovations by users are also likely to reduce deadweight loss caused by pricing of products above their marginal costs. 
(Deadweight loss is a reduction in social welfare that occurs when goods are sold at a price above their marginal cost of production.) When users make information about their innovations available for free, and if the marginal cost of revealing that information is zero, an imitator only has to bear the cost of adoption. This is statically efficient. The availability of free user innovations can also induce sellers of competing commercial offerings to reduce their prices, thus indirectly leading to another reduction in dead-weight loss. + +Reducing prices toward marginal costs can also reduce incentives to over-provision variety (Tirole 1988). +={Tirole, J.} + +Henkel and I also explored a few special situations where social welfare might be /{reduced}/ by the availability of freely revealed user innovations. One of these was the effect of reduced pricing power on manufacturers that create "platform" products. Often, a manufacturer of such a product will want to sell the platform---a razor, an ink-jet printer, a video-game player---at a low margin or a loss, and then price necessary add-ons (razor blades, ink cartridges, video games) at a much higher margin. If the possibility of freely revealed add-ons developed by users makes development of a platform unprofitable for a manufacturer, social welfare can thereby be reduced. However, it is only the razor-vs.-blade pricing scheme that may become unprofitable. Indeed, if the manufacturer makes positive margins on the platform, then the availability of user-developed add-ons can have a positive effect: it can increase the value of the platform to users, and so allow manufacturers to charge higher margins on it and/or sell more units. Jeppesen (2004) finds that this is in fact the outcome when users introduce free game modifications (called mods) operating on proprietary game software platform products (called engines) sold by game manufacturers. 
Even though the game manufacturers also sell mods commercially that compete with free user mods, many provide active support for the development and diffusion of user mods built on their proprietary game engines, because they find that the net result is increased sales and profits. +={Henkel, J.;Jeppesen, L.;Government policy:intellectual property rights and|trade secrets and;Intellectual property rights:trade secrets and} + +!_ Public Policy Choices + +If innovation by users is welfare enhancing and is also significant in amount and value, then it makes sense to consider the effects of public policy on user innovation. An important first step would be to collect better data. Currently, much innovation by users---which may in aggregate turn out to be a very large fraction of total economic investment in innovation--- goes uncounted or undercounted. Thus, innovation effort that is volunteered by users, as is the case with many contributions to open source software, is currently not recorded by governmental statistical offices. This is also the case for user innovation that is integrated with product and service production. For example, much process innovation by manufacturers occurs on the factory floor as they produce goods and simultaneously learn how to improve their production processes. Similarly, many important innovations developed by surgeons are woven into learning by doing as they deliver services to patients. +={Open source software:innovation communities and} + +Next, it will be important to review innovation-related public policies to identify and correct biases with respect to sources of innovation. On a level playing field, users will become a steadily more important source of innovation, and will increasingly substitute for or complement manufacturers' innovation-related activities. Transitions required of policy making to support this ongoing evolution are important but far from painless. 
To illustrate, we next review issues related to the protection of intellectual property, related to policies restricting product modifications, related to source-biased subsidies for R&D, and related to control over innovation diffusion channels.
At the same time, economists have long known that there will be social welfare losses associated with these grants: owners of intellectual property will generally restrict the use of their legally protected information in order to increase private profits. In other words, intellectual property rights are thought to be good for innovation and bad for competition. The consensus view has long been that the good outweighs the bad, but Foray (2004) explains that this consensus is now breaking down. Some---not all---are beginning to think that intellectual property rights are bad for innovation too in many cases. +={oray, D.;Government policy:intellectual commons and+13} + +The need to grant private intellectual property rights to achieve socially desirable levels of innovation is being questioned in the light of apparent counterexamples. Thus, as we saw earlier, open source software communities do not allow contributing innovators to use their intellectual property rights to control the use of their code. Instead, contributors use their authors' copyright to assign their code to a common pool to which all--- contributors and non-contributors alike---are granted equal access. Despite this regime, innovation seems to be flourishing. Why? As we saw in our earlier discussions of why innovators might freely reveal their innovations, researchers now understand that significant private rewards to innovation can exist independent of intellectual property rights grants. As a general principle, intellectual property rights grants should not be offered if and when developers would seek protection but would innovate without it. +={Intellectual property rights:copyrights and;Open source software:innovation communities and+1;Innovation:open source software and+1} + +The debate rages. 
Gallini and Scotchmer (2002) assert that "intellectual property is the foundation of the modern information economy" and that "it fuels the software, lifesciences and computer industries, and pervades most other products we consume." They also conclude that the positive or negative effect of intellectual property rights on innovation depends centrally on "the ease with which innovators can enter into agreements for rearranging and exercising those rights." This is precisely the rub from the point of view of those who urge that present intellectual property regimes be reconsidered: it is becoming increasingly clear that in practice rearranging and exercising intellectual property rights is often difficult rather than easy. It is also becoming clear that the protections afforded by existing intellectual property law can be strategically deployed to achieve private advantage at the expense of general innovative progress (Foray 2004). +={oray, D.;Gallini, N.;Scotchmer, S.} + +Consider an effect first pointed out by Merges and Nelson (1990) and further explored as the "tragedy of the anticommons" by Heller (1998) and Heller and Eisenberg (1998). A resource such as innovation-related information is prone to underuse---a tragedy of the anticommons---when multiple owners each have a right to exclude others and no one has an effective privilege of use. The nature of the patent grant can lead to precisely this type of situation. Patent law is so arranged that an owner of a patent is not granted the right to practice its invention---it is only granted the right to exclude others from practicing it. For example, suppose you invent and patent the chair. I then follow by inventing and patenting the rocking chair---implemented by building rockers onto a chair covered by your patent. 
In this situation I cannot manufacture a rocking chair without getting a license from you for the use of your chair patent, and you cannot build rocking chairs either without a license to my rocker patent. If we cannot agree on licensing terms, no one will have the right to build rocking chairs. +={Eisenberg, R.+1;Heller, M.+1;Merges, Robert;Nelson, R.;Government policy:user innovation and+19} + +In theory and in a world of costless transactions, people could avoid tragedies of the anticommons by licensing or trading their intellectual property rights. In practice the situation can be very different. Heller and Eisenberg point specifically to the field of biomedical research, and argue that conditions for anticommons effects do exist there. In that field, patents are routinely allowed on small but important elements of larger research problems, and upstream research is increasingly likely to be private. "Each upstream patent," Heller and Eisenberg note, "allows its owner to set up another tollbooth on the road to product development, adding to the cost and slowing the pace of downstream biomedical innovation." +={Transaction costs} + +A second type of strategic behavior based on patent rights involves investing in large portfolios of patents to create "patent thickets"---dense networks of patent claims across a wide field (Merges and Nelson 1990; Hall and Ham Ziedonis 2001; Shapiro 2001; Bessen 2003). Patent thickets create plausible grounds for patent infringement suits across a wide field. Owners of patent thickets can use the threat of such suits to discourage others from investing research dollars in areas of technical advance relevant to their products. Note that this use of patents is precisely opposite to policy mak' intentions to stimulate innovation by providing ways for innovators to assert intellectual property rights. 
Indeed, Bessen and Hunt (2004) have found in the field of software that, on average, as firms' investments in patent protection go up, their investments in research and development actually go down.
A solution approach open to policy makers is to change intellectual property law so as to level the playing field. But owners of large amounts of intellectual property protected under the present system are often politically powerful, so this type of solution will be difficult to achieve. + +Fortunately, an alternative solution approach may be available to innovators themselves. Suppose that many elect to contribute the intellectual property they individually develop to a commons in a particular field. If the commons then grows to contain reasonable substitutes for much of the proprietary intellectual property relevant to the field, the relative advantage accruing to large holders of this information will diminish and perhaps even disappear. At the same time and for the same reason, the barriers that privately held stocks of intellectual property currently may raise to further intellectual advance will also diminish. Lessig supports this possibility with his creation and publication of standard "Creative Commons" licenses on the website creativecommons.org. Authors interested in contributing their work to the commons, perhaps with some restrictions, can easily find and adopt an appropriate license at that site. +={Lessig, L.;Intellectual commons+13;Intellectual property rights:intellectual commons and+13;Open source software:intellectual commons and+13|intellectual property rights and+13;Intellectual property rights:licensing of} + +Reaching agreement on conditions for the formation of an intellectual commons can be difficult. Maurer (2005) makes this clear in his cautionary tale of the struggle and eventual failure to create a commons for data on human mutations. However, success is possible. For example, an extensive intellectual commons of software code is contained and maintained in the many open source software projects that now exist. 
+={Maurer, S.;Innovation communities+2} + +Interesting examples also exist regarding on the impact a commons can have on the value of intellectual property innovators seek to hold apart from it. Weber (2004) recounts the following anecdote: In 1988, Linux developers were building new graphical interfaces for their open source software. One of the most promising of these, KDE, was offered under the General Public License. However, Matthias Ettrich, its developer, had built KDE using a proprietary graphical library called Qt. He felt at the time that this could be an acceptable solution because Qt was of good quality and Troll Tech, owner of Qt, licensed Qt at no charge under some circumstances. However, Troll Tech did require a developer's fee be paid under other circumstances, and some Linux developers were concerned about having code not licensed under the GPL as part of their code. They tried to convince Troll Tech to change the Qt license so that it would be under the GPL when used in free software. But Troll Tech, as was fully within its rights, refused to do this. Linux developers then, as was fully within their rights, began to develop open source alternatives to Qt that could be licensed under the GPL. As those projects moved toward success, Troll Tech recognized that Qt might be surpassed and effectively shut out of the Linux market. In 2000 the company therefore decided to license Qt under the GPL. +={Ettrich, M.;Linux;Weber, S.;Intellectual property rights:licensing of+1;Linux} + +Similar actions can keep conditions for free access to materials held within a commons from degrading and being lost over time. Chris Hanson, a Principal Research Scientist at MIT, illustrates this with an anecdote regarding an open source software component called ipfilter. The author of ipfilter attempted to "lock" the program by changing licensing terms of his program to disallow the distribution of modified versions. 
His reasoning was that Ipfilter, a network-security filter, must be as bug-free as possible, and that this could best be ensured by his controlling access. His actions ignited a flame war in which the author was generally argued to be selfish and overreaching. His program, then an essential piece of BSD operating systems, was replaced by newly written code in some systems within the year. The author, Hanson notes, has since changed his licensing terms back to a standard BSD-style (unrestricted) license. +={Hanson, C.} + +We will learn over time whether and how widely the practice of creating and defending intellectual commons diffuses across fields. There obviously can be cases where it will continue to make sense for innovators, and for society as well, to protect innovations as private intellectual property. However, it is likely that many user innovations are kept private not so much out of rational motives as because of a general, not-thought-through attitude that "we do not give away our intellectual property," or because the administrative cost of revealing is assumed to be higher than the benefits. Firms and society can benefit by rethinking the benefits of free revealing and (re)developing policies regarding what is best kept private and what is best freely revealed. +={Government policy:social welfare and+9|trade secrets and+3;Intellectual property rights:trade secrets and+3} + +!_ Constraints on Product Modification +={Custom products:manufacturers and+2;Government policy:manufacturer innovation and+8;Manufacturers:government policy and+8} + +Users often develop prototypes of new products by buying existing commercial products and modifying them. Current efforts by manufacturers to build technologies into the products they sell that restrict the way these products are used can undercut users' traditional freedom to modify what they purchase. 
This in turn can raise the costs of innovation development by users and so lessen the amount of user innovation that is done. For example, makers of ink-jet printers often follow a razor-and-blade strategy, selling printers at low margins and the ink cartridges used in them at high margins. To preserve this strategy, printer manufacturers want to prevent users from refilling ink cartridges with low-cost ink and using them again. Accordingly, they may add technical modifications to their cartridges to prevent them from functioning if users have refilled them. This manufacturer strategy can potentially cut off both refilling by the economically minded and modifications by user-innovators that might involve refilling (Varian 2002). Some users, for example, have refilled cartridges with special inks not sold by printer manufacturers in order to adapt ink-jet printing to the printing of very high-quality photographs. Others have refilled cartridges with food colorings instead of inks in order to develop techniques for printing images on cakes. Each of these applications might have been retarded or prevented by technical measures against cartridge refilling. +={Varian, H.} + +The Digital Millennium Copyright Act, a legislative initiative intended to prevent product copying, may negatively affect users' abilities to change and improve the products they own. Specifically, the DMCA makes it a crime to circumvent anti-piracy measures built into most commercial software. It also outlaws the manufacture, sale, or distribution of code-cracking devices used to illegally copy software. Unfortunately, code cracking is also a needed step for modification of commercial software products by user-innovators. Policy makers should be aware of "collateral damage" that may be inflicted on user innovation by legislation aimed at other targets, as is likely in this case. 
+={Digital Millennium Copyright Act} + +!_ Control over Distribution Channels +={Government policy:distribution channels and+1} + +Users that innovate and wish to freely diffuse innovation-related information are able to do so cheaply in large part because of steady advances in Internet distribution capabilities. Controls placed on such infrastructural factors can threaten and maybe even totally disable distributed innovation systems such as the user innovation systems documented in this book. For example, information products developed by users are commonly distributed over the Internet by peer-to-peer sharing networks. A firm that owns both a channel and content (e.g., a cable network) may have a strong incentive to shut out or discriminate against content developed by users or others in favor of its own content. The transition from the chaotic, fertile early days of radio in the United States when many voices were heard, to an era in which the spectrum was dominated by a few major networks---a transition pushed by major firms and enforced by governmental policy making---provides a sobering example of what could happen (Lessig 2001). It will be important for policy makers to be aware of this kind of incentive problem and address it---in this case perhaps by mandating that ownership of content and ownership of channel be separated, as has long been the case for other types of common carriers. +={Lessig, L.} + +!_ R&D Subsidies and Tax Credits +={Government policy:R&D subsidies and+3} + +In many countries, manufacturing firms are rewarded for their innovative activity by R&D subsidies and tax credits. Such measures can make economic sense if average social returns to innovation are significantly higher than average private returns, as has been found by Mansfield et al. (1977) and others. However, important innovative activities carried out by users are often not similarly rewarded, because they tend to not be documentable as formal R&D activities. 
As we have seen, users tend to develop innovations in the course of "doing" in their normal use environments. Bresnahan and Greenstein (1996a) make a similar point. They investigate the role of "co-invention" in the move by users from mainframe to client-server architecture.~{ See also Bresnahan and Greenstein 1996b; Bresnahan and Saloner 1997; Saloner and Steinmueller 1996. }~ By "co-invention" Bresnahan and Greenstein mean organizational changes and innovations developed and implemented by users that are required to take full advantage of a new invention. They point out the high importance that co-invention has for realizing social returns from innovation. They consider the federal government's support for creating "national information infrastructures" insufficient or misallocated, since they view co-invention as the bottleneck for social returns and likely the highest value locus for invention. +={Bresnahan, T.;Greenstein, S.;Mansfield, E.;Users:co-invention and} + +Efforts to level the playing field for user innovation and manufacturer innovation could, of course, also go in the direction of lessening R&D subsidies or tax credits for all rather than attempting to increase user-innovators' access to subsidies. However, if directing subsidies to user-innovators seems desirable, social welfare will be best served if policy makers link them to free revealing by user-innovators as well as or instead of tying them to users' private investments in the development of products for exclusive in-house use. Otherwise, duplication of effort by users interested in the same innovation will reduce potential welfare gains. +={Free revealing of innovation information:government policy and+1|social welfare and+1} + +In sum, the welfare-enhancing effects found for freely revealed user innovations suggest that policy makers should consider conditions required for user innovation when creating policy and legislation. 
Leveling the playing field for user-innovators and manufacturer-innovators will doubtless force more rapid change onto manufacturers. However, as will be seen in the next chapter, manufacturers can adapt to a world in which user innovation is at center stage. + +1~ 9 Democratizing Innovation +={Social welfare:innovation and+45;von Hippel, E.+45} + +We have learned that lead users sometimes develop and modify products for themselves and often freely reveal what they have done. We have also seen that many users can be interested in adopting the solutions that lead users have developed. Taken together, these findings offer the basis for user-centered innovation systems that can entirely supplant manufacturer-based innovation systems under some conditions and complement them under most. User-centered innovation is steadily increasing in importance as computing and communication technologies improve. +={Free revealing of innovation information:lead users and;Manufacturers:innovation and+44;Custom products:users and;Innovation:lead users and;Lead users:innovation and;User need+3;Users:innovation and+4} + +I begin this chapter with a discussion of the ongoing democratization of innovation. I then describe some of the patterns in user-centered innovation that are emerging. Finally, I discuss how manufacturers can find ways to profitably participate in emerging, user-centered innovation processes. + +!_ The Trend toward Democratization + +Users' abilities to develop high-quality new products and services for themselves are improving radically and rapidly. Steady improvements in computer software and hardware are making it possible to develop increasingly capable and steadily cheaper tools for innovation that require less and less skill and training to use. 
In addition, improving tools for communication are making it easier for user innovators to gain access to the rich libraries of modifiable innovations and innovation components that have been placed into the public domain. The net result is that rates of user innovation will increase even if users' heterogeneity of need and willingness to pay for "exactly right" products remain constant. +={Custom products:users and+1;Users:paying for innovations and} + +The radical nature of the change that is occurring in design capabilities available to even individual users is perhaps difficult for those without personal innovation experience to appreciate. An anecdote from my own experience may help as illustration. When I was a child and designed new products that I wanted to build and use, the ratio of not-too-pleasurable (for me) effort required to actually build a prototype relative to the very pleasurable effort of inventing it and use-testing it was huge. (That is, in terms of the design, build, test, evaluate cycle illustrated in figure 5.1, the effort devoted to the "build" element of the cycle was very large and the rate of iteration and learning via trial and error was very low.) + +In my case it was especially frustrating to try to build anything sophisticated from mechanical parts. I did not have a machine shop in which I could make good parts from scratch, and it often was difficult to find or buy the components I needed. As a consequence, I had to try to assemble an approximation of my ideas out of vacuum cleaner parts and other bits of metal and plastic and rubber that I could buy or that were lying around. Sometimes I failed at this and had to drop an exciting project. For example, I found no way to make the combustion chamber I needed to build a large pulse-jet engine for my bicycle (in retrospect, perhaps a lucky thing!). 
Even when I succeeded, the result was typically "unaesthetic": the gap between the elegant design in my mind and the crude prototype that I could realize was discouragingly large. + +Today, in sharp contrast, user firms and increasingly even individual hobbyists have access to sophisticated design tools for fields ranging from software to electronics to musical composition. All these information-based tools can be run on a personal computer and are rapidly coming down in price. With relatively little training and practice, they enable users to design new products and services---and music and art---at a satisfyingly sophisticated level. Then, if what has been created is an information product, such as software or music, the design is the actual product---software you can use or music you can play. + +If one is designing a physical product, it is possible to create a design and even conduct some performance testing by computer simulation. After that, constructing a real physical prototype is still not easy. However, today users do have ready access to kits that offer basic electronic and mechanical building blocks at an affordable price, and physical product prototyping is becoming steadily easier as computer-driven 3-D parts printers continue to go up in sophistication while dropping in price. Very excitingly, even today home-built prototypes need not be poorly fashioned items that will fall apart with a touch in the wrong place---the solution components now available to users are often as good as those available to professional designers. + +Functional equivalents of the resources for innovation just described have long been available within corporations to a lucky few. Senior designers at firms have long been supported by engineers and designers under their direct control, and also with other resources needed to quickly construct and test prototype designs. 
When I took a job as R&D manager at a start-up firm after college, I was astounded at the difference professional-quality resources made to both the speed and the joy of innovation. Product development under these conditions meant that the proportion of one's effort that could be focused on the design and test portions of the innovation cycle rather than on prototype building was much higher, and the rate of progress was much faster. +={Innovation process+3} + +The same story can be told in fields from machine design to clothing design: just think of the staffs of seamstresses and models supplied by clothing manufacturers to their "top designers" so that these few can quickly realize and test many variations on their designs. In contrast, think of the time and effort that equally talented designers without such staff assistance must engage in to stitch together even a single high-quality garment prototype on their own. + +But, as we learned in chapter 7, the capability and the information needed to innovate in important ways are in fact widely distributed. Given this finding, we can see that the traditional pattern of concentrating innovation-support resources on just a few pre-selected potential innovators is hugely inefficient. High-cost resources for innovation support cannot be allocated to "the right people," because one does not know who they are until they develop an important innovation. When the cost of high-quality resources for design and prototyping becomes very low---which is the trend we have described---these resources can be diffused widely, and the allocation problem then diminishes in significance. The net result is and will be to democratize the opportunity to create. + +Democratization of the opportunity to create is important beyond giving more users the ability to make exactly right products for themselves. 
As we saw in a previous chapter, the joy and the learning associated with creativity and membership in creative communities are also important, and these experiences too are made more widely available as innovation is democratized. The aforementioned Chris Hanson, a Principal Research Scientist at MIT and a maintainer in the Debian Linux community, speaks eloquently of this in his description of the joy and value he finds from his participation in an open source software community: +={Hanson, C.;Linux;Users:innovate-or-buy decisions by+3|innovation process and+3;Free revealing of innovation information:innovation and+3;Innovation communities:open source software and;Linux;Open source software:innovation communities and+3} + +_1 Creation is unbelievably addictive. And programming, at least for skilled programmers, is highly creative. So good programmers are compelled to program to feed the addiction. (Just ask my wife!) Creative programming takes time, and careful attention to the details. Programming is all about expressing intent, and in any large program there are many areas in which the programmer's intent is unclear. Clarification requires insight, and acquiring insight is the primary creative act in programming. But insight takes time and often requires extensive conversation with one's peers. + +_1 Free-software programmers are relatively unconstrained by time. Community standards encourage deep understanding, because programmers know that understanding is essential to proper function. They are also programming for themselves, and naturally they want the resulting programs to be as good as they can be. For many, a free software project is the only context in which they can write a program that expresses their own vision, rather than implementing someone else's design, or hacking together something that the marketing department insists on. No wonder programmers are willing to do this in their spare time. This is a place where creativity thrives. 
+ +_1 Creativity also plays a role in the programming community: programming, like architecture, has both an expressive and a functional component. Unlike architecture, though, the expressive component of a program is inaccessible to non-programmers. A close analogy is to appreciate the artistic expression of a novel when you don't know the language in which it is written, or even if you know the language but are not fluent. This means that creative programmers want to associate with one another: only their peers are able to truly appreciate their art. Part of this is that programmers want to earn respect by showing others their talents. But it's also important that people want to share the beauty of what they have found. This sharing is another act that helps build community and friendship. + +!_ Adapting to User-Centered Innovation---Like It or Not +={Innovation process+5;Free revealing of innovation information:innovation and+5} + +User-centered innovation systems involving free revealing can sometimes supplant product development carried out by manufacturers. This outcome seems reasonable when manufacturers can obtain field-tested user designs at no cost. As an illustration, consider kitesurfing (previously discussed in chapter 7). The recent evolution of this field nicely shows how manufacturer-based product design may not be able to survive when challenged by a user innovation community that freely reveals leading-edge designs developed by users. In such a case, manufacturers may be obliged to retreat to manufacturing only, specializing in modifying user-developed designs for producibility and manufacturing these in volume. +={Free revealing of innovation information:users and;Kitesurfing+4} + +Recall that equipment for kitesurfing was initially developed and built by user-enthusiasts who were inventing both kitesurfing techniques and kitesurfing equipment interdependently. 
Around 1999, the first of several small manufacturers began to design and sell kitesurfing equipment commercially. The market for kitesurfing equipment then began to grow very rapidly. In 2001 about 5,000 kite-and-board sets were sold worldwide. In 2002 the number was about 30,000, and in 2003 it was about 70,000. With a basic kite-and-board set selling for about $1,500, total sales in 2003 exceeded $100 million. (Many additional kites, home-made by users, are not included in this calculation.) As of 2003, about 40 percent of the commercial market was held by a US firm called Robbie Naish (Naishkites.com). + +Recall also that in 2001 Saul Griffith, an MIT graduate student, established an Internet site called Zeroprestige.com as a home for a community of kitesurfing users and user-innovators. In 2003, the general consensus of both site participants and manufacturers was that the kite designs developed by users and freely revealed on Zeroprestige.com were at least as advanced as those developed by the leading manufacturers. There was also a consensus that the level of engineering design tools and aggregate rate of experimentation by kite users participating on the Zeroprestige.com site was superior to that within any kite manufacturer. Indeed, this collective user effort was probably superior in quality and quantity to the product-development work carried out by all manufacturers in the industry taken together. +={Griffith, S.;Zeroprestige.com+1} + +In late 2003, a perhaps predictable event occurred: a kite manufacturer began downloading users' designs from Zeroprestige.com and producing them for commercial sale. This firm had no internal kitesurfing product-development effort and offered no royalties to user-innovators---who sought none. It also sold its products at prices much lower than those charged by companies that both developed and manufactured kites. 
+ +It is not clear that manufacturers of kitesurfing equipment adhering to the traditional developer-manufacturer model can---or should---survive this new and powerful combination of freely revealed collaborative design and prototyping effort by a user innovation community combined with volume production by a specialist manufacturer. In effect, free revealing of product designs by users offsets manufacturers' economies of scale in design with user communities' economies of scope. These economies arise from the heterogeneity in information and resources found in a user community. +={Custom products:heterogeneity of user needs and;User need+2} + +!_ Manufacturers' Roles in User-Centered Innovation + +Users are not required to incorporate manufacturers in their product-development and product-diffusion activities. Indeed, as open source software projects clearly show, horizontal innovation communities consisting entirely of users can develop, diffuse, maintain, and consume software and other /{information}/ products by and for themselves---no manufacturer is required. Freedom from manufacturer involvement is possible because information products can be "produced" and distributed by users essentially for free on the web (Kollock 1999). In contrast, production and diffusion of physical products involves activities with significant economies of scale. For this reason, while product development and early diffusion of copies of physical products developed by users can be carried out by users themselves and within user innovation communities, mass production and general diffusion of physical products incorporating user innovations are usually carried out by manufacturing firms. 
+={Innovation communities:open source software and+5;Open source software:innovation and+5|innovation communities and+5} + +For information products, general distribution is carried out within and beyond the user community by the community itself; no manufacturer is required: + +Innovating lead users ➔ All users. + +For physical products, general distribution typically requires manufacturers: + +Innovating lead users ➔ Manufacturer ➔ All users. + +In light of this situation, how can, should, or will manufacturers of products, services, and processes play profitable roles in user-centered innovation systems? Behlendorf (1999), Hecker (1999) and Raymond (1999) explore what might be possible in the specific context of open source software. More generally, many are experimenting with three possibilities: (1) Manufacturers may produce user-developed innovations for general commercial sale and/or offer a custom manufacturing service to specific users. (2) Manufacturers may sell kits of product-design tools and/or "product platforms" to ease users' innovation-related tasks. (3) Manufacturers may sell products or services that are complementary to user-developed innovations. +={Behlendorf, B.;Hecker, F.;Raymond, E.} + +!_ Producing User-Developed Products + +Firms can make a profitable business from identifying and mass producing user-developed innovations or developing and building new products based on ideas drawn from such innovations. They can gain advantages over competitors by learning to do this better than other manufacturers. They may, for example, learn to identify commercially promising user innovations more effectively than other firms. Firms using lead user search techniques such as those we will describe in chapter 10 are beginning to do this systematically rather than accidentally---surely an improvement. Effectively transferring user-developed innovations to mass manufacture is seldom as simple as producing a product based on a design by a single lead user. 
Often, a manufacturer combines features developed by several independent lead users to create an attractive commercial offering. This is a skill that a company can learn better than others in order to gain a competitive advantage. +={Lead users:innovation and|manufacturers and+1;Manufacturers:lead users and+1} + +The decision as to whether or when to take the plunge and commercialize a lead user innovation(s) is also not typically straightforward, and companies can improve their skills at inviting in the relevant information and making such assessments. As was discussed previously, manufacturers often do not understand emerging user needs and markets nearly as well as lead users do. Lead users therefore may engage in entrepreneurial activities, such as "selling" the potential of an idea to potential manufacturers and even lining up financing for a manufacturer when they think it very important to rapidly get widespread diffusion of a user-developed product. Lettl, Herstatt, and Gemünden (2004), who studied the commercialization of major advances in surgical equipment, found innovating users commonly engaging in these activities. It is also possible, of course, for innovating lead users to become manufacturers and produce the products they developed for general commercial sale. This has been shown to occur fairly frequently in the field of sporting goods (Shah 2000; Shah and Tripsas 2004; Hienerth 2004). +={Gemünden, H.;Lettl, C.;Herstatt, C.;Hienerth, C.;Shah, S.;Tripsas, M.;Lead users:surgical equipment and;Windsurfing;Custom products:innovation and+1|manufacturers and+1;users and+1;Users:custom products and+1} + +Manufacturers can also elect to provide custom production or "foundry" services to users, differentiating themselves by producing users' designs faster, better, and/or cheaper than competitors. This type of business model is already advanced in many fields. 
Custom machine shops specialize in manufacturing mechanical parts to order; electronic assembly shops produce custom electronic products, chemical manufacturers offer "toll" manufacturing of custom products designed by others, and so on. Suppliers of custom integrated circuits offer an especially good example of custom manufacture of products designed by users. More than $15 billion worth of custom integrated circuits were produced in 2002, and the cumulative average growth rate of that market segment was 29 percent. Users benefit from designing their own circuits by getting exactly what they want more quickly than manufacturer-based engineers could supply what they need, and manufacturers benefit from producing the custom designs for users (Thomke and von Hippel 2002). +={Thomke, S.;von Hippel, E.;Custom products:|suppliers and+2;Suppliers+2;Economic benefit, expectations of by lead users:by manufacturers|by users} + +!_ Supplying Toolkits and/or Platform Products to Users +={Custom products:product platforms and+7;Toolkits+7:platform products and+7} + +Users interested in designing their own products want to do it efficiently. Manufacturers can therefore attract them to kits of design tools that ease their product-development tasks and to products that can serve as "platforms" upon which to develop and operate user-developed modifications. Some are supplying users with proprietary sets of design tools only. Cadence, a supplier of design tools for corporate and even individual users interested in designing their own custom semiconductor chips, is an example of this. Other manufacturers, including Harley-Davidson in the case of motorcycles and Microsoft in the case of its Excel spreadsheet software, sell platform products intentionally designed for post-sale modification by users. 
+={Microsoft} + +Some firms that sell platform products or design tools to users have learned to systematically incorporate valuable innovations that users may develop back into their commercial products. In effect, this second strategy can often be pursued jointly with the manufacturing strategy described above. Consider, for example, StataCorp of College Station, Texas. StataCorp produces and sells Stata, a proprietary software program designed for statistics. It sells the basic system bundled with a number of families of statistical tests and with design tools that enable users to develop new tests for operation on the Stata platform. Advanced customers, many of them statisticians and social science researchers, find this capability very important to their work and do develop their own tests. Many then freely reveal tests they have developed on Internet websites set up by the users themselves. Other users then visit these sites to download and use, and perhaps to test, comment on, and improve these tests, much as users do in open source software communities. +={StataCorp statistical software+2;Toolkits:StataCorp and+2;Free revealing of innovation information:innovation and+3|intellectual property rights and+3;Government policy:free revealing and+3|intellectual property rights and+3|trade secrets and+3;Innovation communities+3:open source software and+3|and sources of innovation+3;Intellectual property rights:free revealing and+3|trade secrets and+3;Open source software:innovation and+10|innovation communities and+10} + +StataCorp personnel monitor the activity at user sites, and note the new tests that are of interest to many users. They then bring the most popular tests into their product portfolio as Stata modules. To do this, they rewrite the user's software code while adhering to the principles pioneered by the user-innovator. They then subject the module to extensive validation testing---a very important matter for statisticians. 
The net result is a symbiotic relationship. User-innovators are publicly credited by Stata for their ideas, and benefit by having their modules professionally tested. StataCorp gains a new commercial test module, rewritten and sold under its own copyright. Add-ons developed by users that are freely revealed will increase StataCorp's profits more than will equivalent add-ons developed and sold by manufacturers (Jokisch 2001). Similar strategies are pursued by manufacturers of simulator software (Henkel and Thies 2003). +={Henkel, J.;Jokisch, M.;Thies, S.;Economic benefit, expectations of by lead users:by manufacturers|by users} + +Note, however, that StataCorp, in order to protect its proprietary position, does not reveal the core of its software program to users, and does not allow any user to modify it. This creates problems for those users who need to make modifications to the core in order to solve particular problems they encounter. Users with problems of this nature and users especially concerned about price have the option of turning to non-proprietary free statistical software packages available on the web, such as the "R" project (www.r-project.org). These alternatives are developed and supported by user communities and are available as open source software. The eventual effect of open source software alternatives on the viability of the business models of commercial vendors such as StataCorp and its competitors remains to be seen. + +A very similar pattern exists in the online gaming industry. Vendors of early online computer games were surprised to discover that sophisticated users were deciphering their closed source code in order to modify the games to be more to their liking. Some of these "mods" attracted large followings, and some game vendors were both impressed and supportive. 
Manufacturers also discovered that the net effect of user-developed mods was positive for them: mods actually increased the sales of their basic software, because users had to buy the vendors' proprietary software engine code in order to play the mods. Accordingly, a number of vendors began to actively support user-developers by supplying them with design tools to make it easier for them to build mods on their proprietary engine platforms (Jeppesen and Molin 2003). +={Jeppesen, L.+1;Molin, M.} + +Both manufacturers and users involved with online gaming are experimenting with the possibilities of user-manufacturer symbiosis in a number of additional ways. For example, some vendors are experimenting with creating company-supported distribution channels through which users---who then become vendors---can sell their mods rather than simply offering them as free downloads (Jeppesen 2004). At the same time, some user communities are working in the opposite direction by joining together to develop open source software engines for video games. If the latter effort is successful, it will offer mod developers a platform and design tools that are entirely non-proprietary for the first time. As in the case of statistical software, the eventual outcomes of all these experiments are not yet clear. + +As a final example of a strategy in which manufacturers offer a platform to support user innovation of value to them, consider General Electric's innovation pattern with respect to the magnetic-resonance imaging machines it sells for medical use. Michael Harsh (GE's Director of R&D in the division that produces MRI machines) and his colleagues realized that nearly all the major, commercially important improvements to these machines are developed by leading-edge users rather than by GE or by competing machine producers. 
They also knew that commercialization of user-developed improvements would be easier and faster for GE if the users had developed their innovations using a GE MRI machine as a platform rather than a competitor's machine. Since MRI machines are expensive, GE developed a policy of selectively supplying machines at a very low price to scientists GE managers judged most likely to develop important improvements. These machines are supplied with restrictive interlocks removed so that the users can easily modify them. In exchange for this research support, the medical researchers give GE preferred access to innovations they develop. Over the years, supported researchers have provided a steady flow of significant improvements that have been first commercialized by GE. Managers consider the policy a major source of GE's commercial success in the MRI field. +={General Electric;Harsh, M.;Toolkits:GE and} + +!_ Providing Complementary Products or Services + +Many user innovations require or benefit from complementary products or services, and manufacturers can often supply these at a profit. For example, IBM profits from user innovation in open source software by selling the complement of computer hardware. Specifically, it sells computer servers with open source software pre-installed, and as the popularity of that software goes up, so do server sales and profits. A firm named Red Hat distributes a version of the open source software computer operating system Linux, and also sells the complementary service of Linux technical support to users. Opportunities to provide profitable complements are not necessarily obvious at first glance, and providers often reap benefits without being aware of the user innovation for which they are providing a complement. Hospital emergency rooms, for example, certainly gain considerable business from providing medical care to the users and user-developers of physically demanding sports, but may not be aware of this. 
+={IBM;Linux;Users:innovation and+3;Innovation communities:open source software and;Innovation communities:sources of innovation and} + +!_ Discussion +={Government policy:intellectual property rights and+2} + +All the examples above explore how manufacturers can integrate themselves into a user-centered innovation system. However, manufacturers will not always find user innovations based on or related to their products to be in their interest. For example, manufacturers may be concerned about legal liabilities and costs sometimes associated with "unauthorized user tinkering." For example, an automaker might legitimately worry about the user-programmed engine controller chips that racing aficionados and others often install to change their cars' performance. The result can be findings of eventual commercial value as users explore new performance regimes that manufacturers' engineers might not have considered. However, if users choose to override manufacturers' programming to increase engine performance, there is also a clear risk of increased warranty costs for manufacturers if engines fail as a consequence (Mollick 2004). +={Mollick, Ethan} + +We have seen that manufacturers can often find ways to profit from user innovation. It is also the case, however, that user innovators and user innovation communities can provide many of these same functions for themselves. For example, StataCorp is successfully selling a proprietary statistical software package. User-developed alternatives exist on the web that are developed and maintained by user-innovators and can be downloaded at no charge. Which ownership model will prove more robust under what circumstances remains to be seen. Ultimately, since users are the customers, they get to choose. 
+={StataCorp statistical software;Toolkits:StataCorp and;Users:innovation communities and;Innovation communities} + +1~ 10 Application: Searching for Lead User Innovations +={Users:innovation and+59;Lead users+59:innovation and+59|identification of+49} + +Users and manufacturers can apply the insights developed in this book to improve their innovation processes. In this chapter, I illustrate by showing how firms can profit by /{systematically}/ searching for innovations developed by lead users. I first explain how this can be done. I then present findings of a study conducted at 3M to assess the effectiveness of lead user idea-generation techniques. Finally, I briefly review other studies reporting systematic searches for lead users by manufacturers, and the results obtained. +={Lead users:idea generation and|3M and;Manufacturers:innovation and|lead users and+16;3M Corporation} + +!_ Searching for Lead Users + +Product-development processes traditionally used by manufacturers start with market researchers who study customers in their target markets to learn about unsatisfied needs. Next, the need information they uncover is transferred to in-house product developers who are charged with developing a responsive product. In other words, the approach is to find a user need and to fill it by means of in-house product development. +={Marketing research+2} + +These traditional processes cannot easily be adapted to systematic searching for lead user innovations. The focus on target-market customers means that lead users are regarded as outliers of no interest. Also, traditional market-research analyses focus on collecting and analyzing need information and not on possible solutions that users may have developed. For example, if a user says "I have developed this new product to make task X more convenient," market-research analyses typically will note that more convenience is wanted but not record the user-developed solution. 
After all, product development is the province of in-house engineers! + +We are therefore left with a question: How can manufacturers build a product-development process that systematically searches for and evaluates lead user-generated innovations? (See figure 10.1.) It turns out that the answer differs depending on whether the lead users sought are at the leading edge of "advanced analog" fields or at the leading edge of target markets. Searching for the former is more difficult, but experience shows that the user-developed innovations that are most radical (and profitable) relative to conventional thinking often come from lead users in "advanced analog" fields. +={Manufacturers:innovation and;Lead users:characteristics of+1;Marketing research+1} + +% Only lead user +% prototypes available +% Time +% Commercial versions of product available +% Number +% of users +% perceiving +% need +% Figure 10.1 +% Innovations by lead users precede equivalent commercial products. + +{di_evh_f10-1.png}image + +!_ Figure 10.1 +Innovations by lead users precede equivalent commercial products. + +!_ Identifying Lead Users in Advanced Analog Fields + +Lead users in advanced analog fields experience needs that are related to but more extreme than those being faced by /{any}/ users, including lead users, within the target market. They also often face a different set of constraints than those affecting users in the target market. These differences can force them to develop solutions that are entirely new from the perspective of the target market. + +As an example, consider the relationship between the braking requirements faced by users of automobiles (let's call auto users the target market) and the braking requirements faced by large commercial airplanes as they land on an airport runway (the advanced analog market). Clearly, the braking demands on large airplanes are much more extreme. 
Airplanes are much heavier than autos and land at higher speeds: their brakes must rapidly dissipate hundreds of times more energy to bring the vehicle to a stop. Also, the situational constraints are different. For example, auto drivers are often assisted in braking in winter by the application of salt or sand to icy roads. These aids cannot be applied in the case of aircraft: salt would damage aircraft bodies, and sand would be inhaled into jet engines and damage them. + +The result of the more extreme demands and additional constraints placed on solutions to aircraft braking was the development of antilock braking systems (ABS) for aircraft. Auto firms conducting searches for valuable lead user innovations regarding auto braking were able to learn about this out-of-field innovation and adapt it for use in autos---where it is common today. Before the development of ABS for autos, an automobile firm could have learned about the underlying concept by studying the practices of users with a strong need for controlling skidding while braking such as stock car auto racing teams. These lead users had learned to manually "pump" their brakes to help control this problem. However, auto company engineers were able to learn much more by studying the automated solutions developed in the "advanced analog" field of aerospace.~{ ABS braking is intended to keep a vehicle's wheels turning during braking. ABS works by automatically and rapidly "pumping" the brakes. The result is that the wheels continue to revolve rather than "locking up," and the operator continues to have control over steering. }~ + +Finding lead users in advanced analog markets can be difficult because discovering the relevance of a particular analog can itself be a creative act. One approach that has proven effective is to ask the more easily identified lead users in target markets for nominations. 
These lead users tend to know about useful advanced analogs, because they have been struggling with their leading-edge problems for a long time, and often have searched beyond the target market for information. + +Networking from innovators to more advanced innovators in this way is called pyramiding (von Hippel, Thomke, and Sonnack 1999). Pyramiding is a modified version of the "snowballing" technique sometimes used by sociologists to identify members of a group or accumulate samples of rare respondents (Bijker 1995). Snowballing relies on the fact that people with rare interests or attributes tend to know others like themselves. Pyramiding modifies this idea by assuming that people with a strong interest in a topic or field can direct an enquiring researcher to people /{more}/ expert than themselves. Experiments have shown that pyramiding can identify high-quality informants much more efficiently than can mass-screening techniques under many conditions (von Hippel, Franke, and Prügl 2005). Pyramiding was made into a practical industrial process by Mary Sonnack, a Division Scientist at 3M, and Joan Churchill, a psychologist specializing in the development of industrial training programs. +={Bijker, W.;Churchill, J.;Franke, N.;Prügl, R.;Thomke, S.;von Hippel, E.+46} + +!_ Identifying Lead Users in Target Markets + +In general it is easier to identify users at the leading edge of target markets than it is to identify users in advanced analog fields. Screening for users with lead user characteristics can be used. When the desired type of lead user is so rare as to make screening impractical---often the case---pyramiding can be applied. In addition, manufacturers can take advantage of the fact that users at the leading edge of a target market often congregate at specialized sites or events that manufacturers can readily identify. At such sites, users may freely reveal what they have done and may learn from others about how to improve their own practices still further. 
Manufacturers interested in learning from these lead users can easily visit the sites and listen in. For example, sports equipment companies can go to sporting meets where lead users are known to compete, observe user innovations in action, and compare notes. +={Manufacturers:innovation and} + +Essentially the same thing can be done at virtual sites. For example, recall the practices of StataCorp, a supplier of statistical software. Stata sells a set of standard statistical tests and also a language and tools that statisticians can use to design new tests to serve their own evolving needs. Some Stata users (statisticians) took the initiative to set up a few specialized websites, unaffiliated with StataCorp, where they post their innovations for others to download, use, comment on, and improve. StataCorp personnel visit these sites, learn about the user innovations, and observe which tests seem to be of interest to many users. They then develop proprietary versions of the more generally useful tests as commercial products. +={Lead users:StataCorp statistical software and;StataCorp statistical software} + +When specialized rendezvous sites for lead users don't exist in a particular field, manufacturers may be able to create them. Technicon Corporation, for example, set up a series of seminars at which innovating users of their medical equipment got together and exchanged information on their innovations. Technicon engineers were free to listen in, and the innovations developed by these users were the sources of most of Technicon's important new product improvements (von Hippel and Finkelstein 1979). +={Finkelstein, S.;Technicon Corporation} + +!_ The 3M Experiment +={Lead users:3M and+32;3M Corporation+32} + +To test whether lead users in advanced analog fields can in fact generate information that leads to commercially valuable new products, Lilien, Morrison, Searls, Sonnack, and von Hippel (2002) studied a natural experiment at 3M. 
That firm was carrying out both lead user projects and traditional market research-based idea-generation projects in the same divisions at the same time, and in sufficient numbers to make statistical comparisons of outcomes possible. +={Lilien, G.+26;Morrison, Pamela+26;Searls, K.+26;Sonnack, M.+26;Lead users:idea generation and+45} + +!_ Methods + +3M first began using the lead user method in one division in 1996. By May 2000, when data collection began, five divisions of 3M had completed seven lead user (LU) idea-generation projects and had funded further development of the product concepts generated by five of these. These same five divisions also had 42 contemporaneously funded projects that used "find a need and fill it" idea-generation methodologies that were traditional practice at 3M. We used these two samples of funded ideas to compare the performance of lead user idea-generation projects with traditional idea-generation projects. Although 3M cooperated in the study and permitted access to company records and to members of the product-development teams, the firm did not offer a controlled experimental setting. Rather, we as researchers were required to account for any naturally occurring differences after the fact. + +Our study methodology required a pre-post/test-control situation, with at least quasi-random assignments to treatment cells (Cook and Campbell 1979). In other words, our goal was to compare samples of development projects in 3M divisions that differed with respect to their use of lead user idea-generation methods, but that were as similar as possible in other respects. Identifying, understanding, and controlling for the many potential sources of difference that could affect the natural experiment involved careful field explorations. Thus, possible differences between project staffing and performance incentives applied to LU and non-LU idea-generation projects were assessed. 
We looked for (and did not find) differences in the capabilities or motivation of LU and non-LU project team members with respect to achieving a major new product advance. 3M managers also said that there was no difference in these matters, and a content analysis of formal annual performance goals set for the individual LU and non-LU team members in a division that allowed access to these data supported their views. +={Campbell, D.;Cook, T.} + +We also found no major differences in the innovation opportunities teams faced. We also looked for Hawthorne or placebo effects that might affect the project teams differentially, and found none. (The Hawthorne effect can be described as "I do better because extra attention is being paid to me or to my performance." The placebo effect can be described as "I expect this process will work and will strive to get the results I have been told are likely.") We concluded that the 3M samples of funded LU and non-LU idea-generation projects, though not satisfying the random assignment criterion for experimental design, appeared to satisfy rough equivalence criteria in test and control conditions associated with natural or quasi-experimentation. Data were collected by interviews and by survey instruments. + +With respect to the intended difference under study---the use of lead user methods within projects---all lead user teams employed an identical lead user process taught to them with identical coaching materials and with coaching provided by members of the same small set of internal 3M coaches. Each lead user team consisted of three or four members of the marketing and technical departments of the 3M division conducting the project. Teams began by identifying important market trends. Then, they engaged in pyramiding to identify lead users with respect to each trend both within the target market and in advanced analog markets. 
Information from a number of innovating lead users was then combined by the team to create a new product concept and business plan---an "LU idea" (von Hippel, Thomke, and Sonnack 1999). +={Thomke, S.} + +% ={Sonnack, M.} + +Non-lead-user idea-generation projects were conducted in accordance with traditional 3M practices. I refer to these as non-LU idea generation methods and to teams using them as non-LU teams. Non-LU teams were similar to lead user teams in terms of size and make-up. They used data sources for idea generation that varied from project to project. Market data collected by outside organizations were sometimes used, as were data from focus groups with major customers and from customer panels, and information from lab personnel. Non-LU teams collected market information from target markets users but not from lead users. + +!_ Findings + +Our research compared all funded product concepts generated by LU and non-LU methods from February 1999 to May 2000 in each of the five 3M divisions that had funded one or more lead-user-developed product concepts. During that time, five ideas generated by lead user projects were being funded, along with 42 ideas generated by non-LU idea-generation methods. The results of these comparisons can be seen in table 10.1. Product concepts generated by seeking out and learning from lead users were found to be significantly more novel than those generated by non-LU methods. They were also found to address more original or newer customer needs, to have significantly higher market share, to have greater potential to develop into an entire product line, and to be more strategically important. The lead-user-developed product concepts also had projected annual sales in year 5 that were greater than those of ideas generated by non-LU methods by a factor of 8---an average of $146 million versus an average of $18 million in forecast annual sales. 
Thus, at 3M, lead user idea-generation projects clearly did generate new product concepts with much greater commercial potential than did traditional, non-LU methods (p < 0.005). + +!_ Table 10.1 +Concepts for new products developed by lead user project teams had far more commercial promise than those developed by non-lead-user project teams. + +table{~h c4; 40; 20; 20; 20; + +~ +LU product concepts (n =5) +Non-LU product concepts (n = 42) +Significance + +Factors related to value of concept +~ +~ +~ + +Novelty compared with competition a +9.6 +6.8 +0.01 + +Originality/newness of customer needs addressed +8.3 +5.3 +0.09 + +% market share in year 5 +68% +33% +0.01 + +Estimated sales in year 5 (deflated for forecast error) +$146m +$18m +0.00 + +Potential for entire product family a +10.0 +7.5 +0.03 + +Operating profit +22% +24.0% +0.70 + +Probability of success +80% +66% +0.24 + +Strategic importance a +9.6 +7.3 +0.08 + +Intellectual property protection a +7.1 +6.7 +0.80 + +Factors related to organizational fit of concept +~ +~ +~ + +Fit with existing distribution channels a +8.8 +8.0 +0.61 + +Fit with existing manufacturing capabilities a +7.8 +6.7 +0.92 + +Fit with existing strategic plan a +9.8 +8.4 +0.24 + +}table + +Source: Lilien et al. 2002, table 1.<:br> +a. Rated on a scale from 1 to 10. + +Note that the sales data for both the LU and non-LU projects are forecasts. To what extent can we rely on these? We explored this matter by collecting both forecast and actual sales data from five 3M division controllers. (Division controllers are responsible for authorizing new product-development investment expenditures.) We also obtained data from a 1995 internal study that compared 3M's sales forecasts with actual sales. We combined this information to develop a distribution of forecast errors for a number of 3M divisions, as well as overall forecast errors across the entire corporation. 
Those errors range from forecast/actual of +30 percent (over-forecast) to --13 percent (underforecast). On the basis of the information just described, and in consultation with 3M management, we deflated all sales forecast data by 25 percent. That deflator is consistent with 3M's historical experience and, we think, provides conservative sales forecasts.~{ In the general literature, Armstrong's (2001) review on forecast bias for new product introduction indicates that sales forecasts are generally optimistic, but that that upward bias decreases as the magnitude of the sales forecast increases. Coller and Yohn (1998) review the literature on bias in accuracy of management earnings forecasts and find that little systematic bias occurs. Tull's (1967) model calculates $15 million in revenue as a level above which forecasts actually become pessimistic on average. We think it reasonable to apply the same deflator to LU vs. non-LU project sales projections. Even if LU project personnel were for some reason more likely to be optimistic with respect to such projections than non-LU project personnel, that would not significantly affect our findings. Over 60 percent of the total dollar value of sales forecasts made for LU projects were actually made by personnel not associated with those projects (outside consulting firms or business analysts from other divisions). }~ Deflated data appear in table 10.1 and in the following tables. + +Rather strikingly, all five of the funded 3M lead user projects created the basis for major new product lines for 3M (table 10.2). In contrast, 41 of 42 funded product concepts generated by non-LU methods were improvements or extensions of existing product lines (χ^{2}^ test, p < 0.005). + +Following the advice of 3M divisional controllers, major product lines were defined as those separately reported in divisional financial statements. 
In 1999 in the 3M divisions we studied, sales of individual major product lines ranged from 7 percent to 73 percent of total divisional sales. The sales projections for funded lead user project ideas all fell well above the lower end of this range: projected sales five years after introduction for funded LU ideas, conservatively deflated as discussed above, ranged from 25 percent to over 300 percent of current total divisional sales. + +!_ Table 10.2 +Lead user project teams developed concepts for major new product lines. Non-lead-user project teams developed concepts for incremental product improvements. + +table{~h c3; 34; 33; 33; + +~ +Incremental product improvements +Major new product lines + +LU method +0 +5 + +Non-LU method +41 +1 + +}table + +Source: Lilien et al. 2002, table 2. + +To illustrate what the major product line innovations that the LU process teams generated at 3M were like, I briefly describe four (one is not described for 3M proprietary reasons): + +_* A new approach to the prevention of infections associated with surgical operations. The new approach replaced the traditional "one size fits all" approach to infection prevention with a portfolio of patient-specific measures based on each patient's individual biological susceptibilities. This innovation involved new product lines plus related business and strategy innovations made by the team to bring this new approach to market successfully and profitably. + +_* Electronic test and communication equipment for telephone field repair workers that pioneered the inclusion of audio, video, and remote data access capabilities. These capabilities enabled physically isolated workers to carry out their problem-solving work as a virtual team with co-workers for the first time. + +_* A new approach, implemented via novel equipment, to the application of commercial graphics films that cut the time of application from 48 hours to less than 1 hour. 
(Commercial graphics films are used, for example, to cover entire truck trailers, buses, and other vehicles with advertising or decorative graphics.) The LU team's solutions involved technical innovations plus related channel and business model changes to help diffuse the innovation rapidly. + +_* A new approach to protecting fragile items in shipping cartons that replaces packaging materials such as foamed plastic. The new product lines implementing the approach were more environmentally friendly and much faster and more convenient for both shippers and package recipients than other products and methods on the market. + +Lilien, Morrison, Searls, Sonnack, and I also explored to see whether the major product lines generated by the lead user projects had characteristics similar to those of the major product lines that had been developed at 3M in the past, including Scotch Tape. To determine this we collected data on all major new product lines introduced to the market between 1950 and 2000 by the five 3M divisions that had executed one or more lead user studies. (The year 1950 was as far back as we could go and still find company employees who could provide some data about the innovation histories of these major products lines.) Examples from our 1950--2000 sample include the following: +={Lilien, G.;Morrison, Pamela;Searls, K.} + +% ={Sonnack, M.} + +_* Scotch Tape: A line of transparent mending tapes that was first of its type and a major success in many household and commercial applications. + +_* Disposable patient drapes for operating room use: A pioneering line of disposable products for the medical field now sold in many variations. + +_* Box sealing tapes: The first type of tape strong enough to reliably seal corrugated shipping boxes, it replaced stapling in most "corrugated shipper" applications. 
+ +_* Commercial graphics films: Plastic films capable of withstanding outdoor environments that could be printed upon and adhered to large surfaces on vehicles such as the sides of trailer trucks. This product line changed the entire approach to outdoor signage. + +Table 10.3 provides profiles of the five LU major product lines and the 16 non-LU major product lines for which we were able to collect data. As can be seen, innovations generated with inputs from lead users are similar in many ways to the major innovations developed by 3M in the past. + +!_ Table 10.3 +Major new product lines (MNPLs) generated by lead-user methods are similar to MNPLs generated by 3M in the past. + +table{~h c4; 55; 15; 15; 15; + +~ +LU MNPLs (n = 5) +Past 3M MNPLs (n = 16) +Significance + +Novelty^{a}^ compared with competition +9.6 +8.0 +0.21 + +Originality/newness of customer needs addressed^{a}^ +8.3 +7.9 +0.78 + +% market share in year 5 +68% +61% +0.76 + +Estimated sales in year 5 (deflated for forecast error) +$146m^{b}^ +$62m^{b}^ +0.04 + +Potential for entire product family^{a}^ +10.0 +9.4 +0.38 + +Operating profit +22% +27% +0.41 + +Probability of success +80% +87% +0.35 + +Strategic importance^{a}^ +9.6 +8.5 +0.39 + +Intellectual property protection^{a}^ +7.1 +7.4 +0.81 + +Fit with distribution channels^{a}^ +8.8 +8.4 +0.77 + +Fit with manufacturing capabilities^{a}^ +7.8 +6.7 +0.53 + +Fit with strategic plan^{a}^ +9.8 +8.7 +0.32 + +}table + +Source: Lilien et al. 2002, table 4.<:br> +a. Measured on a scale from 1 to 10.<:br> +b. Five-year sales forecasts for all major product lines commercialized in 1994 or later (5 LU and 2 non-LU major product lines) have been deflated by 25% in line with 3M historical forecast error experience (see text). Five-year sales figures for major product lines commercialized before 1994 are actual historical sales data. 
This data has been converted to 1999 dollars using the Consumer Price Index from the Economic Report of the President (Council of Economic Advisors 2000). + +!_ Discussion + +The performance comparison between lead user and "find a need and fill it" idea-generation projects at 3M showed remarkably strong advantages associated with searching for ideas among lead users in advanced analog fields with needs similar to, but even more extreme than, needs encountered in the intended target market. The direction of this outcome is supported by findings from three other real-world industrial applications of lead user idea-generation methods that studied lead users in the target market but not in advanced analog markets. I briefly describe these three studies next. They each appear to have generated primarily next-generation products--- valuable for firms, but not the basis for radically new major product lines. + +%% + +_* Recall that Urban and von Hippel (1988) tested the relative commercial attractiveness of product concepts developed in the field of computer-aided systems for the design of printed circuit boards (PC-CAD). One of the concepts they tested contained novel features proposed by lead users that had innovated in the PC-CAD field in order to serve in-house need. The attractiveness of the "lead user concept" was then evaluated by a sample of 173 target-market users of PC-CAD systems relative to three other concept choices---one of which was a description of the best system then commercially available. Over 80 percent of the target-market users were found to prefer the concept incorporating the features developed by innovating lead users. Their reported purchase probability for a PC-CAD system incorporating the lead user features was 51 percent, over twice as high as the purchase probability indicated for any other system. 
The target-market users were also found willing to pay twice as much for a product embodying the lead user features than for PC-CAD products that did not incorporate them. +={Urban, G.;Printed circuit CAD software} + +_* Herstatt and von Hippel (1992) documented a lead user project seeking to develop a new line of pipe hangers---hardware used to attach pipes to the ceilings of commercial buildings. Hilti, a major manufacturer of construction-related equipment and products, conducted the project. The firm introduced a new line of pipe hanger products based on the lead user concept and a post-study evaluation has shown that this line has become a major commercial success for Hilti. +={Herstatt, C.;Pipe hanger hardware} + +_* Olson and Bakke (2001) report on two lead user studies carried out by Cinet, a leading IT systems integrator in Norway, for the firm's two major product areas, desktop personal computers, and Symfoni application GroupWare. These projects were very successful, with most of the ideas incorporated into next-generation products having been collected from lead users. +={Bakke, G.;Olson, E.} + +Active search for lead users that have innovated enables manufacturers to more rapidly commercialize lead user innovations. One might think that an alternative approach would be to identify lead users before they have innovated. Alert manufacturers could then make some prior arrangements to get preferred access to promising user-developed innovations by, for example, purchasing promising lead user organizations. I myself think that such vertical integration approaches are not practical. As was shown earlier, the character and attractiveness of innovations lead users may develop is based in part on the particular situations faced by and information stocks held by individual lead users. 
User innovation is therefore likely to be a widely distributed phenomenon, and it would be difficult to predict in advance which users are most likely to develop very valuable innovations. +={Manufacturers:lead users and+8} + +How do we square these findings with the arguments, put forth by Christensen (1997), by Slater and Narver (1998), and by others, that firms are likely to miss radical or disruptive innovations if they pay close attention to requests from their customers? Christensen (1997, p. 59, n. 21) writes: "The research of Eric von Hippel, frequently cited as evidence of the value of listening to customers, indicates that customers originate a large majority of new product ideas. . . . The [Christensen] value network framework would predict that the innovations toward which the customers in von Hippel's study led their suppliers would have been sustaining innovations. We would expect disruptive innovations to have come from other sources." +={Christensen, C.+2;Narver, J.;Slater, S.} + +Unfortunately, the above contains a basic misunderstanding of my research findings. My findings, and related findings by others as well, deal with innovations by lead users, not customers, and /{lead users are a much broader category than customers of a specific firm}/. Lead users that generate innovations of interest to manufacturers can reside, as we have seen, at the leading edges of target markets, and also in advanced analog markets. The innovations that some lead users develop are certainly disruptive from the viewpoint of some manufacturers---but the lead users are unlikely to care about this. After all, they are developing products to serve their own needs. Tim Berners-Lee, for example, developed the World Wide Web as a lead user working at CERN---a user of that software. The World Wide Web was certainly disruptive to the business models of many firms, but this was not Berners-Lee's concern. 
Lead users typically have no reason to lead, mislead, or even contact manufacturers that might eventually benefit from or be disrupted by their innovations. Indeed, the likely absence of a preexisting customer relationship is the reason that manufacturing firms must search for lead user innovations /{outside}/ their customer lists---as 3M did in its lead user idea generation studies. "Listening to the voice of the customer" is /{not}/ the same thing as seeking out and learning from lead users (Danneels 2004). +={Berners-Lee, T.;Danneels, E.;Lead users:3M and;3M Corporation;Custom products:manufacturers and+2|users and+2;Innovation:distributed process of+2|functional sources of+2} + +That basic misunderstanding aside, I do agree with Christensen and others that a manufacturer may well receive mainly requests for sustaining innovations from its /{customers}/. As was discussed in chapter 4, manufacturers have an incentive to develop innovations that utilize their existing capabilities---that are "sustaining" for them. Customers know this and, when considering switching to a new technology, are unlikely to request it from a manufacturer that would consider it to be disruptive: they know that such a manufacturer is unlikely to respond positively. The net result is that manufacturers' inputs from their existing customers may indeed be biased towards requests for sustaining innovations. + +I conclude this chapter by reminding the reader that studies of the sources of innovation show clearly that users will tend to develop some types of innovations but not all. It therefore makes sense for manufacturers to partition their product-development strategies and portfolios accordingly. They may wish, for example, to move away from actual new product development and search for lead users' innovations in the case of functionally novel products. 
At the same time manufacturers may decide to continue to develop products that do /{not}/ require high-fidelity models of need information and use environments to get right. One notable category of innovations with this characteristic is dimension-of-merit improvements to existing products. Sometimes users state their needs for improved products in terms of dimensions on which improvements are desired---dimensions of merit. As an example, consider that users may say "I want a computer that is as fast and cheap as possible." Similarly, users of medical imaging equipment may say "I want an image that is of as high a resolution as is technically possible." If manufacturers (or users) cannot get to the end point desired by these users right away, they will instead progressively introduce new product generations that move along the dimension of merit as rapidly and well as they can. Their rate of progress is determined by the rate at which /{solution}/ technologies improve over time. This means that sticky solution information rather than sticky need information is central to development of dimension-of-merit improvements. Manufacturers will tend to have the information they need to develop dimension of merit innovations internally. +={Manufacturers:dimensions-of-merit product improvements and;Sticky information:dimensions-of-merit product improvements and|innovation and} + +1~ 11 Application: Toolkits for User Innovation and Custom Design +={Users:innovation and+62;Custom products:manufacturers and+62|toolkits and+62;Manufacturers:innovation and+62;Toolkits+62:innovation and+62;Users:toolkits and+62} + +An improved understanding of the relative innovation capabilities of users and manufacturers can enable designs for more effective joint innovation processes. Toolkits for user innovation and custom design illustrate this possibility. 
In this new innovation process design, manufacturers actually /{abandon}/ their efforts to understand users' needs accurately and in detail. Instead, they outsource only /{need-related}/ innovation tasks to their users, who are equipped with appropriate toolkits. This process change differs from the lead user search processes discussed earlier in an interesting way. Lead user searches identify existing innovations, but do nothing to change the conditions affecting user-innovators at the time a new product or service is being developed. Toolkits for users, in contrast, do change the conditions potential innovators face. By making innovation cheaper and quicker for users, they can increase the volume of user innovation. They also can channel innovative effort into directions supported by toolkits. +={Toolkits:characteristics of+3} + +In this chapter, I first explore why toolkits are useful. Next, I describe how to create an appropriate setting for toolkits and how toolkits function in detail. Finally, I discuss the conditions under which toolkits are likely to be of most value. + +!_ Benefits from Toolkits + +Toolkits for user innovation and design are integrated sets of product-design, prototyping, and design-testing tools intended for use by end users. The goal of a toolkit is to enable non-specialist users to design high-quality, producible custom products that exactly meet their needs. Toolkits often contain "user-friendly" features that guide users as they work. They are specific to a type of product or service and a specific production system. For example, a toolkit provided to customers interested in designing their own, custom digital semiconductor chips is tailored precisely for that purpose---it cannot be used to design other types of products. 
Users apply a toolkit in conjunction with their rich understanding of their own needs to create a preliminary design, simulate or prototype it, evaluate its functioning in their own use environment, and then iteratively improve it until they are satisfied. +={Toolkits:manufacturers and+58|user-friendly tools for;Information asymmetries+10} + +A variety of manufacturers have found it profitable to shift the tasks of custom product design to their customers along with appropriate toolkits for innovation. Results to date in the custom semiconductor field show development time cut by 2/3 or more for products of equivalent complexity and development costs cut significantly as well via the use of toolkits. In 2000, more than $15 billion worth of custom integrated circuits were sold that had been designed with the aid of toolkits---often by circuit users---and produced in the "silicon foundries" of custom semiconductor manufacturers such as LSI (Thomke and von Hippel 2002). International Flavors and Fragrances (IFF), a global supplier of specialty flavors to the food industry, has built a toolkit that enables its customers to modify flavors for themselves, which IFF then manufactures. In the materials field, GE provides customers with Web-based tools for designing better plastic products. In software, a number of consumer product companies provide toolkits that allow people to add custom-designed modules to their standard products. For example, Westwood Studios provides its customers with toolkits that enable them to design important elements of their own video games (Jeppesen 2005). +={Jeppesen, L.;Thomke, S.;von Hippel, E.+9;Toolkits:GE and|International Flavors and Fragrances and} + +The primary function of toolkits for user design is to co-locate product-development and service-development tasks with the sticky information needed to execute them. 
Need-intensive tasks involved in developing a particular type of product or service are assigned to users, along with the tools needed to carry those tasks out. At the same time, solution-intensive tasks are assigned to manufacturers. +={Toolkits:users and+55|sticky information and+8;Sticky information:toolkits and+8} + +As was discussed in chapter 5, problem solving in general, and product and service development in particular, is carried out via repeated cycles of learning by trial and error. When each cycle of a trial-and-error process requires access to sticky information located at more than one site, colocation of problem-solving activity with sticky information is achieved by repeatedly shifting problem solving to the relevant sticky information sites as product development proceeds. +={Trial-and-error problem solving;Toolkits:trial-and-error learning in} + +For example, suppose that need information is sticky at the site of the potential product user and that solution information is sticky at the site of the manufacturer. A user may initiate a development project by drawing on local user-need information to specify a desired new product or service (figure 11.1). This information is likely to be sticky at least in part. Therefore, the user, even when exerting best efforts, will supply only partial and partially correct need and use-context information to the manufacturer. The manufacturer then applies its solution information to the partially accurate user information and creates a prototype that it thinks is responsive to the need and sends it to the user for testing. If the prototype is not satisfactory (and it often is not), the product is returned to the manufacturer for refinement. Typically, as empirical studies show (Tyre and von Hippel 1997; Kristensen 1992), sites of sticky need and / or solution information are repeatedly revisited as problem solvers strive to reach a satisfactory product design (figure 11.2). 
+={Kristensen, P.;Tyre, M.} + +%% Figure 11.1 +% Toolkits 149 +% Manufacturer +% activity +% User +% activity +% User iterates until satisfied. +% User draws on local need +% information to specify +% desired product or service. +% User draws on local need and +% context of use information to +% evaluate prototype. +% User changes specifications as +% needed. +% Manufacturer draws on +% local capability information +% to develop prototype +% responsive to specifications. +% Manufacturer iterates until +% user is satisfied. +% User-manufacturer +% boundary +% Figure 11.1 +% A pattern of problem solving often encountered in product and service development. + + +{di_evh_f11-1.png}image + +Figure 11.1 + +%% Figure 11.2 +% 0 1 2 3 4 5 6 +% 7 +% 0 +% 22 +% 7 +% 15 +% 30 +% Number of shifts +% Percent +% of +% sample +% Figure 11.2 +% Shifts in the location of problem solving from user site to lab observed during process +% machine debugging. Source: Tyre and von Hippel 1993, figure 2. + +{di_evh_f11-2.png}image + +Figure 11.2 + +Explicit management of user-manufacturer iterations has been built into a number of modern product-development processes. In the rapid application development method (Martin 1991), manufacturers learn to respond to initial user need inputs by quickly developing a partial prototype of a planned product containing the features likely to be most important to users. They deliver this to users, who apply it in their own setting to clarify their needs. Users then relay requests for changes or new features to the product developers, and this process is repeated until an acceptable fit between need and solution is found. Such iteration has been found to "better satisfy true user requirements and produce information and functionality that is more complete, more accurate, and more meaningful" (Connell and Shafer 1989). 
+={Connell, J.;Martin, J.;Shafer, L.} + +Even with careful management, however, iterative shifts in problem solving between users and manufacturer-based developers involve significant coordination costs. For example, a manufacturer's development team may be assigned to other tasks while it waits for user feedback, and so will not be immediately able to resume work on a project when needed feedback is received. It would be much better still to eliminate the need for cross-boundary iteration between user and manufacturer sites during product development, and this is what toolkits for user design are intended to do. The basic idea behind toolkits for user design is, as was mentioned earlier, to partition an overall product-development task into subproblems, each drawing on only one locus of sticky information. Then, each task is assigned to the party already having the sticky information needed to solve it. In this approach, both the user and the manufacturer still engage in iterative, trial-and-error problem solving to solve the problems assigned to them. But this iteration is internal to each party---no costly and time-consuming cross-boundary iteration between user and manufacturer is required (von Hippel 1998, 2001; Thomke and von Hippel 2002; von Hippel and Katz 2002). +={Katz, R.;Thomke, S.;Task partitioning+10} + +To appreciate the major advantage in problem-solving speed and efficiency that concentrating problem solving within a single locus can create, consider a familiar example: the contrast between conducting financial strategy development with and without "user-operated" financial spreadsheet software: + +_* Before the development of easy-to-use financial spreadsheet programs such as Lotus 1-2-3 and Microsoft Excel, a firm's chief financial officer might have carried out a financial strategy development exercise as follows. First, the CFO would have asked an assistant to develop an analysis incorporating a list of assumptions. 
A few hours or days might elapse before the result was delivered. Then the CFO would use her rich understanding of the firm and its goals to study the analysis. She would typically almost immediately spot some implications of the patterns developed, and would then ask for additional analyses to explore these implications. The assistant would take the new instructions and go back to work while the CFO switched to another task. When the assistant returned, the cycle would repeat until a satisfactory outcome was found. +={Microsoft} + +_* After the development of financial spreadsheet programs, a CFO might begin an analysis by asking an assistant to load up a spreadsheet with corporate data. The CFO would then "play with" the data, trying out various ideas and possibilities and "what if" scenarios. The cycle time between trials would be reduced from days or hours to minutes. The CFO's full, rich information would be applied immediately to the effects of each trial. Unexpected patterns---suggestive to the CFO but often meaningless to a less knowledgeable assistant---would be immediately identified and explored further. + +It is generally acknowledged that spreadsheet software that enables expert users to "do it themselves" has led to better outcomes that are achieved faster (Levy 1984; Schrage 2000). The advantages are similar in the case of product and service development. Learning by doing via trial and error still occurs, of course, but the cycle time is much faster because the complete cycle of need-related learning is carried out at a single (user) site earlier in the development process. +={Levy, S.;Schrage, M.;Trial-and-error problem solving+15} + +!_ Repartitioning of Development Tasks +={Toolkits:task partitioning+5} + +To create the setting for a toolkit, one must partition the tasks of product development to concentrate need-related information in some and solution-related information in others. 
This can involve fundamental changes to the underlying architecture of a product or service. As illustration, I first discuss the repartitioning of the tasks involved in custom semiconductor chip development. Then, I show how the same principles can be applied in the less technical context of custom food design. + +Traditionally, fully customized integrated circuits were developed in an iterative process like that illustrated in figure 11.1. The process began with a user specifying the functions that the custom chip was to perform to a manufacturer of integrated circuits. The chip would then be designed by manufacturer employees, and an (expensive) prototype would be produced and sent to the user. Testing by the user would typically reveal faults in the chip and/or in the initial specification, responsive changes would be made, and a new prototype would be built. This cycle would continue until the user was satisfied. In this traditional manufacturer-centered development process, manufacturers' development engineers typically incorporated need-related information into the design of both the fundamental elements of a circuit---such as transistors---and the electrical "wiring" that interconnected those elements into a functioning circuit. + +The brilliant insight that allowed custom design of integrated circuits to be partitioned into solution-related and need-related subtasks was made by Mead and Conway (1980). They determined that the design of a digital chip's fundamental elements, such as its transistors, could be made standard for all circuits. This subtask required rich access to the manufacturer's sticky solution information regarding how semiconductors are fabricated, but did not require detailed information on users' specific needs. It could therefore be assigned to manufacturer-based chip-design and chip-fabrication engineers. 
It was also observed that the subtask of interconnecting standard circuit elements into a functioning integrated circuit required only sticky, need-related information about a chip's function---for example, whether it was to function as a microprocessor for a calculator or as a voice chip for a robotic dog. This subtask was therefore assigned to users along with a toolkit that enabled them to do it properly. In sum, this new type of chip, called a gate array, had a novel architecture created specifically to separate the problem-solving tasks requiring access to a manufacturer's sticky solution information from those requiring access to users' sticky need information. +={Conway, L.;Mead, C.;Toolkits:characteristics of} + +The same basic principle can be illustrated in a less technical context: food design. In this field, manufacturer-based designers have traditionally undertaken the entire job of developing a novel food, and so they have freely blended need-specific design into any or all of the recipe-design elements wherever convenient. For example, manufacturer-based developers might find it convenient to create a novel cake by both designing a novel flavor and texture for the cake body, and designing a complementary novel flavor and texture into the frosting. However, it is possible to repartition these same tasks so that only a few draw on need-related information, and these can then be more easily transferred to users. + +The architecture of the pizza pie illustrates how this can be done. Many aspects of the design of a pizza, such as the dough and the sauce, have been made standard. User choice has been restricted to a single task: the design of toppings. In other words, all need-related information that is unique to a particular user has been linked to the toppings-design task only. 
Transfer of this single design task to users can still potentially offer creative individuals a very large design space to play in (although pizza shops typically restrict it sharply). Any edible ingredient one can think of, from eye of newt to edible flowers, is a potential topping component. But the fact that need-related information has been concentrated within only a single product-design task makes it much easier to transfer design freedom to the user. + +!_ The Functionality of Toolkits +={Toolkits:characteristics of+2} + +If a manufacturer outsources need-intensive design tasks to users, it must also make sure that users have the information they need to carry out those tasks effectively. This can be done via a toolkit for user innovation. Toolkits are not new as a general concept---every manufacturer equips its own engineers with a set of tools suitable for developing the type of products or services it wishes to produce. Toolkits for users also are not new---many users have personal collections of tools that they have assembled to help them create new items or modify standard ones. For example, some users have woodworking tools ranging from saws to glue which can be used to create or modify furniture---in very novel or very standard ways. Others may have a kit of software tools needed to create or modify software. What is new, however, is integrated toolkits enabling users to create /{and}/ test designs for custom products or services that can then be produced "as is" by manufacturers. + +Present practice dictates that a high-quality toolkit for user innovation will have five important attributes. (1) It will enable users to carry out complete cycles of trial-and-error learning. (2) It will offer users a solution space that encompasses the designs they want to create. (3) It will be user friendly in the sense of being operable with little specialized training. 
(4) It will contain libraries of commonly used modules that users can incorporate into custom designs. (5) It will ensure that custom products and services designed by users will be producible on a manufacturer's production equipment without modification by the manufacturer. + +!_ Learning through Trial and Error +={Toolkits:trial-and-error learning in+5} + + +It is crucial that user toolkits for innovation enable users to go through complete trial-and-error cycles as they create their designs. Recall that trial-and-error problem solving is essential to product development. For example, suppose that a user is designing a new custom telephone answering system for her firm, using a software-based computer-telephony integration (CTI) design toolkit provided by a vendor. Suppose also that the user decides to include a new rule to "route all calls of X nature to Joe" in her design. A properly designed toolkit would allow her to temporarily place the new rule into the telephone system software, so that she could actually try it out (via a real test or a simulation) and see what happened. She might discover that the solution worked perfectly. Or she might find that the new rule caused some unexpected form of trouble---for example, Joe might be flooded with too many calls---in which case it would be "back to the drawing board" for another design and another trial. + +In the same way, toolkits for innovation in the semiconductor design field allow users to design a circuit that they think will meet their needs and then test the design by "running" it in the form of a computer simulation. This quickly reveals errors that the user can then quickly and cheaply fix using toolkit-supplied diagnostic and design tools. For example, a user might discover by testing a simulated circuit design that a switch needed to adjust the circuit had been forgotten and make that discovery simply by trying to make a needed adjustment. 
The user could then quickly and cheaply design in the needed switch without major cost or delay. + +One can appreciate the importance of giving the user the capability for trial-and-error learning by doing in a toolkit by thinking about the consequences of not having it. When users are not supplied with toolkits that enable them to draw on their local, sticky information and engage in trial-and-error learning, they must actually order a product and have it built to learn about design errors---typically a very costly and unsatisfactory way to proceed. For example, automobile manufacturers allow customers to select a range of options for their cars, but they do not offer the customer a way to learn during the design process and before buying. The cost to the customer is unexpected learning that comes too late: "That wide-tire option did look great in the picture. But now that the car has been delivered, I discover that I don't like the effect on handling. Worse, I find that my car is too wide to fit into my garage!" + +Similar disasters are often encountered by purchasers of custom computers. Many custom computer manufacturers offer a website that allows users to "design your own computer online." However, these websites do not allow users to engage in trial-and-error design. Instead, they simply allow users to select computer components such as processor chips and disk drives from lists of available options. Once these selections have been made, the design transaction is complete and the computer is built and shipped. The user has no way to test the functional effects of these choices before purchase and first field use---followed by celebration or regret. + +In contrast, a sophisticated toolkit for user innovation would allow the user to conduct trial-and-error tests to evaluate the effects of initial choices made and to improve on them. 
For example, a computer design site could add this capability by enabling users to actually test and evaluate the hardware configuration they specify on their own programs and computing tasks before buying. To do this, the site might, for example, provide access to a remote computer able to simulate the operation of the computer that the user has specified, and provide performance diagnostics and related choices in terms meaningful to the user (e.g., "If you add option x at cost y, the time it takes to complete your task will decrease by z seconds"). The user could then modify or confirm initial design choices according to trade-off preferences only he or she knows. + +!_ Appropriate Solution Spaces +={Toolkits:solution spaces and+3} + +Economical production of custom products and services is achievable only when a custom design falls within the pre-existing capability and degrees of freedom built into a particular manufacturer's production system. My colleagues and I call this the /{solution space}/ offered by that system. A solution space may vary from very large to small, and if the output of a toolkit is tied to a particular production system, then the design freedom that a toolkit can offer a user will be accordingly large or small. For example, the solution space offered by the production process of a manufacturer of custom integrated circuits offers a huge solution space to users---it will produce any combination of logic elements interconnected in any way that a user-designer might desire, with the result that the user can invent anything from a novel type of computer processor to a novel silicon organism within that space. However, note that the semiconductor production process also has stringent limits. It will only implement product designs expressed in terms of semiconductor logic---it will not implement designs for bicycles or houses. 
Also, even within the arena of semiconductors, it will only be able to produce semiconductors that fit within a certain range with respect to size and other properties. Another example of a production system offering a very large solution space to designers---and, potentially to user-designers via toolkits---is the automated machining center. Such a device can basically fashion any shape out of any machinable material that can be created by any combination of basic machining operations such as drilling and milling. As a consequence, toolkits for innovation intended to create designs that can be produced by automated machining centers can offer users access to that very large solution space. + +Large solution spaces can typically be made available to user-designers when production systems and associated toolkits allow users to manipulate and combine relatively basic and general-purpose building blocks and operations, as in the examples above. In contrast, small solution spaces typically result when users are only allowed to combine a relatively few pre-designed options. Thus, users who want to design their own custom automobiles are restricted to a relatively small solution space: they can only make choices from lists of options regarding such things as engines, transmissions, and paint colors. Similarly, purchasers of eyeglasses are restricted to combining "any frame from this list" of pre-designed frames, with "any lens type from that list" of pre-designed options. + +The reason producers of custom products or services enforce constraints on the solution space that user-designers may use is that custom products can be produced at reasonable prices only when custom user designs can be implemented by simply making low-cost adjustments to the production process. This condition is met within the solution space on offer. However, responding to requests that fall outside that space will require small or large additional investments by the manufacturer. 
For example, a producer of integrated circuits may have to invest many millions of dollars and rework an entire production process in order to respond to a customer's request for a larger chip that falls outside the solution space associated with its present production equipment. + +!_ User-Friendly Tools +={Toolkits:user-friendly tools for+6} + +User toolkits for innovation are most effective and successful when they are made "user friendly" by enabling users to use the skills they already have and to work in their own customary and well-practiced design language. This means that users don't have to learn the---typically different---design skills and language customarily used by manufacturer-based designers, and so they will require much less training to use the toolkit effectively. + +For example, in the case of custom integrated circuit design, the users of toolkits are typically electrical engineers who are designing electronic systems that will incorporate custom semiconductor chips. The digital design language normally used by electrical engineers is Boolean algebra. Therefore, user-friendly toolkits for custom semiconductor design are provided that allow toolkit users to design in this language. That is, users can create a design, test how it works, and make improvements using only their own, customary design language. At the conclusion of the design process, the toolkit then translates the user's logical design into the design inputs required by the semiconductor manufacturer's production system. + +A design toolkit based on a language and skills and tools familiar to the user is only possible to the extent that the user /{has}/ familiarity with some appropriate and reasonably complete language and set of skills and tools. 
Interestingly, this is the case more frequently than one might initially suppose, at least in terms of the /{function}/ that a user wants a product or service to perform---because functionality is the face that the product or a service presents to the user. (Indeed, an expert user of a product or service may be much more familiar with that functional face than manufacturer-based experts.) Thus, the user of a custom semiconductor is the expert in what he or she wants that custom chip to /{do}/, and is skilled at making complex tradeoffs among familiar functional elements to achieve a desired end: "If I increase chip clock speed, I can reduce the size of my cache memory and. . . ." + +As a less technical example, consider the matter of designing a custom hairstyle. There is certainly a great deal of information known to hairstylists that even an expert user may not know, such as how to achieve a certain look by means of layer cutting, or how to achieve a certain streaked color pattern by selectively dying some strands of hair. However, an expert user is often very well practiced at the skill of examining the shape of his or her face and hairstyle as reflected in a mirror, and visualizing specific improvements that might be desirable in matters such as curls, shape, or color. In addition, the user will be very familiar with the nature and functioning of everyday tools used to shape hair, such as scissors and combs. + +A user-friendly toolkit for hairstyling innovation can be built upon these familiar skills and tools. For example, a user can be invited to sit in front of a computer monitor, and study an image of her face and hairstyle as captured by a video camera. Then, she can select from a palette of colors and color patterns offered on the screen, can superimpose the effect on her existing hairstyle, can examine it, and can repeatedly modify it in a process of trial-and-error learning. 
Similarly, the user can select and manipulate images of familiar tools, such as combs and scissors, to alter the image of the length and shape of her own hairstyle as projected on the computer screen, can study and further modify the result achieved, and so forth. Note that the user's new design can be as radically new as is desired, because the toolkit gives the user access to the most basic hairstyling variables and tools such as hair color and scissors. When the user is satisfied, the completed design can be translated into technical hairstyling instructions in the language of a hairstyling specialist---the intended production system in this instance. + +In general, steady improvements in computer hardware and software are enabling toolkit designers to provide information to users in increasingly friendly ways. In earlier days, information was often provided to users in the form of specification sheets or books. The user was then required to know when a particular bit of information was relevant to a development project, find the book, and look it up. Today, a large range of potentially needed information can be embedded in a computerized toolkit, which is programmed to offer the user items of information only if and as a development being worked on makes them relevant. + +!_ Module Libraries +={Toolkits:module libraries for+1} + +Custom designs seldom are novel in all their parts. Therefore, a library of standard modules will be a valuable part of a toolkit for user innovation. Provision of such standard modules enables users to focus their creative work on those aspects of their product or service designs that cannot be implemented via pre-designed options. For example, architects will find it very useful to have access to a library of standard components, such as a range of standard structural support columns with pre-analyzed structural characteristics, that they can incorporate into their novel building designs. 
Similarly, users who want to design custom hairstyles will often find it helpful to begin by selecting a hairstyle from a toolkit library. The goal is to select a style that has some elements of the desired look. Users can then proceed to develop their own desired style by adding to and subtracting from that starting point. + +!_ Translating Users' Designs for Production + +The "language" of a toolkit for user innovation must be convertible without error into the language of the intended production system at the conclusion of the user's design work. If it is not, the entire purpose of the toolkit will be lost---because a manufacturer receiving a user design will essentially have to do the design work over again. Error-free translation need not emerge as a major problem---for example, it was never a major problem during the development of toolkits for integrated circuit design, because both chip designers and chip producers already used a language based on digital logic. In contrast, in some fields, translating from the design language preferred by users to the language required by intended production systems can be /{the}/ central problem in toolkit design. As an illustration, consider a recent toolkit test project managed by Ernie Gum, the Director of Food Product Development for the USA FoodServices Division of Nestlé. +={Gum, E.+5;Toolkits:Nestlé and+5} + +One major business of Nestlé FoodServices is producing custom food products, such as custom Mexican sauces, for major restaurant chains. Custom foods of this type have traditionally been developed by or modified by the chains' executive chefs, using what are in effect design and production toolkits taught by culinary schools: recipe development procedures based on food ingredients available to individuals and restaurants, and processed with restaurant-style equipment. 
After using their traditional toolkits to develop or modify a recipe for a new menu item, executive chefs call in Nestlé FoodServices or another custom food producer and ask that firm to manufacture the product they have designed---and this is where the language translation problem rears its head. + +There is no error-free way to translate a recipe expressed in the language of a traditional restaurant-style culinary toolkit into the language required by a food-manufacturing facility. Food factories must use ingredients that can be obtained in quantity at consistent quality. These are not the same as, and may not taste quite the same as, the ingredients used by the executive chef during recipe development. Also, food factories use volume production equipment, such as huge steam-heated retorts. Such equipment is very different from restaurant-style stoves and pots and pans, and it often cannot reproduce the cooking conditions created by the executive chef on a stove-top---for example, very rapid heating. Therefore, food-production factories cannot simply produce a recipe developed by or modified by an executive chef "as is" under factory conditions---it will not taste the same. + +As a consequence, even though an executive chef creates a prototype product using a traditional chef's toolkit, food manufacturers find most of that information---the information about ingredients and processing conditions---useless because it cannot be straightforwardly translated into factory-relevant terms. The only information that can be salvaged is the information about taste and texture contained in the prototype. And so, production chefs carefully examine and taste the customer's custom food prototype, then try to make something that tastes the same using factory ingredients and methods.
But an executive chef's taste buds are not necessarily the same as production chef taste buds, and so the initial factory version---and the second and the third---is typically not what the customer wants. So the producer must create variation after variation until the customer is finally satisfied. + +To solve the translation problem, Gum created a novel toolkit of pre-processed food ingredients to be used by executive chefs during food development. Each ingredient in the toolkit was the Nestlé factory version of an ingredient traditionally used by chefs during recipe development: That is, it was an ingredient commercially available to Nestlé that had been processed as an independent ingredient on Nestlé factory equipment. Thus, a toolkit designed for developing Mexican sauces would contain a chili puree ingredient processed on industrial equipment identical to that used to produce food in commercial-size lots. (Each ingredient in such a toolkit also contains traces of materials that will interact during production---for example, traces of tomato are included in the chili puree---so that the taste effects of such interactions will also be apparent to toolkit users.) + +Chefs interested in using the Nestlé toolkit to prototype a novel Mexican sauce would receive a set of 20--30 ingredients, each in a separate plastic pouch. They would also be given instructions for the proper use of these ingredients. Toolkit users would then find that each component differs slightly from the fresh components he or she is used to. But such differences are discovered immediately through direct experience. The chef can then adjust ingredients and proportions to move to the desired final taste and texture. When a recipe based on toolkit components is finished, it can be immediately and precisely reproduced by Nestlé factories---because now the executive chef is using the same language as the factory.
In the Nestlé case, field testing by Food Product Development Department researchers showed that adding the error-free translation feature to toolkit-based design by users reduced the time of custom food development from 26 weeks to 3 weeks by eliminating repeated redesign and refinement interactions between Nestlé and purchasers of its custom food products. + +!_ Discussion + +A toolkit's success in the market is significantly correlated with that toolkit's quality and with industry conditions. Thus, Prügl and Franke (2005) studied the success of 100 toolkits offered in a single industry: computer gaming. They found that success, evaluated by independent experts, was significantly correlated with the quality of execution of the attributes of toolkits that have been discussed in this chapter. That is, success was found to be significantly affected by the quality of trial-and-error learning enabled by a toolkit, by the quality of fit of the solution space offered to users' design problems, by the user friendliness of the tools provided, and by the quality of module libraries offered with the toolkit. Schreier and Franke (2004) also obtained information on the importance of toolkit quality in a study of the value that users placed on consumer products (scarves, T shirts, cell phone covers) customized with a simple, manufacturer-supplied toolkit. They found user willingness to pay for custom designs, as measured by Vickrey auctions, was significantly negatively affected by the difficulty of creating custom designs with a toolkit. In contrast, willingness to pay was significantly positively affected by enjoyment experienced in using a toolkit. 
+={Franke, N.;Prügl, R.;Schreier, M.;Trial-and-error problem solving;Custom products:heterogeneity of user needs and+3;User need+3} + +With respect to industry and market conditions, the toolkit-for-user innovation approach to product design is likely to be most appealing to toolkit suppliers when the heterogeneous needs of /{many}/ users can be addressed by a standard solution approach encoded in a toolkit. This is because it can be costly to encode all the solution and production information relevant to users' design decisions. For example, a toolkit for custom semiconductor design must contain information about the semi-conductor production process needed to ensure that product designs created by users are in fact producible. Encoding such information is a one-time cost, so it makes the best economic sense for solution approaches that many will want to use. + +Toolkits for user innovation are not an appropriate solution for all product needs, even when heterogeneous needs can be addressed by a common solution approach. Specifically, toolkits will not be the preferred approach when the product being designed requires the highest achievable performance. Toolkits incorporate automated design rules that cannot, at least at present, translate designs into products or software as skillfully as a human designer can. For example, a design for a gate array generated with a toolkit will typically take up more physical space on a silicon chip than would a fully custom-developed design of similar complexity. Even when toolkits are on offer, therefore, manufacturers may continue to design certain products (those with difficult technical demands) while customers take over the design of others (those involving complex or rapidly evolving user needs). + +Toolkits can be designed to offer a range of capabilities to users. 
At the high end, with toolkits such as those used to design custom integrated circuits, users can truly innovate, creating anything implementable in digital electronics, from a dishwasher controller to a novel supercomputer or form of artificial life. At the low end, the product configurators commonly offered by manufacturers of mass-customized products enable, for example, a watch purchaser to create a custom watch by selecting from lists of pre-designed faces, hands, cases, and straps. (Mass-customized production systems can manufacture a range of product variations in single-unit quantities at near mass-production costs (Pine 1993). In the United States, production systems used by these manufacturers are generally based on computerized production equipment.) +={Pine, J.} + +The design freedom provided by toolkits for user innovation may not be of interest to all or even to most users in a market characterized by heterogeneous needs. A user must have a great enough need for something different to offset the costs of putting a toolkit to use for that approach to be of interest. Toolkits may therefore be offered only to a subset of users. In the case of software, toolkits may be provided to all users along with a standard, default version of the product or service, because the cost of delivering the extra software is essentially zero. In such a case, the toolkit's capability will simply lie unused in the background unless and until a user has sufficient incentive to evoke and employ it. +={Lead users:toolkits and+3;Toolkits:lead users and+3} + +Provision of toolkits to customers can be a complement to lead user idea-generation methods for manufacturers. Some users choosing to employ a toolkit to design a product precisely right for their own needs will be lead users, whose present strong need foreshadows a general need in the market. 
Manufacturers can find it valuable to identify and acquire the generally useful improvements made by lead users of toolkits, and then supply these to the general market. For this reason, manufacturers may find it valuable to implement toolkits for innovation even if the portion of the target market that can directly use them is relatively small. + +Toolkits can affect existing business models in a field in ways that may or may not be to manufacturers' competitive advantage in the longer run. For example, consider that many manufacturers of products and services profit from both their design capabilities and their production capabilities. A switch to user-based customization via toolkits can affect their ability to do this over the long term. Thus, a manufacturer that is early in introducing a toolkit approach to custom product or service design may initially gain an advantage by tying that toolkit to its particular production facility. However, when toolkits are made available to customer designers, this tie often weakens over time. Customers and independent tool developers can eventually learn to design toolkits applicable to the processes of several manufacturers. Indeed, this is precisely what has happened in the custom integrated circuit industry. The toolkits revealed to users by the initial innovator, LSI, and later by rival producers were producer-specific. Over time, however, Cadence and other specialist toolkit supply firms emerged and developed toolkits that could be used to make designs producible by a number of vendors. The end result is that manufacturers that previously benefited from selling their product-design skills and their production skills can be eventually forced by the shifting of design tasks to customers via toolkits to a position of benefiting from their production skills only.
+ +Manufacturers that think long-term disadvantages may accrue from a switch to toolkits for user innovation and design will not necessarily have the luxury of declining to introduce toolkits. If any manufacturer introduces a high-quality toolkit into a field favoring its use, customers will tend to migrate to it, forcing competitors to follow. Therefore, a firm's only real choice in a field where conditions are favorable to the introduction of toolkits may be whether to lead or to follow. + +1~ 12 Linking User Innovation to Other Phenomena and Fields + +This final chapter is devoted to describing links between user-centered innovation and other phenomena and literatures. Of course, innovation writ large is related to anything and everything, so the phenomena and the literatures I will discuss here are only those hanging closest on the intellectual tree. My goal is to enable interested readers to migrate to further branches as they wish, assisted by the provision of a few important references. With respect to phenomena, I will first point out the relationship of user innovation to /{information}/ communities---of which user innovation communities are a subset. With respect to related fields, I begin by linking user-centric innovation phenomena explored in this book to the literature on the economics of knowledge, and to the competitive advantage of nations. Next I link it to research on the sociology of technology. Finally, I point out how findings regarding user innovation could---but do not yet---link to and complement the way that product development is taught to managers. +={Information commons;Information communities;Product development;Technical communities} + +!_ Information Communities +={Information commons+8;Information communities+8} + +Many of the considerations I have discussed with respect to user innovation communities apply to /{information}/ communities as well---a much more general category of which user innovation communities are a subset. 
I define information communities as communities or networks of individuals and/or organizations that rendezvous around an information commons, a collection of information that is open to all on equal terms. +={Technical communities+1} + +In close analogy to our discussions of innovation communities, I propose that commons-based information communities or networks will form when the following conditions hold: (1) Some have information that is not generally known. (2) Some are willing to freely reveal what they know. (3) Some beyond the information source have uses for what is revealed. On an intuitive basis, one can immediately see that these conditions are often met. Of course, people and firms know different things. Of course there are many things that one would not be averse to freely revealing; and of course others would often be interested in what is freely revealed. After all, as individuals we all regularly freely reveal information not generally known to people who ask, and presumably these people value at least some of the information we provide. +={Free revealing of innovation information:in information communities+3} + +The economics of information communities can be much simpler than that of the user innovation communities discussed earlier, because valuable proprietary information is often not at center stage. When the service provided by information communities is to offer non-proprietary "content" in a more convenient and accessible form, one need consider only the costs and benefits associated with information diffusion. One need not also consider potential losses associated with the free revealing of proprietary innovation-related information. + +It is likely that information communities are getting steadily more pervasive for the same reasons that user innovation communities are: the costs of diffusing information are getting steadily lower as computing and communication technologies improve. 
As a result, information communities may have a rapidly increasing impact on the economy and on the landscape of industry. They are and will be especially empowering to fragmented groups, whose members may for the first time gain low-cost access to a great deal of rich and fresh information of mutual interest. As is the case for user innovation networks, information networks can actually store content that participants freely reveal and make it available for free downloading. (Wikipedia is an example of this.) And/or, information networks can function to link information seekers and information holders rather than actually storing information. In the latter case, participants post to the network, hoping that someone with the requested information will spot their request and provide an answer (Lakhani and von Hippel 2003). Prominent examples can be found in the medical field in the form of specialized websites where patients with relatively rare conditions can for the first time find each other and also find specialists in those conditions. Patients and specialists who participate in these groups can both provide and get access to information that previously was scattered and for most practical purposes inaccessible. +={Lakhani, K.;Wikipedia;von Hippel, E.} + +Just as is the case in user innovation groups, open information communities are developing rapidly, and the behaviors and infrastructure needed for success are being increasingly learned and codified. These communities are by no means restricted to user-participants. Thus, both patients and doctors frequently participate in medical information communities. Also, information communities can be run by profit-making firms and/or on a non-profit basis for and by information providers and users themselves---just as we earlier saw was the case with innovation communities. Firms and users are developing many versions of open information communities and testing them in the market.
As an example of a commercially supported information commons, consider e-Bay, where information is freely revealed by many under a structure provided by a commercial firm. The commercial firm then extracts a profit from commissions on transactions consummated between information providers and information seekers. As an example of an information community supported by users themselves, again consider Internet sites specializing in specific diseases---for example, childrenfacingillness.com. +={Marketing research+1} + +Information communities can have major effects on established ways of doing business. For example, markets become more efficient as the information provided to transaction participants improves. Thus, product and service manufacturers benefit from good information on the perceptions and preferences of potential buyers. Similarly, product and service purchasers benefit from good information on the characteristics of the various offerings in the market. Traditionally, firms have collected information on users' needs and on products' characteristics by means of face-to-face interviewing and (in the case of mass markets) questionnaires. Similar information of high quality now can be collected nearly without cost and can be posted on special Internet sites by users themselves and/or by for-profit enterprises. Dellarocas, Awad, and Zhang (2004) show that volunteered online movie reviews provide information that is just as accurate as that collected by surveys of representative samples of respondents. This emerging new approach to data aggregation will clearly affect the established business models of firms specializing in information collection, with websites like www.ciao.co.uk illustrating new possibilities. If the quality of information available to transaction participants goes up and the information price is low, transaction quality should go up. 
With the aid of online product-evaluation sites, it is likely that consumers will be able to apply much better information even to small buying decisions, such as the choice of a restaurant for tonight's dinner. +={Awad, N.;Dellarocas, C.;Zhang, X.} + +What Paul David and colleagues call "open science" is a type of information community that is closely related to the innovation communities discussed earlier (David 1992; Dasgupta and David 1994; David 1998). Free revealing of findings is, of course, a characteristic of modern science. Academic scientists publish regularly and so freely reveal information that may have high proprietary value. This raises the same question explored in the case of innovation communities: Why, in view of the potential of free ridership, do scientists freely reveal the information they have developed at private cost? The answer overlaps with but also differs from the answers provided in the case of free revealing of proprietary innovations by innovation users. With respect to similarities, sociologists of science have found that reputation among peers is important to scientists, and that priority in the discovery of new knowledge is a major component of reputation. Because of the importance of priority, scientists generally rush their research projects to completion and then rush to freely reveal their new findings. This dynamic creates a great advantage from the point of view of social welfare (Merton 1973). +={Dasgupta, P.;David, P.;Merton, Robert;Free revealing of innovation information:in information communities+1;Intellectual property rights:information communities and;Users:free revealing by+1|information communities and+1} + +With respect to major differences, it is public policy in many countries to subsidize research with public funds. These policies are based on the assumption that only inadequate amounts of scientific research can be drawn forth by reputational inducements alone.
Recall that, in contrast, innovations developed and freely revealed by innovation users are not subsidized from any source. Users, unlike "scientists," by definition have a personal or corporate use for the innovation-related knowledge they generate. This additional source of private reward may explain why user innovation communities can flourish without subsidy. +={Knowledge, production and distribution of} + +!_ The Economics of Knowledge +={Knowledge, production and distribution of+7;Users:knowledge and+7} + +In this field, Foray (2004) provides a rich road map regarding the economics of knowledge and the central role played by users. Foray argues that the radical changes in information and communication technologies (ICT) are creating major changes in the economics of knowledge production and distribution. Economists have traditionally reduced knowledge production to the function of research and development, defined as the activity specifically devoted to invention and innovation. Starting with Machlup (1962), economists also have identified the knowledge-based economy as consisting of specialized sectors focused on activities related to communication, education, the media, and computing and information-related services. Foray argues that these simplifications, although providing a rationale for a way to measure knowledge-generation activities, were never appropriate and now are totally misleading. +={Machlup, F.;Foray, D.+2} + +Knowledge generation, Foray says, is now a major activity across all industrial sectors and is by no means restricted to R&D laboratories: we are in the age of the knowledge economy. He makes a central distinction between R&D that is conducted in laboratories remote from doing, and learning by doing at the site of production. He argues that both are important, and have complementary advantages and drawbacks. Laboratory research can ignore some of the complexities involved in production in search of basic understanding. 
Learning by doing has the contrasting advantage of being in the full fidelity of the real production process. The drawback to learning by doing, however, is that one is attempting to do two things at once---producing and learning---and this can force compromises onto both. + +Foray positions users at the heart of knowledge production. He says that one major challenge for management is to capture the knowledge being generated by users "on line" during the process of doing and producing, and to integrate it with knowledge created "off line" in laboratories. He discusses implications of the distributed nature of knowledge production among users and others, and notes that the increased capabilities of information and communication technologies tend to reduce innovators' ability to control the knowledge they create. He proposes that the most effective knowledge-management policies and practices will be biased toward knowledge sharing. + +Weber (2004, pp. 72--73) explores similar ideas in the specific context of open source software. "The conventional language of industrial-era economics," he notes, "identifies producers and consumers, supply and demand. The open source process scrambles these categories. Open source software users are not consumers in the conventional sense. . . . Users integrate into the production process itself in a profound way." Weber's central thesis is that the open source process is a new way of organizing production: +={Weber, S.;Open source software:knowledge and} + +_1 One solution is the familiar economy that depends upon a blend of exclusive property rights, divisions of labor, reduction of transaction costs, and the management of principal-agent problems. The success of open source demonstrates the importance of a fundamentally different solution, built on top of an unconventional understanding of property rights configured around distribution. . . . 
And it relies on a set of organizational structures to coordinate behavior around the problem of managing distributed innovation, which is different from the division of labor. (ibid., p. 224) + +Weber details the property-rights regime used by open source projects, and also the nature of open source innovation communities and incentives acting on participants. He then argues that this new mode of production can extend beyond the development of open source software, to an extent and a degree that are not yet understood: +={Weber, S.;Open source software:knowledge and} + +One important direction in which the open source experiment points is toward moving beyond the discussion of transaction as a key determinant of institutional design. . . . The elegant analytics of transaction cost economics do very interesting work in explaining how divisions of labor evolve through outsourcing of particular functions (the decision to buy rather than make something). But the open source process adds another element. The notion of open-sourcing as a strategic organizational decision can be seen as an efficiency choice around distributed innovation, just as outsourcing was an efficiency choice around transactions costs. . . . As information about what users want and need to do becomes more fine-grained, more individually differentiated, and harder to communicate, the incentives grow to shift the locus of innovation closer to them by empowering them with freely modifiable tools. (ibid., pp. 265--267) + +!_ National Competitive Advantage +={Government policy:and national competitive advantage+6;Manufacturers:and national competitive advantage+6;National competitive advantage+6:See also Government policy;Users:national competitive advantage and+6} + +Understanding national innovation systems and the competitive advantage of a nation's firms is an important matter for national policy makers (Nelson 1993). Can what we have learned in this book shed any light on their concerns? 
Porter (1991), assessing national competitive advantage through the intellectual lens of competitive strategy, concludes that one of four major factors determining the competitive advantage of nations is demand conditions. "A nation's firms," he argues, "gain competitive advantage if domestic buyers are, or are among, the world's most sophisticated and demanding buyers for the product or service. Such buyers provide a window into the most advanced buyer needs. . . . Buyers are demanding where home product needs are especially stringent or challenging because of local circumstances." For example: "The continental United States has been intensely drilled, and wells are being drilled in increasingly difficult and marginal fields. The pressure has been unusually great for American oil field equipment suppliers to perfect techniques that minimize the cost of difficult drilling and ensure full recovery from each field. This has pushed them to advance the state of the art and sustain strong international positions." (ibid., pp. 89--90) +={Nelson, R.;Porter, M.+5} + +Porter also argues that /{early}/ domestic demand is also important: "Provided it anticipates buyer needs in other nations, early local demand for a product or service in a nation helps local firms to move sooner than foreign rivals to become established in an industry. They get the jump in building large-scale facilities and accumulating experience. . . . Only if home demand is anticipatory of international need will home demand contribute to advantage." (ibid., p. 95) + +From my perspective, Porter is making the case for the value of a nation's domestic lead users to national competitive advantage. However, he is also assuming that it is /{manufacturers}/ that innovate in response to advanced or stringent user demand. 
On the basis of the findings reported on in this book, I would modify this assumption by noting that, often, domestic manufacturers' links to /{innovating lead users}/ have the impacts on national competitive advantage that he describes---but that the lead users' input to favored domestic firms would include innovations as well as needs. + +Domestic lead users make a difference to national competitive advantage, Porter argues, because "local firms often enjoy some natural advantages in serving their home market compared to foreign firms, a result of proximity as well as language, regulation, and cultural affinities (even, frequently, if foreign firms are staffed with local nationals)." Porter continues: "Preferred access to a large domestic customer base can be a spur to investment by local firms. Home demand may be perceived as more certain and easier to forecast, while foreign demand is seen as uncertain even if firms think they have the ability to fill it." (ibid., p. 93) + +What new insights and research questions can the work of this book contribute to this analysis of national competitive advantage? On the one hand, I certainly see the pattern Porter describes in some studies of lead user innovation. For example, early in the history of the US semiconductor industry, AT&T, the inventor of the transistor and an early innovator, developed a number of novel types of production equipment as a user organization. AT&T engineers went to local machine shops to have these machines produced in volume to meet AT&T's in-house production needs. A side effect of this procurement strategy was to put many of these previously undistinguished firms into the business of producing advanced semi-conductor equipment to the world (von Hippel 1977, 1988). 
+={von Hippel, E.} + +On the other hand, the findings of this book suggest that the "natural advantages" Porter proposes that domestic manufacturers will have with respect to filling the needs of local lead users may be eroding in the Internet age. As has been seen in the case of open source software, and by extension in the cases of other information-based products, users are capable of developing complex products in a coordinated way without geographic proximity. Participants in a particular open source project, for example, may come from a number of countries and may never meet face to face. In the case of physical products, the emergence of a pattern of user-based design followed by "foundry-style" production may also reduce the importance of propinquity between innovating lead users and manufacturers. As in the cases of integrated circuits and kitesurfing discussed earlier in this book, users can transmit CAD product-design information files from anywhere to any suitably equipped manufacturer for production. Probably only in the case of physical products where the interaction between product and production methods is not clear will geography continue to matter deeply in the age of the Internet. Nations may be able to create comparative advantages for domestic manufacturers with respect to profiting from innovation by lead users; however, they cannot assume that such advantages will continue to exist simply because of propinquity.
+={Custom products:product platforms and|users and;Innovation communities:open source software and;Kitesurfing;Open source software;Printed circuit CAD software;Open source software:innovation communities and;Toolkits:platform products and;Users:custom products and} + +!_ The Sociology of Technical Communities +={Information commons+8;Innovation communities:sociology of+8;Technical communities+8} + +Relevant elements of this field include studies in the sociology of technology in general and studies of the sociology of open source software communities in particular. Historical accounts of the evolution of a technology have often taken a linear view of their subject. In the linear view, a technology such as aerodynamics and related technological artifacts such as the airplane start at point A and then naturally evolve to end point B. In other words, it is implicitly assumed that the airplane will evolve from the artifact of wood and fabric and wire developed by the Wright brothers to the characteristics we associate with aircraft today. Nothing much to explain about that. +={Open source software;Open source software:innovation communities and} + +In the Social Construction of Technology (SCOT) model of technological evolution (Pinch and Bijker 1987), the direction in which an artifact (a product, for example) evolves depends very much on the meanings that different "groups with a problem" construct for it. These meanings, in turn, affect which of the many possible variations of a product are developed, how they evolve, and whether and how they eventually die. Groups that construct the meanings of a product centrally include, but are not restricted to, product users. For example, in the case of the bicycle, some relevant groups were users of various types---people who wanted to travel from place to place via bicycle, people who wanted to race bicycles, etc. 
Relevant non-user groups included "anticyclists," who had a negative view of the bicycle in its early days and wanted it to fail (Bijker 1995). +={Bijker, W.;Pinch, T.+4;Custom products:users and} + +When one takes the views of all relevant groups into account, one gets a much richer view of the "socially constructed" evolution of a technology. As a relatively recent example, consider the supersonic transport plane (SST) planned in the United States during the 1970s. Airlines and potential passengers were "groups with a problem" who presumably wanted the technology for different reasons. Other relevant groups with a problem included people who expected to be negatively affected by the sonic boom the SST would cause, people who were concerned about the pollution its engines would cause in the stratosphere, and people who had other reasons for opposing or supporting the SST. Proposed designs evolved in an attempt to satisfy the various contending interest groups. Eventually it became clear that the SST designers could not arrive at a generally acceptable compromise solution and so the project failed (Horwich 1982). +={Horwich, M.} + +Pinch and Kline (1996, pp. 774--775) elaborated on the original SCOT model by pointing out that the way a product is interpreted is not restricted to the design stage of a technology, but also can continue during the product's use. They illustrated with the case of the automobile: . . . +={Kline, R.+2} + +_1 although [automobile] manufacturers may have ascribed a particular meaning to the artifact they were not able to control how that artifact was used once it got into the hands of the users. Users precisely as users can embed new meanings into the technology. This happened with the adaptation of the car into rural life. As early as 1903, farm families started to define the car as more than a transportation device. In particular, they saw it as a general source of power.
George Schmidt, a Kansas farmer, advised readers of the /{Rural New Yorker}/ in 1903 to "block up the hind axle and run a belt over the one wheel of the automobile and around the wheel on a [corn] sheller, grinder, saw, pump, or any other machine that the engine is capable of running, and see how the farmer can save money and be in style with any city man." T. A. Pottinger, an Illinois farm man, wrote in /{Wallace's Farmer}/ in 1909 that "the ideal farm car should have a detachable backseat, which could turn the vehicle into a small truck." Other Phenomena and Fields 173 +={Pottinger, T.;Schmidt, G.} + +Of course, user innovations and modifications are involved in these cases along with users' reinterpretation of product uses. Kline and Pinch report that manufacturers adopted some of the rural users' innovations, generally after a lag. For example, a car that could also serve as a small truck was eventually offered as a commercial product. +={Users:innovation and+13;Manufacturers:innovation and} + +Research on communities of practice offers another link between studies of user innovation and sociology (Brown and Duguid 1991; Wenger 1998). The focus of this research is on the functioning of specialist communities. Researchers find that experts in a field spontaneously form interest groups that communicate to exchange their views and learnings on how to carry out and improve the practices of their profession. Members of communities of practice exchange help in informal ways that seem similar to the practices described above as characteristic of open source software projects and communities of sports innovators. +={Brown, J.;Duguid, P.;Wenger, E.;Open source software} + +Research on brand communities is still another related research thread (Muniz and O'Guinn 2001). Brand communities form around commercial brands and products (e.g., Lego construction toys) and even around products discontinued by their manufacturers (e.g., Apple's Newton personal digital assistant).
Brand communities can be intensely meaningful to participants and can involve user innovation. In Newton groups, for example, users develop new applications and exchange information about how to repair aging equipment (Muniz and Schau 2004). In Lego communities, lead users develop new products, new building techniques, and new offline and online multiplayer building projects that later prove to be of interest to the manufacturer (Antorini 2005). +={Antorini, Y.;Brand communities;Muniz, A.;O'Guinn, T.;Schau, H.;Innovation communities:brand and} + +!_ The Management of Product Development +={Product development+10} + +Finally, I turn to links between user-centered innovation and teaching on the management of product development. Information on lead users as a source of new product ideas now appears in most marketing textbooks. There also should be a link to other elements of user-centered innovation processes in the literature on product-development management---but there really isn't much of one yet. Although much of the research on user innovation cited in this book is going on in schools of management and business economics, little of this information has moved into teaching related to the product-development process as of yet. + +Clearly, it would be useful to provide managers of both user firms and manufacturing firms with a better understanding of the management of user-centered innovation. It is a curious fact that even managers of firms that have built major product lines upon user-developed innovations may hold the manufacturer-centric view that "we developed that." For example, an early study of innovation in scientific instruments documented that nearly 80 percent of the major improvements commercialized by instrument manufacturers had been developed by users (von Hippel 1976). When I later discussed this finding with managers in instrument firms, most of them were astonished. 
They insisted that all the innovations in the study sample had been developed within manufacturing firms. They could be convinced otherwise only when supplied with actual publications by user-scientists describing user-built prototypes of those instrument improvements---prototypes developed from 5 to 7 years before any instrument firm had sold a functionally equivalent commercial product. +={von Hippel, E.} + +My inquiries into why managers in this field and others held---and largely still hold---such contrary-to-fact beliefs identified several contributing factors. First, manufacturers seldom track where the major new products and product improvements they sell actually came from. Managers see no need to set up a tracking system, because the conventional wisdom is clear: "Everyone knows new products are developed by manufacturers such as ourselves based on user needs identified by market research." Further, the manufacturing firms have market-research and product-development departments in place, and innovations are somehow being produced. Thus, it is easy to conclude that the manufacturers' innovation processes must be working as expected. + +In fact, however, important, functionally novel innovations are often brought into manufacturers by informal channels. Product-development engineers may attend conferences and learn about important user innovations, salesmen and technical service personnel discover user-modified equipment on field visits, and so on. Once the basic innovation-related information is in house, the operating principles of a user's prototype will often be adopted, but the detailed design of the device will be changed and improved for production. After a while, the user's prototype, if remembered at all, will begin to look quite primitive to the firm's engineers relative to the much better product they have designed. Finally, when sales begin, the firm's advertising will urge customers to buy "/{our}/ wonderful new product." 
Other Phenomena and Fields 175 + +The net result is understandable: the user roots of many new commercial products, never widely known in manufacturing firms, are forgotten. And when it is time to develop the next innovation, management again turns to the conventional methods that "worked so well for us last time." Eventually, information about new user innovations will again arrive by pathways unnoticed and unmanaged---and with an unnecessary lag. + +To improve matters, managers must learn when it is appropriate to follow user-centered and manufacturer-centered innovation process paradigms and how user-centered innovation can best be managed when it is the method of choice. Managers in user firms and in manufacturing firms need tools with which to understand the innovate-or-buy decisions they face---to understand which product needs or which service needs users (rather than manufacturers) should invest in developing. Managers in user firms also need to learn how their firms can best carry out development work in their low-cost innovation niches: how they can best deploy their information-related advantages of being actual users and residing in the context of use to cheaply learn by doing. Managers in manufacturing firms will want to learn how they can best play a profitable role in user-centered innovation patterns when these play a role in the markets they serve. +={Sticky information:toolkits and+1;Users:innovate-or-buy decisions by|low-cost innovation niches of} + +Innovating users may also want to learn whether and how to diffuse their innovations by becoming manufacturers. This may be a fairly common practice in some fields. Shah (2000) found that users of sports equipment sometimes became manufacturers by a very natural process. The users would demonstrate the performance and value of their innovations as they used them in public sporting events. Some of the participants in the meets would then ask "Can you make one of those for me too?" 
Informal hobby-level production would then sometimes become the basis of a major company. Lettl, Herstatt, and Gemünden (2004) report on case histories in which user-innovators became heavily involved in promoting the commercialization of important innovations in surgical equipment. These innovations tended to be developed by surgeons, who then often made major efforts to induce manufacturers to commercialize them. Hienerth (2004) documents how user-innovators in "rodeo kayaking" build their own boats, discover that kayak manufacturers (even those established by a previous generation of user-innovators) are unwilling to manufacture what they want, and so are driven to become manufacturers themselves. +={Gemünden, H.;Lettl, C.;Herstatt, C.;Hienerth, C.;Shah, S.;Sporting equipment:lead users and;Windsurfing;Surgical equipment} + +Managers must learn that no single locus of innovation is the "right" one for either user firms or manufacturer firms. The locus of innovation varies between user firms and manufacturing firms according to market-related and information-related conditions. These conditions may well vary predictably over product life cycles. Utterback and Abernathy (1975) proposed that innovation by users is likely to be more important in the early stages of such cycles. Early in the life of a new product, there is a "fluid" stage in which the nature and the use of a product are unclear. Here, Utterback and Abernathy say, users play a big part in sorting the matter out, in part through innovation. Later, a dominant product design will emerge---a shared sense of exactly what a particular product is, what features and components it should include, and how it should function. (We all know, for example, that a car has four wheels and moves along the ground in directions determined by a steering wheel.) 
After that time, if the market for the product grows, innovation will shift from product to process as firms shift from the problem of what to produce to the problem of how to produce a well-understood product in ever greater volumes. From a lead user innovation perspective, of course, both functionally novel products and functionally novel processes are likely to be developed by users---in the first case users of the product, and in the second by manufacturing firms that use the process. +={Abernathy, W.;Utterback, J.} + +!_ In Conclusion + +In this book I have explored how and why users, individually and in firms and in communities, develop and freely reveal innovations. I have also argued that there is a general trend toward an open and distributed innovation process driven by steadily better and cheaper computing and communications. The net result is an ongoing shift toward the democratization of innovation. This welfare-enhancing shift is forcing major changes in user and manufacturer innovation practices, and is creating the need for change in government policies. It also, as I noted at the start of the book, presents major new opportunities for us all. Other Phenomena and Fields 177 + +1~ Notes + +!_ Chapter 2 + +1. LES contains four types of measures. Three ("benefits recognized early," "high benefits expected," and "direct elicitation of the construct") contain the core components of the lead user construct. The fourth ("applications generation") is a measure of a number of innovation-related activities in which users might engage: they "suggest new applications," they "pioneer those applications," and (because they have needs or problems earlier than their peers) they may be "used as a test site" (Morrison, Midgely, and Roberts 2004). + +!_ Chapter 3 + +1. Cluster analysis does not specify the "right" number of clusters---it simply segments a sample into smaller and smaller clusters until the analyst calls a halt.
Determining an appropriate number of clusters within a sample can be done in different ways. Of course, it is always possible to say that "I only want to deal with three market segments, so I will stop my analysis when my sample has been segmented into three clusters." More commonly, analysts will examine the increase of squared error sums of each step, and generally will view the optimal number of clusters as having been reached when the plot shows a sudden "elbow" (Myers 1996). Since this technique does not incorporate information on remaining within-cluster heterogeneity, it can lead to solutions with a large amount of within-cluster variance. The "cubic clustering criterion" (CCC) partially addresses this concern by measuring the within-cluster homogeneity relative to the between-cluster heterogeneity. It suggests choosing the number of clusters where this value peaks (Milligan and Cooper 1985). However, this method appears to be rarely used: Ketchen and Shook (1996) found it used in only 5 of 45 segmentation studies they examined. + +2. http://groups-beta.google.com/group/comp.infosystems.www.servers.unix + +3. http://modules.apache.org/ + +4. To measure heterogeneity, Franke and I analyzed the extent to which j standards, varying from [1; i], meet the needs of the i individuals in our sample. Conceptually, we first locate a product in multi-dimensional need space (dimensions = 45 in the case of our present study) that minimizes the distances to each individual's needs. (This step is analogous to the Ward's method in cluster analysis that also minimizes within cluster variation; see Punj and Stewart 1983.) The "error" is then measured as the sum of squared Euclidean distances. We then repeated these steps to determine the error for two optimally positioned products, three products, and so on up to a number equaling I -- 1.
The sum of squared errors for all cases is then a simple coefficient that measures how much the needs of i individuals can be satisfied with j standard products. The "coefficient of heterogeneity" just specified is sensitive both to the (average) distance between the needs and to the configuration of the needs: when the needs tend to form clusters the heterogeneity coefficient is lower than if they are evenly spread. To make the coefficient comparable across different populations, we calibrate it using a bootstrapping technique (Efron 1979) involving dividing the coefficient by the expected value (this value is generated by averaging the heterogeneity of many random distributions of heterogeneity of the same kind). The average random heterogeneity coefficient is then an appropriate value for calibration purposes: it assumes that there is no systematic relationship between the needs of the individuals or between the need dimensions. + +5. Conceptually, it can be possible to generate "one perfect product" for everyone--- in which case heterogeneity of demand is zero---by simply creating all the features wanted by anyone (45 + 92 features in the case of this study), and incorporating them in the "one perfect product." Users could then select the features they want from a menu contained in the one perfect product to tailor it to their own tastes. Doing this is at least conceptually possible in the case of software, but less so in the case of a physical product for two reasons: (1) delivering all possible physical options to everyone who buys the product would be expensive for physical goods (while costing nothing extra in the case of information products); (2) some options are mutually exclusive (an automobile cannot be both red and green at the same time). + +6. The difference between actual willingness to pay and expressed willingness to pay is much lower for private goods (our case) than for public goods. In the case of private goods, Loomis et al.
(1996) found the expressed willingness to pay for art prints to be twice the actual WTP. Willis and Powe (1998) found that among visitors to a castle the expressed WTP was 60 percent lower than the actual WTP. In the case of public goods, Brown et al. (1996), in a study of willingness to pay for removal of a road from a wilderness area, found the expressed WTP to be 4--6 times the actual WTP. Lindsey and Knaap (1999), in a study of WTP for a public urban greenway, found the expressed WTP to be 2--10 times the actual WTP. Neil et al. (1994) found the expressed WTP for conserving an original painting in the desert to be 9 times the actual WTP. Seip and Strand (1992) found that less than 10 percent of those who expressed interest in paying to join an environmental organization actually joined. + +!_ Chapter 6 + +1. As a specific example of a project with an emergent goal, consider the beginnings of the Linux open source software project. In 1991, Linus Torvalds, a student in Finland, wanted a Unix operating system that could be run on his PC, which was equipped with a 386 processor. Minix was the only software available at that time but it was commercial, closed source, and it traded at US$150. Torvalds found this too expensive, and started development of a Posix-compatible operating system, later known as Linux. Torvalds did not immediately publicize a very broad and ambitious goal, nor did he attempt to recruit contributors. He simply expressed his private motivation in a message he posted on July 3, 1991, to the USENET newsgroup comp.os.minix (Wayner 2000): Hello netlanders, Due to a project I'm working on (in minix), I'm interested in the posix standard definition. [Posix is a standard for UNIX designers. A software using POSIX is compatible with other UNIX-based software.] Could somebody please point me to a (preferably) machine-readable format of the latest posix-rules? Ftp-sites would be nice.
In response, Torvalds got several return messages with Posix rules and people expressing a general interest in the project. By early 1992, several skilled programmers contributed to Linux and the number of users increased by the day. Today, Linux is the largest open source development project extant in terms of number of developers. +={Linux} + +!_ Chapter 7 + +1. When they do not incorporate these qualities, they would be more properly referred to as networks---but communities is the term commonly used, and I follow that practice here. + +2. hacker n. [originally, someone who makes furniture with an axe] 1. A person who enjoys exploring the details of programmable systems and how to stretch their capabilities, as opposed to most users, who prefer to learn only the minimum necessary. 2. One who programs enthusiastically (even obsessively) or who enjoys programming rather than just theorizing about programming. 3. A person capable of appreciating hack value. 4. A person who is good at programming quickly. . . . 8. [deprecated] A malicious meddler who tries to discover sensitive information by poking around. Hence password hacker, network hacker. The correct term for this sense is cracker (Raymond 1996). + +3. Source code is a sequence of instructions to be executed by a computer to accomplish a program's purpose. Programmers write computer software in the form of source code, and also document that source code with brief written explanations of the purpose and design of each section of their program. To convert a program into a form that can actually operate a computer, source code is translated into machine code using a software tool called a compiler. The compiling process removes program documentation and creates a binary version of the program---a sequence of computer instructions consisting only of strings of ones and zeros. Binary code is very difficult for programmers to read and interpret.
Therefore, programmers or firms that wish to prevent others from understanding and modifying their code will release only binary versions of the software. In contrast, programmers or firms that wish to enable others to understand and update and modify their software will provide them with its source code. (Moerke 2000, Simon 1996). + +4. See www.gnu.org/licenses/licenses.html#GPL + +5. http://www.sourceforge.net + +6. "The owner(s) [or `maintainers'] of an open source software project are those who have the exclusive right, recognized by the community at large, to redistribute modified versions. . . . According to standard open source licenses, all parties are equal in the evolutionary game. But in practice there is a very well-recognized distinction between `official' patches [changes to the software], approved and integrated into the evolving software by the publicly recognized maintainers, and `rogue' patches by third parties. Rogue patches are unusual and generally not trusted." (Raymond 1999, p. 89) + +!_ Chapter 8 + +1. See also Bresnahan and Greenstein 1996b; Bresnahan and Saloner 1997; Saloner and Steinmueller 1996. + +!_ Chapter 10 + +1. ABS braking is intended to keep a vehicle's wheels turning during braking. ABS works by automatically and rapidly "pumping" the brakes. The result is that the wheels continue to revolve rather than "locking up," and the operator continues to have control over steering. + +2. In the general literature, Armstrong's (2001) review on forecast bias for new product introduction indicates that sales forecasts are generally optimistic, but that that upward bias decreases as the magnitude of the sales forecast increases. Coller and Yohn (1998) review the literature on bias in accuracy of management earnings forecasts and find that little systematic bias occurs. Tull's (1967) model calculates $15 million in revenue as a level above which forecasts actually become pessimistic on average. 
We think it reasonable to apply the same deflator to LU vs. non-LU project sales projections. Even if LU project personnel were for some reason more likely to be optimistic with respect to such projections than non-LU project personnel, that would not significantly affect our findings. Over 60 percent of the total dollar value of sales forecasts made for LU projects were actually made by personnel not associated with those projects (outside consulting firms or business analysts from other divisions). + +1~ Bibliography + +Achilladelis, B., A. B. Robertson, and P. Jervis. 1971. /{Project SAPPHO}/. Centre for the Study of Industrial Innovation, London. + +Aghion, P., and J. Tirole. 1994. "The Management of Innovation." /{Quarterly Journal of Economics}/ 109: 1185--1209. + +Allen, R. C. 1983. "Collective Invention." /{Journal of Economic Behavior and Organization}/ 4, no. 1: 1--24. + +Allen, T. J. 1966. "Studies of the Problem-Solving Process in Engineering Design." /{IEEE Transactions on Engineering Management}/ 13, no. 2: 72--83. + +Amabile, T. M. 1996. /{Creativity in Context}/. Westview. + +Antelman, Kristin. 2004. "Do Open Access Articles Have a Greater Research Impact?" /{College and Research Libraries}/ 65, no. 5: 372--382. + +Antorini, Y. M. 2005. The Making of a Lead User. Working paper, Copenhagen Business School. + +Armstrong, J. S., ed. 2001. /{Principles of Forecasting}/. Kluwer. + +Arora, A., A. Fosfuri, and A. Gambardella. 2001. /{Markets for Technology}/. MIT Press. + +Arora, A., and A. Gambardella. 1994. "The Changing Technology of Technological Change." /{Research Policy}/ 23, no. 5: 523--532. + +Arrow, K. 1962. "Economic Welfare and the Allocation of Resources for Inventions." In R. R. Nelson, ed., /{The Rate and Direction of Inventive Activity}/. Princeton University Press. + +Arundel, A. 2001. "The Relative Effectiveness of Patents and Secrecy for Appropriation." /{Research Policy}/ 30, no. 4: 611--624. Balachandra, R., and J. H. Friar. 1997. 
"Factors for Success in R&D Projects and New Product Introduction: A Contextual Framework." /{IEEE Transactions on Engineering Management}/ 44, no. 3: 276--287. + +Baldwin, C. Y., and K. B. Clark. 2003. Does Code Architecture Mitigate Free Riding in the Open Source Development Model? Working paper, Harvard Business School. + +Barnes, B., and D. Ulin. 1984. "Liability for New Products." /{AWWA Journal}/, February: 44--47. + +Baron, J. 1988. /{Thinking and Deciding}/. Cambridge University Press. + +Behlendorf, B. 1999. "Open Source as a Business Strategy." In C. Dibona, S. Ockman, and M. Stone, eds., /{Open Sources}/. O'Reilly. + +Benkler, Y. 2002. "Intellectual Property and the Organization of Information Production." /{International Review of Law and Economics}/ 22, no. 1: 81--107. + +Bessen, J. 2003. Patent Thickets. Working paper, Research on Innovation and Boston University School of Law. + +Bessen, J. 2004. Open Source Software. Working paper, Research on Innovation. + +Bessen, J., and R. M. Hunt. 2004. An Empirical Look at Software Patents. Working paper, Federal Reserve Bank of Philadelphia. + +Bijker, Wiebe. 1995. /{Of Bicycles, Bakelites and Bulbs}/. MIT Press. + +Boldrin, M., and D. Levine. 2002. "The Case against Intellectual Property." /{AEA Papers and Proceedings}/, May: 209--212. + +Bresnahan, T. F., and S. Greenstein. 1996a. "Technical Progress and Co-Invention in Computing and in the Uses of Computers." /{Brookings Papers on Economic Activity.}/ /{Microeconomics}/ 1996: 1--77. + +Bresnahan, T. F., and S. Greenstein. 1996b. "The Competitive Crash in Large-Scale Commercial Computing." In R. Landau, T. Taylor, and G. Wright, eds., /{The Mosaic of Economic Growth}/. Stanford University Press. + +Bresnahan, T. F., and G. Saloner. 1997. "`Large Firms' Demand for Computer Products and Services: Market Models, Inertia, and Enabling Strategic Change." In D. B. Yoffie, ed., /{Competing in the Age of Digital Convergence. Harvard Business School Press}/. 
+ +Brooks, P. F., Jr. 1979. /{The Mythical Man-Month}/. Addison-Wesley. + +Brown, J. S., and P. Duguid. 1991. "Organizational Learning and Communities-of-Practice: Toward a Unified View of Working, Learning, and Innovation." /{Organization Science}/ 2, no. 1: 40--57. + +Brown, T. C., P. A. Champ, R. C. Bishop, and D. W. McCollum. 1996. "Which Response Format Reveals the Truth about Donations to a Public Good." /{Land Economics}/ 72, no. 2: 152--166. + +Buenstorf, G. 2002. "Designing Clunkers: Demand-Side Innovation and the Early History of Mountain Bike." In J. S. Metcalfe and U. Cantner, eds., /{Change, Transformation and Development}/. Physica. + +Chamberlin, E. H. 1950. "Product Heterogeneity and Public Policy." /{American Economic Review}/ 40, no. 2: 85--92. + +Christensen, C. M. 1997. /{The Innovator's Dilemma}/. Harvard Business School Press. + +Cohen, W. M., A. Goto, A. Nagata, R. R. Nelson, and J. P. Walsh. 2002. "R&D Spillovers, Patents and the Incentives to Innovate in Japan and the United States." /{Research Policy}/ 31 (8--9): 1349--1367. + +Cohen, W. M., and D. A. Levinthal. 1990. "The Implications of Spillovers for R&D Investment and Welfare: A New Perspective." /{Administrative Science Quarterly}/ 35: 128--152. + +Cohen, W. M., R. R. Nelson, and J. P. Walsh. 2000. Protecting Their Intellectual Assets. Working paper, National Bureau of Economic Research. + +Coller, M., and T. L. Yohn. 1998. "Management Forecasts: What Do We Know?" /{Financial Analysts Journal}/ 54, no. 1: 58--62. + +Connell, J. L., and L. B. Shafer. 1989. /{Structured Rapid Prototyping}/. Prentice-Hall. + +Conner, K. R., and C. K. Prahalad. 1996. "A Resource-Based Theory of the Firm: Knowledge versus Opportunism." /{Organization Science}/ 7, no. 5: 477--501. + +Cook, T. D., and D. T. Campbell. 1979. /{Quasi-Experimentation}/. Houghton Mifflin. + +Csikszentmihalyi, M. 1975. /{Beyond Boredom and Anxiety}/. Jossey-Bass. + +Csikszentmihalyi, M. 1990. /{Flow}/. Harper and Row. 
+ +Csikszentmihalyi, M. 1996. /{Creativity}/. HarperCollins. + +Dam, K. W. 1995. "Some Economic Considerations in the Intellectual Property Protection of Software." /{Journal of Legal Studies}/ 24, no. 2: 321--377. + +Danneels, Erwin. 2004. "Disruptive Technology Reconsidered: A Critique and Research Agenda." /{Journal of Product Innovation Management}/ 21: 246--258. + +Dasgupta, P., and P. A. David. 1994. "Toward a New Economics of Science." /{Policy Research}/ 23: 487--521. + +David, P. A. 1992. "Knowledge, Property, and the System Dynamics of Technological Change." /{Proceedings of the World Bank Annual Conference on Development Economics}/ 1992: 215--247. + +David, P. A. 1998. Knowledge Spillovers, Technology Transfers, and the Economic Rationale for Public Support of Exploratory Research in Science. Background paper for European Committee for Future Accelerators. + +de Fraja, G. 1993. "Strategic Spillovers in Patent Races." /{International Journal of Industrial Organization}/ 11, no. 1: 139--146. + +Dellarocas, C., N. F. Awad, and X. (M.) Zhang. 2004. Exploring the Value of Online Reviews to Organizations. Working paper, MIT Sloan School of Management. + +Duke, R. 1988. /{Local Building Codes and the Use of Cost-Saving Methods}/. US Federal Trade Commission, Bureau of Economics. + +Efron, B. 1979. "Bootstrap Methods: Another Look at the Jackknife." /{Annals of Statistics}/ 7: 1--26. + +Ehrenkrantz Group. 1979. /{A Study of Existing Processes for the Introduction of New Products and Technology in the Building Industry}/. US Institute of Building Sciences. + +Elrod, T., and A. P. Kelman. 1987. Reliability of New Product Evaluation as of 1968 and 1981. Working paper, Owen Graduate School of Management, Vanderbilt University. + +Enos, J. L. 1962. /{Petroleum Progress and Profits}/. MIT Press. + +Fleming, L. 2001. "Recombinant Uncertainty in Technological Search." /{Management Science}/ 47, no. 1: 117--132. + +Foray, D. 2004. 
/{Economics of Knowledge}/. MIT Press. + +Franke, N., and H. Reisinger. 2003. Remaining within Cluster Variance. Working paper, Vienna Business University. + +Franke, N., and S. Shah. 2003. "How Communities Support Innovative Activities: An Exploration of Assistance and Sharing Among End-Users." /{Research Policy}/ 32, no. 1: 157--178. + +Franke, N., and E. von Hippel. 2003a. Finding Commercially Attractive User Innovations. Working paper, MIT Sloan School of Management. + +Franke, N., and E. von Hippel. 2003b. "Satisfying Heterogeneous User Needs via Innovation Toolkits: The Case of Apache Security Software." /{Research Policy}/ 32, no. 7: 1199--1215. + +Freeman, C. 1968. "Chemical Process Plant: Innovation and the World Market." /{National Institute Economic Review}/ 45, August: 29--57. + +Friedman, D., and D. McAdam. 1992. "Collective Identity and Activism: Networks, Choices, and the Life of a Social Movement." In A. D. Morris and C. McClurg, eds., /{Frontiers in Social Movement Theory}/. Yale University Press. + +Gallini, N., and S. Scotchmer. 2002. "Intellectual Property: When Is It the Best Incentive System?" In A. Jaffe, J. Lerner, and S. Stern, eds., /{Innovation Policy and the Economy}/, volume 2. MIT Press. + +Green, P. E. 1971. "A New Approach to Market Segmentation." /{Business Horizons}/ 20, February: 61--73. + +Green, P. E., and C. M. Schaffer. 1998. "Cluster-Based Market Segmentation: Some Further Comparisons of Alternative Approaches." /{Journal of the Market Research Society}/ 40, no. 2: 155--163. + +Hall, B. H., and R. Ham Ziedonis. 2001. "The Patent Paradox Revisited: An Empirical Study of Patenting in the US Semiconductor Industry, 1979--1995." /{RAND Journal of Economics}/ 32, no. 1: 101--128. + +Hall, B. H., and D. Harhoff. 2004. "Post-Grant Reviews in the US Patent System: Design Choices and Expected Impact." /{Berkeley Law Technology Journal}/, in press. + +Harhoff, D. 1996. "Strategic Spillovers and Incentives for Research and Development." 
/{Management Science}/ 42, no. 6: 907--925. + +Harhoff, D., J. Henkel, and E. von Hippel. 2003. "Profiting from Voluntary Information Spillovers: How Users Benefit by Freely Revealing Their Innovations." /{Research Policy}/ 32, no. 10: 1753--1769. + +Hecker, F. 1999. "Setting Up Shop: The Business of Open Source Software." /{IEEE Software}/ 16, no. 1: 45--51. + +Heller, M. A. 1998. "The Tragedy of the Anticommons: Property in the Transition from Marx to Markets." /{Harvard Law Review}/ 111: 621--688. + +Heller, M. A., and R. S. Eisenberg. 1998. "Can Patents Deter Innovation? The Anticommons in Biomedical Research." /{Science Magazine}/ 280 (5364): 698--701. + +Henkel, J. 2003. "Software Development in Embedded Linux: Informal Collaboration of Competing Firms." In W. Uhr, W. Esswein, and E. Schoop, eds., /{Proceedings der 6. Internationalen Tagung Wirtschaftsinformatik}/ 2003, volume 2. Physica. + +Henkel, J. 2004a. The Jukebox Mode of Innovation. Discussion paper, CEPR. + +Henkel, J. 2004b. Patterns of Free Revealing. Working paper, University of Munich. + +Henkel, J., and S. Thies. 2003. "Customization and Innovation: User Innovation Toolkits for Simulator Software." In Proceedings of the 2003 Congress on Mass Customization and Personalization (MCPC 2003), Munich. + +Henkel, J., and E. von Hippel. 2005. "Welfare Implications of User Innovation." /{Journal of Technology Transfer}/ 30, no. 1/2: 73--87. + +Herstatt, C., and E. von Hippel. 1992. "From Experience: Developing New Product Concepts via the Lead User Method." /{Journal of Product Innovation Management}/ 9, no. 3: 213--222. + +Hertel, G., S. Niedner, and S. Herrmann. 2003. "Motivation of Software Developers in Open Source Projects: An Internet-Based Survey of Contributors to the Linux Kernel." /{Research Policy}/ 32, no. 7: 1159--1177. + +Hienerth, C. 2004. "The Commercialization of User Innovations: Sixteen Cases in an Extreme Sporting Industry." 
In Proceedings of the 26th R&D Management Conference, Sesimbra, Portugal. + +Hirschleifer, J. 1971. "The Private and Social Value of Information and the Reward to Inventive Activity." /{American Economic Review}/ 61, no. 4: 561--574. + +Hollander, S. 1965. /{The Sources of Increased Efficiency}/. MIT Press. + +Horwich, M. 1982. /{Clipped Wings}/. MIT Press. + +Hunt, R. M., and J. Bessen. 2004. "The Software Patent Experiment." /{Business Review, Federal Reserve Bank of Philadelphia}/ Q3: 22--32. + +Jensen, M. C., and W. H. Meckling. 1976. "Theory of the Firm: Managerial Behavior, Agency Costs, and Ownership Structure." /{Journal of Financial Economics}/ 3, no. 4: 305--360. + +Jeppesen, L. B. 2004. Profiting from Innovative User Communities. Working paper, Department of Industrial Economics and Strategy, Copenhagen Business School. + +Jeppesen, L. B. 2005. "User Toolkits for Innovation: Users Support Each Other." /{Journal of Product Innovation Management}/, forthcoming. + +Jeppesen, L. B., and M. J. Molin. 2003. "Consumers as Co-developers: Learning and Innovation Outside the Firm." /{Technology Analysis and Strategic Management}/ 15, no. 3: 363--84. + +Jokisch, M. 2001. Open Source Software-Entwicklung: Eine Analyse des Geschäftsmodells der STATA Corp. Master's thesis, University of Munich. + +Ketchen, D. J., Jr., and C. L. Shook. 1996. "The Application of Cluster Analysis in Strategic Management Research: An Analysis and Critique." /{Strategic Management Journal}/ 17, no. 6: 441--459. + +Knight, K. E. 1963. A Study of Technological Innovation: The Evolution of Digital Computers. PhD dissertation, Carnegie Institute of Technology. + +Kollock, P. 1999. "The Economies of Online Cooperation: Gifts and Public Goods in Cyberspace." In M. A. Smith and P. Kollock, eds., /{Communities in Cyberspace}/. Routledge. + +Kotabe, M. 1995. "The Return of 7-Eleven . . . from Japan: The Vanguard Program." /{Columbia Journal of World Business}/ 30, no. 4: 70--81. + +Kristensen, P. 
S. 1992. "Flying Prototypes: Production Departments' Direct Interaction with External Customers." /{International Journal of Operations and Production Management}/ 12, no. 2: 195--211. + +Lakhani, K. 2005. Distributed Coordination Practices in Free and Open Source Communities. PhD thesis, Massachusetts Institute of Technology. + +Lakhani, K. R., and E. von Hippel. 2003. "How Open Source Software Works: `Free' User-to-User Assistance." /{Research Policy}/ 32, no. 6: 923--943. + +Lakhani, K. R., and B. Wolf. 2005. "Why Hackers Do What They Do: Understanding Motivation and Effort in Free/Open Source Software Projects." In J. Feller, B. Fitzgerald, S. Hissam, and K. R. Lakhani, eds., /{Perspectives on Free and Open Source Software}/. MIT Press. + +Lerner, J., and J. Tirole. 2002. "Some Simple Economics of Open Source." /{Journal of Industrial Economics}/ 50, no. 2: 197--234. + +Lessig, L. 2001. /{The Future of Ideas}/. Random House. + +Lettl, C., C. Herstatt and H. Gemünden. 2004. The Entrepreneurial Role of Innovative Users. Working paper, Technical University, Berlin. + +Levin, R. C., A. Klevorick, R. R. Nelson, and S. G. Winter. 1987. "Appropriating the Returns from Industrial Research and Development." /{Brookings Papers on Economic Activity}/ 3: 783--820. + +Levy, S. 1984. /{Hackers}/. Doubleday. + +Lilien, G. L., P. D. Morrison, K. Searls, M. Sonnack, and E. von Hippel. 2002. "Performance Assessment of the Lead User Idea-Generation Process for New Product Development." /{Management Science}/ 48, no. 8: 1042--1059. + +Lim, K. 2000. The Many Faces of Absorptive Capacity. Working paper, MIT Sloan School of Management. + +Lindsey, G., and G. Knaap. 1999. "Willingness to Pay for Urban Greenway Projects." /{Journal of the American Planning Association}/ 65, no. 3: 297--313. + +Loomis, J., T. Brown, B. Lucero, and G. Peterson. 1996. 
"Improving Validity Experiments of Contingent Valuation Methods: Results of Efforts to Reduce the Disparity of Hypothetical and Actual Willingness to Pay." /{Land Economics}/ 72, no. 4: 450--461. + +Lüthje, C. 2003. "Customers as Co-Inventors: An Empirical Analysis of the Antecedents of Customer-Driven Innovations in the Field of Medical Equipment." In Proceedings of the 32nd EMAC Conference, Glasgow. + +Lüthje, C. 2004. "Characteristics of Innovating Users in a Consumer Goods Field: An Empirical Study of Sport-Related Product Consumers." /{Technovation}/ 24, no. 9: 683--695. + +Lüthje, C., C. Herstatt, and E. von Hippel. 2002. The Dominant Role of Local Information in User Innovation: The Case of Mountain Biking. Working paper, MIT Sloan School of Management. + +Machlup, F. 1962. /{Knowledge Production and Distribution in the United States}/. Princeton University Press. + +Mansfield, E. 1968. /{Industrial Research and Technological Innovation}/. Norton. + +Mansfield, E. 1985. "How Rapidly Does New Industrial Technology Leak Out?" /{Journal of Industrial Economics}/ 34: 217--223. + +Mansfield, E., J. Rapoport, A. Romeo, S. Wagner and G. Beardsley. 1977. "Social and Private Rates of Return from Industrial Innovations." /{Quarterly Journal of Economics}/ 91, no. 2: 221--240. + +Mansfield, E., A. Romeo, M. Schwartz, D. Teece, S. Wagner and P. Brach. 1982. /{Technology Transfer, Productivity, and Economic Policy}/. Norton. + +Mansfield, E., and S. Wagner. 1975. "Organizational and Strategic Factors Associated With Probabilities of Success in Industrial R&D." /{Journal of Business}/ 48, no. 2: 179--198. + +Marples, D. L. 1961. "The Decisions of Engineering Design." /{IRE Transactions on Engineering Management}/, June: 55--71. + +Martin, J. 1991. /{Rapid Application Development}/. Macmillan. + +Matthews, J. 1985. /{Public Access to Online Catalogs}/, second edition. Neal-Schuman. + +Maurer, S. 2005. 
"Inside the Anticommons: Academic Scientists' Struggle to Commercialize Human Mutations Data, 1999--2001." /{Research Policy}/, forthcoming. + +Mead, C., and L. Conway. 1980. /{Introduction to VLSI Systems}/. Addison-Wesley. + +Means, R. S. 1989. /{Building Construction Cost Data}/ 1989. R. S. Means. + +Merges, R., and R. R. Nelson. 1990. "On the Complex Economics of Patent Scope." /{Columbia Law Review}/ 90: 839--916. + +Merton, R. K. 1973. /{The Sociology of Science}/. University of Chicago Press. + +Meyer, M. H., and L. Lopez. 1995. "Technology Strategy in a Software Products Company." /{Journal of Product Innovation Management}/ 12, no. 4: 194--306. + +Milligan, G. W., and M. C. Cooper. 1985. "An Examination of Procedures for Determining the Number of Clusters in a Data Set." /{Psychometrica}/ 45: 159--179. + +Mishina, K. 1989. Essays on Technological Evolution. PhD thesis, Harvard University. + +Mitchell, R. C., and R. T. Carson. 1989. /{Using Surveys to Value Public Goods}/. Resources for the Future. + +Moerke, K. A. 2000. "Free Speech to a Machine." /{Minnesota Law Review}/ 84, no. +4: 1007--1008. + +Mollick, E. 2004. Innovations from the Underground: Towards a Theory of Parasitic Innovation. Master's thesis, Massachusetts Institute of Technology. + +/{Mountain Bike}/. 1996. /{Mountain Biking Skills}/. Rodale. + +Morrison, P. D., J. H. Roberts, and D. F. Midgley. 2004. "The Nature of Lead Users and Measurement of Leading Edge Status." /{Research Policy}/ 33, no. 2: 351--362. + +Morrison, P. D., J. H. Roberts, and E. von Hippel. 2000. "Determinants of User Innovation and Innovation Sharing in a Local Market." /{Management Science}/ 46, no. 12: 1513--1527. + +Muñiz, A. M., Jr., and T. C. O'Guinn. 2001. "Brand Community." /{Journal of Consumer Research}/ 27: 412--432. + +Muñiz, A. M., Jr., and H. J. Schau. 2004. When the Consumer Becomes the Marketer. Working paper, DePaul University. + +Myers, J. H. 1996. 
/{Segmentation and Positioning for Strategic Marketing Decisions}/. American Marketing Association. + +National Sporting Goods Association. 2002. /{Sporting Goods Market in 2001}/. + +Neil, H., R. Cummings, P. Ganderton, G. Harrison, and G. McGuckin. 1994. "Hypothetical Surveys and Real Economic Commitments." /{Land Economics}/ 70: 145--154. + +Nelson, R. R. 1982. "The Role of Knowledge in R&D Efficiency." /{Quarterly Journal of Economics}/ 97, no. 3: 453--470. + +Nelson, R. R. 1990. What Is Public and What Is Private About Technology? Working paper, Consortium on Competitiveness and Cooperation, University of California, Berkeley. + +Nelson, R. R. 1993. /{National Innovation Systems: A Comparative Analysis}/. Oxford University Press. + +Nuvolari, A. 2004. "Collective Invention during the British Industrial Revolution: The Case of the Cornish Pumping Engine." /{Cambridge Journal of Economics}/ 28, no. 3: 347--363. + +Ogawa, S. 1998. "Does Sticky Information Affect the Locus of Innovation? Evidence from the Japanese Convenience-Store Industry." /{Research Policy}/ 26, no. 7--8: 777--790. + +Oliver, P. E. 1980. "Rewards and Punishment as Selective Incentives for Collective Action: Theoretical Investigations." /{American Journal of Sociology}/ 85: 1356--1375. + +Oliver, P. E., and G. Marwell. 1988. "The Paradox of Group Size in Collective Action: A Theory of the Critical Mass II." /{American Sociological Review}/ 53, no. 1: 1--18. + +Olson, E. L., and G. Bakke. 2001. "Implementing the Lead User Method in a High Technology Firm: A Longitudinal Study of Intentions versus Actions." /{Journal of Product Innovation Management}/ 18, no. 2: 388--395. + +Olson, M. 1967. /{The Logic of Collective Action}/. Harvard University Press. + +O'Mahony, S. 2003. "Guarding the Commons: How Open Source Contributors Protect Their Work." /{Research Policy}/ 32, no. 7: 1179--1198. + +Ostrom, E. 1998. "A Behavioral Approach to the Rational Choice Theory of Collective Action." 
/{American Political Science Review}/ 92, no. 1: 1--22. + +Pavitt, K. 1984. "Sectoral Patterns of Technical Change: Towards a Taxonomy and a Theory." /{Research Policy}/ 13 (6): 343--373. + +Penning, C. 1998. /{Bike History}/. Delius & Klasing. + +Perens, B. 1999. "The Open Source Definition." In C. DiBona, S. Ockman, and M. Stone, eds., /{Opensources}/. O'Reilly. + +Pinch, T., and R. Kline. 1996. "Users as Agents of Technological Change. The Social Construction of the Automobile in Rural America." /{Technology and Culture}/ 37: 763--795. + +Pinch, T. J., and W. E. Bijker. 1987. "The Social Construction of Facts and Artifacts." In W. Bijker, T. Hughes, and T. Pinch, eds., /{The Social Construction of Technological Systems}/. The MIT Press. + +Pine, J. B. II. 1993. /{Mass Customization}/. Harvard Business School Press. + +Polanyi, M. 1958. /{Personal Knowledge}/. University of Chicago Press. + +Poolton, J., and I. Barclay. 1998. "New Product Development: From Past Research to Future Applications." /{Industrial Marketing Management}/ 27: 197--212. + +Porter, M. E. 1991. /{Competitive Advantage of Nations}/. Free Press. + +Prügl, R., and N. Franke. 2005. Factors Impacting the Success of Toolkits for User Innovation and Design. Working paper, Vienna University of Economics. + +Punj, G., and D. W. Stewart. 1983. "Cluster Analysis in Marketing Research: Review and Suggestions for Application." Journal of Marketing Research 20, May: 134--148. + +Raymond, E., ed. 1996. /{The New Hacker's Dictionary}/, third edition. MIT Press. + +Raymond, E. 1999. /{The Cathedral and the Bazaar}/. O'Reilly. + +Redmond, W. H. 1995. "An Ecological Perspective on New Product Failure: The Effects of Competitive Overcrowding." /{Journal of Product Innovation Management}/ 12: 200--213. + +Riggs, W., and E. von Hippel. 1994. "Incentives to Innovate and the Sources of Innovation: The Case of Scientific Instruments." /{Research Policy}/ 23, no. 4: 459--469. + +Rogers, E. M. 1994. 
/{Diffusion of Innovation}/, fourth edition. Free Press. + +Rosenberg, N. 1976. /{Perspectives on Technology}/. Cambridge University Press. + +Rosenberg, N. 1982. /{Inside the Black Box}/. Cambridge University Press. + +Rothwell, R., C. Freeman, A. Horsley, V. T. P. Jervis, A. B. Roberts, and J. Townsend. 1974. "SAPPHO Updated: Project SAPPHO Phase II." /{Research Policy}/ 3, no. 3: 258--291. + +Saloner, G., and W. E. Steinmueller. 1996. Demand for Computer Products and Services in Large European Organizations. Research paper, Stanford Graduate School of Business. + +Sattler, H. 2003. "Appropriability of Product Innovations: An Empirical Analysis for Germany." /{International Journal of Technology Management}/ 26, no. 5--6: S. 502--516. + +Schmookler, J. 1966. /{Invention and Economic Growth}/. Harvard University Press. + +Schrage, M. 2000. /{Serious Play}/. Harvard Business School Press. + +Schreier, M., and N. Franke. 2004. Value Creation by Self-Design. Working paper, Vienna University of Economics. + +Seip, K., and J. Strand. 1992. "Willingness to Pay for Environmental Goods in Norway: A Contingent Valuation Study with Real Payment." /{Environmental and Resource Economics}/ 2: 91--106. + +Shah, S. 2000. Sources and Patterns of Innovation in a Consumer Products Field. Working paper, MIT Sloan School of Management. + +Shah, S., and M. Tripsas. 2004. When Do User-Innovators Start Firms? Working paper, University of Illinois. + +Shapiro, C. 2001. "Navigating the Patent Thicket: Cross Licenses, Patent Pools, and Standard Setting." In A. Jaffe, J. Lerner, and S. Stern, eds., /{Innovation Policy and the Economy}/, volume 1. MIT Press. + +Simon, E. 1996. "Innovation and Intellectual Property Protection: The Software Industry Perspective." /{Columbia Journal of World Business}/ 31, no. 1: 30--37. + +Slater, Stanley F., and Narver, John C. 1998. "Customer-Led and Market-Oriented: Let's Not Confuse the Two." /{Strategic Management Journal}/ 19, no. 1:1001--1006. 
+ +Slaughter, S. 1993. "Innovation and Learning during Implementation: A Comparison of User and Manufacturer Innovations." /{Research Policy}/ 22, no. 1: 81--95. + +Smith, A. 1776. /{An Inquiry into the Nature and Causes of the Wealth of Nations. Modern Library edition}/. Random House, 1937. + +Spence, M. 1976. "Product Differentiation and Welfare." /{American Economic Review}/ 66, no. 2, Papers and Proceedings: 407--414. + +Taylor, C. T., and Z. A. Silberston. 1973. /{The Economic Impact of the Patent System}/. Cambridge University Press. + +Taylor, M., and S. Singleton. 1993. "The Communal Resource: Transaction Costs and the Solution of Collective Action Problems." /{Politics and Society}/ 21, no. 2: 195--215. + +Tedd, L. A. 1994. "OPACs through the Ages." /{Library Review}/ 43, no. 4: 27--37. + +Teece, D. J. 1977. "Technology Transfer by Multinational Firms: The Resource Cost of Transferring Technological Know-How." /{Economic Journal}/ 87: 242--261. + +Thomke, S. H. 1998. "Managing Experimentation in the Design of New Products." /{Management Science}/ 44, no. 6: 743--762. + +Thomke, S. H. 2003. /{Experimentation Matters}/. Harvard Business School Press. + +Thomke, S. H., and E. von Hippel. 2002. "Customers as Innovators: A New Way to Create Value." /{Harvard Business Review}/ 80, no. 4: 74--81. + +Thomke, S. H., E. von Hippel, and R. Franke. 1998. "Modes of Experimentation: An Innovation Process---and Competitive---Variable." /{Research Policy}/ 27, no. 3: 315--332. + +Tirole, J. 1988. /{The Theory of Industrial Organization}/. MIT Press. + +Tull, D. 1967. "The Relationship of Actual and Predicted Sales and Profits in New Product Introductions." /{Journal of Business}/ 40: 233--250. + +Tyre, M., and E. von Hippel. 1997. "Locating Adaptive Learning: The Situated Nature of Adaptive Learning in Organizations." /{Organization Science}/ 8, no. 1: 71--83. + +Urban, G. L., and E. von Hippel. 1988. "Lead User Analyses for the Development of New Industrial Products." 
/{Management Science}/ 34, no. 5: 569--82. + +Utterback, J. M., and W. J. Abernathy. 1975. "A Dynamic Model of Process and Product Innovation." /{Omega 3}/, no. 6: 639--656. + +van der Plas, R., and C. Kelly. 1998. /{The Original Mountain Bike Book}/. MBI. + +Varian, H. R. 2002. "New Chips Can Keep a Tight Rein on Consumers." /{New York Times}/, July 4. + +von Hippel, E. 1976. "The Dominant Role of Users in the Scientific Instrument Innovation Process." /{Research Policy}/ 5, no. 3: 212--39. + +von Hippel, E. 1977. "Transferring Process Equipment Innovations from User-Innovators to Equipment Manufacturing Firms." /{R&D Management}/ 8, no. 1:13--22. + +von Hippel, E. 1986. "Lead Users: A Source of Novel Product Concepts." /{Management Science}/ 32, no. 7: 791--805. + +von Hippel, E. 1988. /{The Sources of Innovation}/. Oxford University Press. + +von Hippel, E. 1994. "Sticky Information and the Locus of Problem Solving: Implications for Innovation." /{Management Science}/ 40, no. 4: 429--439. + +von Hippel, E. 1998. "Economics of Product Development by Users: The Impact of Sticky Local Information." /{Management Science}/ 44, no. 5: 629--644. + +von Hippel, E. 2001. "Perspective: User Toolkits for Innovation." /{Journal of Product Innovation Management}/ 18: 247--257. + +von Hippel, E., and S. N. Finkelstein. 1979. "Analysis of Innovation in Automated Clinical Chemistry Analyzers." /{Science and Public Policy}/ 6, no. 1: +24--37. + +von Hippel, E., N. Franke, and R. Prügl. 2005. Screening vs. Pyramiding. Working paper, MIT Sloan School of Management. + +von Hippel, E., and R. Katz. 2002. "Shifting Innovation to Users via Toolkits." /{Management Science}/ 48, no. 7: 821--833. + +von Hippel, E., S. H. Thomke, and M. Sonnack. 1999. "Creating Breakthroughs at 3M." /{Harvard Business Review}/ 77, no. 5: 47--57. + +von Hippel, E., and M. Tyre. 1995. "How `Learning by Doing' is Done: Problem Identification in Novel Process Equipment." /{Research Policy}/ 24, no. 1: 1--12. 
+ +von Hippel, E., and G. von Krogh. 2003. "Open Source Software and the `Private-Collective' Innovation Model: Issues for Organization Science." /{Organization Science}/ 14, no. 2: 209--223. + +von Krogh, G., and S. Spaeth. 2002. Joining, Specialization, and Innovation in Open Source Software Development. Working paper, University of St. Gallen. + +von Krogh, G., S. Haefliger and S. Spaeth. 2004. The Practice of Knowledge Reuse in Open Source Software. Working paper, University of St. Gallen. + +Wellman, B., J. Boase, and W. Chen. 2002. The Networked Nature of Community On and Off the Internet. Working paper, Centre for Urban and Community Studies, University of Toronto. + +Wenger, E. 1998. /{Communities of Practice}/. Cambridge University Press. + +Wayner, P. 2000. /{Free for All}/. Harper Business. + +Weber, S. 2004. /{The Success of Open Source}/. Harvard University Press. + +Willis, K. G., and N. A. Powe. 1998. "Contingent Valuation and Real Economic Commitments: A Private Good Experiment." /{Journal of Environmental Planning and Management}/ 41, no. 5: 611--619. + +Wind, Y. 1978. "Issues and Advances in Segmentation Research." /{Journal of Marketing Research}/ 15, August: 317--337. + +Winter, S. G., and G. Szulanski. 2001. "Replication as Strategy." /{Organization Science}/ 12, no. 6: 730--743. + +Young, G., K. G. Smith, and C. M. Grimm. 1996. "Austrian and Industrial Organization Perspectives on Firm Level Competitive Activity and Performance." /{Organization Science}/ 7, no. 3: 243--254. 
+ +%% index di.eric_von_hippel_index.txt democratizing_innovation.eric_von_hippel_index.txt -- cgit v1.2.3 From 7e1396f91bcaebb94016f9582f4a314103be537c Mon Sep 17 00:00:00 2001 From: Ralph Amissah Date: Sun, 22 Aug 2010 17:57:47 -0400 Subject: markup sample v1 "Free Culture", document structure --- data/v1/samples/free_culture.lawrence_lessig.sst | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/data/v1/samples/free_culture.lawrence_lessig.sst b/data/v1/samples/free_culture.lawrence_lessig.sst index 81c5e6f..02927b4 100644 --- a/data/v1/samples/free_culture.lawrence_lessig.sst +++ b/data/v1/samples/free_culture.lawrence_lessig.sst @@ -60,7 +60,7 @@ To Eric Eldred - whose work first drew me to this cause, and for whom it continues still.~# -:C~ PREFACE +:B~ PREFACE 1~preface [Preface]-# @@ -95,7 +95,7 @@ Like Stallman's arguments for free software, an argument for free culture stumbl Instead, the free culture that I defend in this book is a balance between anarchy and control. A free culture, like a free market, is filled with property. It is filled with rules of property and contract that get enforced by the state. But just as a free market is perverted if its property becomes feudal, so too can a free culture be queered by extremism in the property rights that define it. That is what I fear about our culture today. It is against that extremism that this book is written. -:C~ INTRODUCTION +:B~ INTRODUCTION 1~intro [Intro]-# @@ -235,7 +235,7 @@ The two sections set up the core claim of this book: that while the Internet has We allow this, I believe, not because it is right, and not because most of us really believe in these changes. We allow it because the interests most threatened are among the most powerful players in our depressingly compromised process of making law. This book is the story of one more consequence of this form of corruption - a consequence to which most of us remain oblivious. 
-:C~ "PIRACY" +:B~ "PIRACY" 1~intro_piracy [Intro]-# @@ -971,7 +971,7 @@ begins to talk about "balance," the copyright warriors raise a different argumen "It is /{our property}/," the warriors insist. "And it should be protected just as any other property is protected." -:C~ "PROPERTY" +:B~ "PROPERTY" 1~intro_property [Intro]-# @@ -2082,7 +2082,7 @@ We achieved that free culture because our law respected important limits on the Free culture is increasingly the casualty in this war on piracy. In response to a real, if not yet quantified, threat that the technologies of the Internet present to twentieth-century business models for producing and distributing culture, the law and technology are being transformed in a way that will undermine our tradition of free culture. The property right that is copyright is no longer the balanced right that it was, or was intended to be. The property right that is copyright has become unbalanced, tilted toward an extreme. The opportunity to create and transform becomes weakened in a world in which creation requires permission and creativity must check with a lawyer. ={free culture:permission culture vs.;permission culture:free culture vs.} -:C~ PUZZLES +:B~ PUZZLES 1~ Chapter Eleven: Chimera @@ -2470,7 +2470,7 @@ _1 So when we're talking about numbers like forty to sixty million Americans tha When forty to sixty million Americans are considered "criminals" under the law, and when the law could achieve the same objective - securing rights to authors - without these millions being considered "criminals," who is the villain? Americans or the law? Which is American, a constant war on our own people or a concerted effort through our democracy to change our law? -:C~ BALANCES +:B~ BALANCES 1~intro_balances [Intro]-# @@ -3024,7 +3024,7 @@ What is hard to understand is why the public takes this view. It is as if the la All this seems to follow easily from this untroubled acceptance of the "property" in intellectual property. 
Common sense supports it, and so long as it does, the assaults will rain down upon the technologies of the Internet. The consequence will be an increasing "permission society." The past can be cultivated only if you can identify the owner and gain permission to build upon his work. The future will be controlled by this dead (and often unfindable) hand of the past. -:C~ CONCLUSION +:B~ CONCLUSION 1~conclusion [Conclusion]-# @@ -3174,7 +3174,7 @@ I've told a dark story. The truth is more mixed. A technology has given us a new Common sense must revolt. It must act to free culture. Soon, if this potential is ever to be realized. -:C~ AFTERWORD +:B~ AFTERWORD 1~intro_afterword [Intro]-# @@ -3555,13 +3555,13 @@ The law should regulate in certain areas of culture - but it should regulate cul We should ask, "Why?" Show me why your regulation of culture is needed. Show me how it does good. And until you can show me both, keep your lawyers away. -:C~ NOTES +:B~ NOTES 1~webnotes Notes~# Throughout this text, there are references to links on the World Wide Web. As anyone who has tried to use the Web knows, these links can be highly unstable. I have tried to remedy the instability by redirecting readers to the original source through the Web site associated with this book. For each link below, you can go to http://free-culture.cc/notes and locate the original source by clicking on the number after the # sign. If the original link remains alive, you will be redirected to that link. If the original link has disappeared, you will be redirected to an appropriate reference for the material. 
-:C~ ACKNOWLEDGMENTS +:B~ ACKNOWLEDGMENTS 1~acknowledgements [Acknowledgments]-# -- cgit v1.2.3 From 7b5d3681c8feef1a3776bd93da59402fa13cc312 Mon Sep 17 00:00:00 2001 From: Ralph Amissah Date: Sun, 22 Aug 2010 17:53:48 -0400 Subject: markup samples, adds text "Viral Spiral" David Bollier --- data/v1/samples/_sisu/image/vs_db_1.png | Bin 0 -> 98654 bytes .../_sisu/skin/doc/skin_vs_david_bollier.rb | 77 + data/v1/samples/viral_spiral.david_bollier.sst | 2924 +++++++++++++++++++ data/v2/samples/_sisu/image/vs_db_1.png | Bin 0 -> 98654 bytes .../_sisu/skin/doc/skin_vs_david_bollier.rb | 77 + data/v2/samples/viral_spiral.david_bollier.sst | 2930 ++++++++++++++++++++ 6 files changed, 6008 insertions(+) create mode 100644 data/v1/samples/_sisu/image/vs_db_1.png create mode 100644 data/v1/samples/_sisu/skin/doc/skin_vs_david_bollier.rb create mode 100644 data/v1/samples/viral_spiral.david_bollier.sst create mode 100644 data/v2/samples/_sisu/image/vs_db_1.png create mode 100644 data/v2/samples/_sisu/skin/doc/skin_vs_david_bollier.rb create mode 100644 data/v2/samples/viral_spiral.david_bollier.sst diff --git a/data/v1/samples/_sisu/image/vs_db_1.png b/data/v1/samples/_sisu/image/vs_db_1.png new file mode 100644 index 0000000..4a9bfd1 Binary files /dev/null and b/data/v1/samples/_sisu/image/vs_db_1.png differ diff --git a/data/v1/samples/_sisu/skin/doc/skin_vs_david_bollier.rb b/data/v1/samples/_sisu/skin/doc/skin_vs_david_bollier.rb new file mode 100644 index 0000000..d357797 --- /dev/null +++ b/data/v1/samples/_sisu/skin/doc/skin_vs_david_bollier.rb @@ -0,0 +1,77 @@ +# coding: utf-8 +=begin + * Name: SiSU - Simple information Structuring Universe - Structured information, Serialized Units + * Author: Ralph Amissah + * http://www.jus.uio.no/sisu + * http://www.jus.uio.no/sisu/SiSU/download + * Description: Skin prepared for Viral Spiral, David Bollier + * License: Same as SiSU see http://www.jus.uio.no/sisu + * Notes: Site default appearance variables set in defaults.rb + 
Generic site wide modifications set here scribe_skin.rb, and this file required by other "scribes" instead of defaults.rb +=end +module SiSU_Viz + require SiSU_lib + '/defaults' + class Skin + def url_home + 'http://viralspiral.cc/' + end + def url_txt # text to go with url usually stripped url + 'viralspiral.cc' + end + def url_author + 'http://www.bollier.org/' + end + def color_band1 + '"#ffffff"' + end + def txt_hp + 'viralspiral.cc' + end + def txt_home + 'David Bollier' + end + def icon_home_button + '' + end + def icon_home_banner + icon_home_button + end + def banner_home_button + %{
#{png_home}
\n} + end + def banner_home_and_index_buttons + %{
#{png_home}
 This text sub- 
 Table of Contents 
#{table_close}
 #{table_close}} + end + def banner_band + %{
+

Viral Spiral

+

David Bollier

+ #{table_close}} + end + def credits_splash + %{
+Viral Spiral, David Bollier
+The original pdf is available online at
#{url_txt}
+available at
Amazon.com and
+Barnes & Noble
+This book is Copyright David Bollier © 2008
+Under a Creative Commons License, License: Attribution-Noncommercial Works (CC-BY-NC) 3.0 +http://creativecommons.org/licenses/by-nc/3.0/
} + end + end + class TeX + def header_center + "\\chead{\\href{#{@vz.url_home}}{#{@vz.url_txt}}}" + end + def home_url + "\\href{#{@vz.url_home}}{#{@vz.url_txt}}" + end + def home + "\\href{#{@vz.url_home}}{David Bollier}" + end + def owner_chapter + "Document owner details" + end + end +end +__END__ diff --git a/data/v1/samples/viral_spiral.david_bollier.sst b/data/v1/samples/viral_spiral.david_bollier.sst new file mode 100644 index 0000000..912191b --- /dev/null +++ b/data/v1/samples/viral_spiral.david_bollier.sst @@ -0,0 +1,2924 @@ +% SiSU 1.0 + +@title: Viral Spiral + +@subtitle: How the Commoners Built a Digital Republic of Their Own + +@language: US + +@author: Bollier, David + +@type: Book + +@topic_register: SiSU:markup sample:book;networks;Internet:social aspects|copyright|intellectual property;intellectual property:copyright|creative commons|patents|public domain;society:information society;copyright:creative commons|public domain|licenses;patents;book:subject:information society|information networks|society|copyright|creative commons|patents|culture;open source software:social aspects;software:free software|GPL|open source;license:GPL;programming;democracy;democratization;creative commons:organization;public domain:copyright law (U.S.);free culture;culture + +@rights: Copyright 2008 by David Bollier All rights reserved. No part of this book may be reproduced, in any form, without written permission from the publisher. The author has made an online version of the book available under a Creative Commons Attribution-NonCommercial license. It can be accessed at http://www.viralspiral.cc and http://www.onthecommons.org. Requests for permission to reproduce selections from this book should be mailed to: Permissions Department, The New Press, 38 Greene Street, New York, NY 10013. Published in the United States by The New Press, New York, 2008 Distributed by W. W. Norton & Company, Inc., New York ISBN 978-1-59558-396-3 (hc.) 
CIP data available The New Press was established in 1990 as a not-for-profit alternative to the large, commercial publishing houses currently dominating the book publishing industry. The New Press operates in the public interest rather than for private gain, and is committed to publishing, in innovative ways, works of educational, cultural, and community value that are often deemed insufficiently profitable. www.thenewpress.com A Caravan book. For more information, visit www.caravanbooks.org.
Creative Commons Attribution-NonCommercial license. + +@prefix: ALSO BY DAVID BOLLIER: "Brand Name Bullies"; "Silent Theft"; "Aiming Higher"; "Sophisticated Sabotage" (with co-authors Thomas O. McGarity and Sidney Shapiro); "The Great Hartford Circus Fire" (with co-author Henry S. Cohn); "Freedom from Harm" (with co-author Joan Claybrook) + +@links: {Viral Spiral}http://viralspiral.cc/ +{David Bollier}http://www.bollier.org/ +{David Bollier @ Wikipedia}http://en.wikipedia.org/wiki/David_Bollier +{Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier +{The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler +{Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel +{Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty +{Free Culture, Lawrence Lessig @ SiSU}http://www.jus.uio.no/sisu/free_culture.lawrence_lessig +{CONTENT, Cory Doctorow @ SiSU}http://www.jus.uio.no/sisu/content.cory_doctorow +{Free as in Freedom (on Richard M. Stallman), Sam Williams @ SiSU}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams +{Free For All, Peter Wayner @ SiSU}http://www.jus.uio.no/sisu/free_for_all.peter_wayner +{The Cathedral and the Bazaar, Eric S. 
Raymond @ SiSU }http://www.jus.uio.no/sisu/the_cathedral_and_the_bazaar.eric_s_raymond +{Little Brother, Cory Doctorow @ SiSU}http://www.jus.uio.no/sisu/little_brother.cory_doctorow +{Viral Spiral @ Amazon.com}http://www.amazon.com/Viral-Spiral-Commoners-Digital-Republic/dp/1595583963 +{Viral Spiral @ Barnes & Noble}http://search.barnesandnoble.com/booksearch/isbnInquiry.asp?isbn=1595583963 + +:A~ @title @author + +1~attribution Attribution~# + +To Norman Lear, dear friend and intrepid explorer of the frontiers of democratic practice -# + +1~acknowledgments ACKNOWLEDGMENTS + +In this book, as with any book, dozens of barely visible means of support conspired to help me. It has been hard work, but any author with sufficient honesty and self-awareness realizes the extent to which he or she is a lens that refracts the experiences, insights, and writings of others. It is a pleasure to pay tribute to those who have been helpful to me. + +I am grateful to Larry Lessig, a singular visionary in developing the commons as a new paradigm, for helping to make this book possible. He submitted to several interviews, facilitated my research within the Creative Commons community, and, despite our shared involvements in various projects over the years, scrupulously respected my independence. It is also a pleasure to thank the Rockefeller Foundation for generously helping to cover my research, reporting, and travel expenses. + +I interviewed or consulted with more than one hundred people in the course of writing this book. I want to thank each of them for carving out some time to speak with me and openly sharing their thoughts. The Creative Commons and iCommons staff were particularly helpful in making time for me, pointing me toward useful documents and Web sites and sharing their expertise. I must single out Glenn Otis Brown, Mia Garlick, Joichi Ito, Heather Ford, Tomislav Medak, Ronaldo Lemos, and Hal Abelson for their special assistance. 
+ +Since writing a book resembles parachuting into a forest and then trying to find one’s way out, I was pleased to have many friends who recommended some useful paths to follow. After reading some or all of my manuscript, the following friends and colleagues offered many invaluable suggestions and criticisms: Charles Schweik, Elliot E. Maxwell, John Seely Brown, Emily Levine, Peter Suber, Julie Ristau, Jay Walljasper, Jonathan Rowe, Kathryn Milun, Laurie Racine, and Gigi Sohn. It hardly requires saying that none of these astute readers bears any responsibility for the choices that I ultimately made. + +For the past seven years, the Tomales Bay Institute, recently renamed On the Commons, has nurtured my thinking and commitment to the commons. (On the Commons has no formal affiliation to the Creative Commons world, but it enthusiastically shares its commitments to the commons.) I am grateful to my colleagues Peter Barnes, Harriet Barlow, and Julie Ristau for their unflagging support of my book over the past three years, even when it impinged on my other responsibilities. + +In the early stages of this book, Elaine Pagels was unusually generous in offering her help, and my conversations with Nick Bromell helped pry loose some important insights used in my conclusion. Cherry Alvarado was of extraordinary help to me as she transcribed scores of interviews with unfailing good humor and precision. I also wish to thank Andrew Ryder for resourceful assistance in the early stages of my research. + +I have dedicated this book to my dear friend and mentor Norman Lear. The zeal, imagination, and grace that he brings to the simple imperatives of citizenship have been more instructive and inspirational than he perhaps realizes. He has also been of incalculable support to me in my headstrong explorations of the commons. 
+ +Finally, at the end of the day, when I emerge from my writer’s lair or return from yet another research and reporting trip, it is Ellen and my sons Sam and Tom who indulge my absences, mental and physical, and reacquaint me with the things that matter most. I could not wish for more. David Bollier Amherst, Massachusetts May 1, 2008 + +1~introduction INTRODUCTION + +It started with that great leap forward in human history the Internet, which gave rise to free software in the 1980s and then the World Wide Web in the early 1990s. The shockingly open Internet, fortified by these tools, began empowering a brash new culture of rank amateurs — you and me. And this began to reverse the fierce tide of twentieth-century media. Ordinary people went online, if only to escape the incessant blare of television and radio, the intrusive ads and the narrow spectrum of expression. People started to discover their own voices . . . and their own capabilities . . . and one another. +={free software+2} + +As the commoners began to take charge of their lives, they discovered anew that traditional markets, governments, and laws were often not serving their needs very well. And so some pioneers had the audacity to invent an infrastructure to host new alternatives: free and open-source software. Private licenses to enable sharing and bypass the oppressive complications of copyright law. A crazy quilt of Web applications. And new types of companies that thrive on servicing social communities on open platforms. + +At the dawn of the twenty-first century, the commoners began to make some headway. More people were shifting their attention away from commercial media to homegrown genres — listservs, Web sites, chat rooms, instant messaging, and later, blogs, podcasts, and wikis. A swirling mass of artists, legal scholars, techies, activists, and even scientists and businesses began to create their own online commons. 
They self-organized themselves into a loosely coordinated movement dedicated to “free culture.” + +The viral spiral was under way. + +Viral spiral? /{Viral}/, a term borrowed from medical science, refers to the way in which new ideas and innovations on the Internet can proliferate with astonishing speed. A video clip, a blog post, an advertisement released on the Internet tumbles into other people’s consciousness in unexpected ways and becomes the raw feedstock for new creativity and culture. This is one reason the Internet is so powerful — it virally propagates creativity. A novel idea that is openly released in the networked environment can often find its way to a distant person or improbable project that can really benefit from it. This recombinative capacity — efficiently coordinated through search engines, Web logs, informal social networks, and other means— radically accelerates the process of innovation. It enlivens democratic culture by hosting egalitarian encounters among strangers and voluntary associations of citizens. Alexis de Tocqueville would be proud. + +The /{spiral}/ of /{viral spiral}/ refers to the way in which the innovation of one Internet cohort rapidly becomes a platform used by later generations to build their own follow-on innovations. It is a corkscrew paradigm of change: /{viral}/ networking feeds an upward /{spiral}/ of innovation. The cutting-edge thread achieves one twist of change, positioning a later thread to leverage another twist, which leverages yet another. Place these spirals in the context of an open Internet, where they can sweep across vast domains of life and catalyze new principles of order and social practice, and you begin to get a sense of the transformative power of viral spirals. + +The term /{viral spiral}/ is apt, additionally, because it suggests a process of change that is anything but clean, direct, and mechanical. In the networked environment, there is rarely a direct cause-andeffect. 
Things happen in messy, irregular, indeterminate, serendipitous ways. Life on the Internet does not take place on a stable Cartesian grid — orderly, timeless, universal — but on a constantly pulsating, dynamic, and labyrinthine /{web}/ of finely interconnected threads radiating through countless nodes. Here the context is as rich and generative as any individual, /{Viral spiral}/ calls attention to the holistic and historical dynamics of life on the Web, which has a very different metaphysical feel than the world of twentieth-century media. + +The viral spiral began with free software (code that is free to use, not code at no cost) and later produced the Web. Once these open platforms had sufficiently matured, tech wizards realized that software’s great promise is not as a stand-alone tool on PCs, but as a social platform for Web-based sharing and collaboration. The commoners could then begin to imagine: How might these tools be used to overcome the arbitrary and confusing limitations of copyright law? One answer, the Creative Commons (CC) licenses, a free set of public licenses for sharing content, helped mitigate the legal risks of sharing of works under copyright law. This innovation, in turn, helped unleash a massive wave of follow-on innovations. +={free software;Creative Commons (CC) licenses} + +Web 2.0 applications flourished, many of them relying upon sharing made legal through CC licenses. By avoiding the costly overhead of centralized production and marketing, and tapping into the social vitality of a commons, Web 2.0 platforms have enabled ordinary people to share photos (Flickr), favorite browser bookmarks (del.icio.us), favorite news stories (Digg, Reddit), and homemade videos (YouTube). They let people access user-created archives (Wikipedia, Internet Archive, Ourmedia.org), collaborate in news gathering (OhmyNews, Assignment Zero), participate in immersive communities (Second Life), and build open-business models (Magnatune, Revver, Jamendo). 
+={web 2.0:applications} + +This book seeks to trace the long arc of change wrought by a kaleidoscopic swarm of commoners besieged by oppressive copyright laws, empowered by digital technologies, and possessed of a vision for a more open, democratic society. Their movement has been fired by the rhetoric of freedom and actualized by digital technologies connected by the Internet. These systems have made it extremely cheap and easy for ordinary people to copy and share things, and to collaborate and organize. They have democratized creativity on a global scale, challenging the legitimacy and power of all sorts of centralized, hierarchical institutions. + +This larger story has rarely been told in its larger scope. It is at base a story of visionary individuals determined to protect the shared code, content, and social community that they have collectively generated. Richard Stallman pioneered the development of free software; Lawrence Lessig waged challenges against excessive copyright protection and led the development of the Creative Commons licenses; citizen-archivist Eric Eldred fought to preserve his online body of public-domain literature and the community that grew up around it. These are simply the better-known leaders of a movement that has attracted thousands of commoners who are building legally defensible commons into which to pour their creative energies and live their lives. +={free software;commoners:concept of+5;commons:concept of+1} + +The commons — a hazy concept to many people — is a new paradigm for creating value and organizing a community of shared interest. It is a vehicle by which new sorts of self-organized publics can gather together and exercise new types of citizenship. The commons can even serve as a viable alternative to markets that have grown stodgy, manipulative, and coercive. 
A commons arises whenever a given community decides that it wishes to manage a resource in a collective manner, with special regard for equitable access, use, and sustainability. The commons is a means by which individuals can band together with like-minded souls and express a sovereignty of their own. + +Self-styled commoners can now be found in dozens of nations around the world. They are locally rooted but internationally aware citizens of the Internet. They don’t just tolerate diversity (ethnic, cultural, aesthetic, intellectual), they celebrate it. Although commoners may have their personal affinities — free software, open-access publishing, remix music, or countless others — they tend to see themselves as part of a larger movement. They share an enthusiasm for innovation and change that burbles up from the bottom, and are known to roll their eyes at the thick-headedness of the mainstream media, which always seem to be a few steps behind. +={free software} + +If there is an element of self-congratulatory elitism at times, it stems from the freedom of commoners to negotiate their own rules and the pleasure of outmaneuvering conventional institutions. The commoners know how to plug into the specialized Web sites and practitioner communities that can provide just-in-time, highly specialized expertise. As Herbert Simon, the computer-oriented social scientist, once put it, “The meaning of ‘knowing’ today has shifted from being able to remember and repeat information to being able to find and use it.” ~{Cited by John Seely Brown, former chief scientist, Xerox Palo Alto Research Center, at Open Educational Resources conference, Houston, Texas, March 29, 2007.}~ Commoners realize that this other way of being, outside hierarchical institutions, in the open space where viral spirals of innovation are free to materialize, is an important source of their insurgent power. 
+={Simon, Herbert} + +It is perilous to generalize about a movement that has so many disparate parts pushing and pulling and innovating in so many different directions at once. Yet it is safe to say that the commoners— a digital embodiment of /{e pluribus unum}/ — share a common goal. They wish to transcend the limitations of copyright law in order to build their own online communities. It’s not as if the commoners are necessarily hostile to copyright law, markets, or centralized institutions. Indeed, many of them work for large corporations and universities; many rely on copyright to earn a livelihood; many are entrepreneurs. +={commoners:goal} + +Yet the people who are inventing new commons have some deeper aspirations and allegiances. They glimpse the liberating potential of the Internet, and they worry about the totalizing inclinations of large corporations and the state, especially their tendency to standardize and coerce behavior. They object as well to processes that are not transparent. They dislike the impediments to direct access and participation, the limitations of credentialed expertise and arbitrary curbs on people’s freedom. + +One of the first major gatherings of international commoners occurred in June 2006, when several hundred people from fifty nations converged on Rio de Janeiro, Brazil, for the iCommons Summit. The people of this multinational, eclectic vanguard blend the sophistication of the establishment in matters of power and politics with the bravado and playfulness of Beat poets. There were indie musicians who can deconstruct the terms of a record company licensing agreement with Talmudic precision. There were Web designers who understand the political implications of arcane rules made by the World Wide Web Consortium, a technical standards body.
The lawyers and law professors who discourse about Section 114 of the Copyright Act are likely to groove on the remix career of Danger Mouse and the appropriationist antics of Negativland, a sound-collage band. James Boyle and Jennifer Jenkins, two law scholars at Duke Law School, even published a superhero comic book, /{Down by Law!}/, which demystifies the vagaries of the “fair use doctrine” through a filmmaker character resembling video game heroine Lara Croft.~{Keith Aoki, James Boyle, Jennifer Jenkins, /{Down by Law!}/ at http://www .duke.edu/cspd/comics.}~ (Fair use is a provision of copyright law that makes it legal to excerpt portions of a copyrighted work for noncommercial, educational, and personal purposes.) +={commoners:gatherings of} + +2~ The Rise of Socially Created Value +={socially created value+5} + +The salience of electronic commerce has, at times, obscured an important fact — that the commons is one of the most potent forces driving innovation in our time. Individuals working with one another via social networks are a growing force in our economy and society. This phenomenon has many manifestations, and goes by many names — “peer production,” “social production,” “smart mobs,” the “wisdom of crowds,” “crowdsourcing,” and “the commons.”~{“Social production” and “peer production” are associated with the work of Yale law professor Yochai Benkler, especially in his 2006 book, /{The Wealth of Networks}/. “Smart mobs” is a coinage of Howard Rheingold, author of a 2003 book by the same name.“Crowdsourcing” is the name of a blog run by Jeff Howe and the title of a June 2006 /{Wired}/ article on the topic.“Wisdom of crowds” is a term coined by James Surowiecki and used as the title of his 2004 book.}~ The basic point is that /{socially created value}/ is increasingly competing with conventional markets, as GNU/Linux has famously shown. 
Through an open, accessible commons, one can efficiently tap into the “wisdom of the crowd,” nurture experimentation, accelerate innovation, and foster new forms of democratic practice. +={commons:sources of new ideas, as+1} + +This is why so many ordinary people — without necessarily having degrees, institutional affiliations, or wealth — are embarking upon projects that, in big and small ways, are building a new order of culture and commerce. It is an emerging universe of economic, social, and cultural activity animated by self-directed amateurs, citizens, artists, entrepreneurs, and irregulars. + +Hugh McGuire, a Montreal-based writer and Web designer, is one. In 2005, he started LibriVox, a digital library of free public-domain audio books that are read and recorded by volunteers. More than ten thousand people a day visit the Web site to download audio files of Twain, Kafka, Shakespeare, Dostoyevsky, and others, in nearly a dozen languages.~{ http://www.librivox.org. }~ The Faulkes Telescope Project in Australia lets high school students connect with other students, and with professional astronomers, to scan the skies with robotic, online telescopes.~{ http://faulkes-telescope.com. }~ In a similar type of learning commons, the Bugscope project in the United States enables students to operate a scanning electronic microscope in real time, using a simple Web browser on a classroom computer connected to the Internet.~{ http://bugscope.beckman.uiuc.edu. }~ +={Bugscope;LibriVox;McGuire, Hugh;Faulkes Telescope Project} + +Thousands of individual authors, musicians, and filmmakers are using Web tools and Creative Commons licenses to transform markets for creative works — or, more accurately, to blend the market and commons into integrated hybrids. A nonprofit humanitarian group dedicated to doing reconstructive surgery for children in poor countries, Interplast, produced an Oscar-winning film, /{A Story of Healing}/, in 1997. 
Ten years later, it released the film under a Creative Commons license as a way to publicize Interplast’s work while retaining ownership of the film: a benefit for both film buffs and Interplast.~{ http://www.interplast.org and http://creativecommons.org/press-releases/ 2007/04/%E2%80%9Ca-story-of-healing%E2%80%9D-becomes-first-acad emy-award%C2%AE-winning-film-released-under-a-creative-commons-li cense. }~ +={Interplast} + +Scoopt, a Glasgow, Scotland–based photography agency, acts as a broker to help bloggers and amateurs sell newsworthy photos and videos to the commercial media.~{ http://www.scoopt.com. }~ The Boston band Two Ton Shoe released its music on the Web for free to market its concerts. Out of the blue, a South Korean record label called one day to say it loved the band and could it come over to Seoul, all expenses paid, to perform four concerts? Each one sold out.~{ http://www.twotonshoe.com/news.html. }~ Boing Boing blogger and cyber-activist Cory Doctorow released his 2003 science-fiction novel, /{Down and Out in the Magic Kingdom}/, under a CC license, reaping a whirlwind of worldwide exposure.~{ See Doctorow’s preface to the second release of the book, February 12, 2004, Tor Books. See also his blog Craphound.com, September 9, 2006, at http:// www.craphound.com/?=p=1681. }~ +={Doctorow, Cory;Scoopt} + +2~ The Commoners Build a Digital Republic of Their Own +={commons: achievement of+11} + +The profusion of commons on the Internet may appear to be a spontaneous and natural development. In fact, it is a hard-won achievement. An infrastructure of software, legal rights, practical expertise, and social ethics had to be imagined, built, and defended. In a sense, the commoners had to invent themselves as commoners. They had to learn to recognize their own distinct interests — in how to control their creative works, how to organize their communities, and how to engage with market players without being co-opted. 
They have, in fact, invented a new sort of democratic polity within the edifice of the conventional nation-state. + +The commoners differ from most of their corporate brethren in their enthusiasm for sharing. They prefer to freely distribute their writing, music, and videos. As a general rule, they don’t like to encase their work in airtight bubbles of property rights reinforced by technological locks. They envision cyberspace more as a peaceable, sociable kingdom than as a take-no-prisoners market. They honor the individual while respecting community norms. They are enthusiastic about sharing while respecting the utility of markets. Idealistic yet pragmatic, they share a commitment to open platforms, social cooperation, and elemental human freedoms. +={commoners:sharing by+1} + +It is all very well to spout such lofty goals. But how to actualize them? That is the story that the following pages recount. It has been the work of a generation, some visionary leaders, and countless individuals to articulate a loosely shared vision, build the infrastructure, and develop the social practices and norms. This project has not been animated by a grand political ideology, but rather is the result of countless initiatives, grand and incremental, of an extended global family of hackers, lawyers, bloggers, artists, and other supporters of free culture. +={commons:political implications of+3} + +And yet, despite its focus on culture and its aversion to conventional politics, the growth of this movement is starting to have political implications. In an influential 2003 essay, James F. Moore announced the arrival of “an emerging second superpower.”~{ James F. Moore, “The Second Superpower Rears its Beautiful Head,” March 31, 2003, available at http://cyber.law.harvard.edu/people/jmoore/second superpower.html. }~ It was not a nation, but the coalescence of people from around the world who were asserting common values, and forming new public identities, via online networks. 
The people of this emerging “superpower,” Moore said, are concerned with improving the environment, public health, human rights, and social development. He cited as early examples the international campaign to ban land mines and the Seattle protests against the World Trade Organization in 1999. The power and legitimacy of this “second superpower” do not derive from the constitutional framework of a nation-state, but from its ability to capture and project people’s everyday feelings, social values, and creativity onto the world stage. Never in history has the individual had such cheap, unfettered access to global audiences, big and small. +={Moore, James} + +The awakening superpower described in /{Viral Spiral}/ is not a conventional political or ideological movement that focuses on legislation and a clutch of “issues.” While commoners do not dismiss these activities as unimportant, most are focused on the freedom of their peer communities to create, communicate, and share. When defending these freedoms requires wading into conventional politics and law, they are prepared to go there. But otherwise, the commoners are more intent on building a kind of parallel social order, inscribed within the regnant political economy but animated by their own values. Even now, the political/cultural sensibilities of this order are only vaguely understood by governments, politicians, and corporate leaders. The idea of “freedom without anarchy, control without government, consensus without power” — as Lawrence Lessig put it in 1999~{ Lawrence Lessig, /{Code and Other Laws of Cyberspace}/ (New York: Basic Books, 1999), p. 4. }~ —is just too counterintuitive for the conventionally minded to take seriously. 
+={commoners:sharing by+1;Lessig, Lawrence+2} + +Very early on, the commoners identified copyright law as a major impediment to their vision of a “sharing economy.” It is not that they revile copyright law as such; indeed, many commoners defend the importance of copyright law to creative endeavor. The problem, they insist, is that large corporations with vast inventories of copyrighted works — film studios, record labels, book publishers, software companies — have used their political power unfairly to extend the scope and term of copyright privileges. A limited monopoly granted by the U.S. Constitution has morphed into an expansive, near-perpetual monopoly, enforced by intrusive technologies and draconian penalties. +={copyright law:sharing economy vs.+3} + +The resulting curbs on citizen freedom, as large entertainment and media corporations gain legal privileges at the expense of the public, is a complicated issue that I return to in chapter 2. But it is worth noting briefly why copyright law has been particularly harmful to the commons in the digital age. When Congress enacted a major revision of U.S. copyright law in 1976, it eliminated a longstanding requirement that works had to be formally registered in order to receive copyright protection.~{ The effect of the elimination of formal registration in copyright law is cogently discussed by Lessig in /{Free Culture}/ (New York: Penguin, 2004), pp. 170–73, and pp. 248–53. }~ Under the new law, /{everything}/ became automatically copyrighted upon creation. This meant that all information and artistic work created after 1978 (when the law took effect) has been born into an invisible envelope of property rights. It sounds appealing to eliminate bureaucratic formalities like registration. But the shift to automatic copyright has meant that every digital scribble is born with a © branded on its side. /{Culture = private property}/. 
+={Copyright Act (1976);copyright law:automatic+1|revision of (1976)+1|and property rights+1;property rights, and copyright law} + +The various industries that rely on copyrights have welcomed this development because it helps them portray their ownership rights as all-encompassing. They can cast the public’s right to use works without permission or payment — traditionally guaranteed under the fair use doctrine and the public domain — as exceptions to the general rule of absolute property rights. “What could be wrong with enclosing works in ever-stronger packages of property rights?” the music and film industries argue. “That’s how new economic wealth is created.” The media oligopolies that control most of television, film, music, and news gathering naturally want to protect their commercial content. It is the fruit of a vast system of fixed investment — equipment, high-priced stars, lawyers, distribution channels, advertising, etc. — and copyright law is an important tool for protecting that value. + +The Internet has profoundly disrupted this model of market production, however. The Internet is a distributed media system of low-cost capital (your personal computer) strung together with inexpensive transmission and software. Instead of being run by a centralized corporation that relies upon professionals and experts above all else, the Internet is a noncommercial infrastructure that empowers amateurs, citizens, and ordinary individuals in all their quirky, authentic variety. The mass media have long regarded people as a commodifiable audience to be sold to advertisers in tidy demographic units. +={Internet:empowerment by+2} + +Now, thanks to the Internet, “the people formerly known as the audience” (in Jay Rosen’s wonderful phrase) are morphing into a differentiated organism of flesh-and-blood, idiosyncratic individuals, as if awakening from a spell. 
Newly empowered to speak as they wish, in their own distinctive, personal voices to a global public of whoever cares to listen, people are creating their own transnational tribes. They are reclaiming culture from the tyranny of mass-media economics and national boundaries. In Lessig’s words, Internet users are overthrowing the “read only” culture that characterized the “weirdly totalitarian” communications of the twentieth century. In its place they are installing the “read/write” culture that invites everyone to be a creator, as well as a consumer and sharer, of culture.~{ Lawrence Lessig, “The Read-Write Society,” delivered at the Wizards of OS4 conference in Berlin, Germany, on September 5, 2006. Available at http:// www.wizards-of-os.org/programm/panels/authorship_amp_culture/keynote _the_read_write_society/the_read_write_society.html. }~ A new online citizenry is arising, one that regards its socially negotiated rules and norms as at least as legitimate as those established by conventional law. +={Rosen, Jay} + +Two profoundly incommensurate media systems are locked in a struggle for survival or supremacy, depending upon your perspective or, perhaps, mutual accommodation. For the moment, we live in a confusing interregnum — a transition that pits the dwindling power and often desperate strategies of Centralized Media against the callow, experimental vigor of Internet-based media. This much is clear, however: a world organized around centralized control, strict intellectual property rights, and hierarchies of credentialed experts is under siege. A radically different order of society based on open access, decentralized creativity, collaborative intelligence, and cheap and easy sharing is ascendant. Or to put it more precisely, we are stumbling into a strange hybrid order that combines both worlds — mass media and online networks — on terms that have yet to be negotiated. 
+ +2~ The Rise of the Commoners +={commoners:rise of+21} + +But who shall do the negotiating? Who will set forth a compelling alternative to centralized media, and build it? That task has fallen to a loosely coordinated global federation of digital tribes — the free software and open-source hackers, the Wikipedians, the bloggers and citizen-journalists, the remix musicians and filmmakers, the avant-garde artists and political dissidents, the educators and scientists, and many others. It is a spontaneous folk-tech conspiracy that belongs to everyone and no one. + +As we will see in chapter 1, Richard Stallman, the legendary hacker, played an indispensable first-mover role by creating a sovereign domain from which to negotiate with commercial players: free software. The software commons and later digital commons inspired by it owe an incalculable debt to Stallman’s ingenious legal innovation, the General Public License, or GPL, launched in 1989. The GPL is a license for authorizing anyone to use a copyrighted software program so long as any copies or derivative versions are also made available on the same terms. This fairly simple license enables programmers to contribute code to a common pool without fear that someone might privatize and destroy the commons. +={General Public License (GPL)} + +As the computer revolution continued through the 1980s and the Internet went wide in the 1990s, the antisocial, antidemocratic implications of copyright law in networked spaces became more evident. As we will see in chapter 2, a growing community of progressive legal scholars blew the whistle on some nasty developments in copyright law that were shrinking the public’s fair use rights and the public domain. Scholars such as James Boyle, Pamela Samuelson, Jessica Litman, Yochai Benkler, Lawrence Lessig, Jonathan Zittrain, and Peter Jaszi provided invaluable legal analyses about the imperiled democratic polity of cyberspace. 
+={Lessig, Lawrence+2} + +By the late 1990s, this legal scholarship was in full flower, Internet usage was soaring, and the free software movement produced its first significant free operating system, GNU/Linux. The commoners were ready to take practical action. Lessig, then a professor at Harvard Law School, engineered a major constitutional test case, /{Eldred v. Reno}/ (later /{Eldred v. Ashcroft}/), to try to strike down a twenty-year extension of copyright terms — a case that reached the U.S. Supreme Court in 2002. At the same time, Lessig and a number of his colleagues, including MIT computer scientist Hal Abelson, Duke law professor James Boyle, and Villanova law professor Michael W. Carroll, came together to explore innovative ways to protect the public domain. It was a rare moment in history in which an ad hoc salon of brilliant, civic-minded thinkers from diverse fields of endeavor found one another, gave themselves the freedom to dream big thoughts, and embarked upon practical plans to make them real. +={GNU/Linux;Linux:see also GNU/Linux;Eldred v. Reno/Eldred v. Ashcroft} + +The immediate upshot of their legal and techno ingenuity, as we will see in chapters 3 and 4, was the drafting of the Creative Commons licenses and the organization that would promote them. The purpose of these free, standardized public licenses was, and is, to get beyond the binary choice imposed by copyright law. Why must a work be considered either a chunk of privately owned property or a kind of nonproperty completely open to anyone without constraint (“in the public domain”)? The CC licenses overcome this stifling either/or logic by articulating a new middle ground of ownership that sanctions sharing and collaboration under specified terms.
To stress its difference from copyright law, which declares “All Rights Reserved,” the Creative Commons licenses bear the tagline “Some Rights Reserved.” +={Creative Commons (CC) licenses+2|copyright law, and+2;copyright law:CC licenses+2} + +Like free software, the CC licenses paradoxically rely upon copyright law to legally protect the commons. The licenses use the rights of ownership granted by copyright law not to exclude others, but to invite them to share. The licenses recognize authors’ interests in owning and controlling their work — but they also recognize that new creativity owes many social and intergenerational debts. Creativity is not something that emanates solely from the mind of the “romantic author,” as copyright mythology has it; it also derives from artistic communities and previous generations of authors and artists. The CC licenses provide a legal means to allow works to circulate so that people can create something new. /{Share, reuse, and remix, legally}/, as Creative Commons puts it. + +After the licenses were introduced in December 2002, they proliferated throughout the Internet and dozens of nations as if by spontaneous combustion. It turns out that the licenses have been more than a legal fix for the limitations of copyright law. They are a powerful form of social signaling. The licenses have proven to be a flag for commoners to advertise their identities as members of a culturally insurgent sharing economy — an aesthetic/political underground, one might say. Attaching the CC logo to one’s blog, video, MP3 file, or laptop case became a way to proclaim one’s support for free culture. Suddenly, all sorts of participatory projects could be seen as elements of a larger movement. By 2007, authors had applied one or more of six CC licenses to 90 million works, by one conservative estimate, or more than 220 million works by another estimate.
Collectively, CC-licensed works constitute a class of cultural works that are “born free” to be legally shared and reused with few impediments. + +A great deal of the Creative Commons story revolves around its founder, the cerebral yet passionate Larry Lessig, a constitutional law professor at Harvard in the mid-1990s until a move to Stanford Law School in 2000. As a scholar with a sophisticated grasp of digital technologies, Lessig was one of the first to recognize that as computers became the infrastructure for society, software code was acquiring the force of law. His 1999 classic, /{Code and Other Laws of Cyberspace}/, is renowned for offering a deep theoretical framework for understanding how politics, law, technology, and social norms shape the character of cyberspace — and in turn, any society. +={Lessig, Lawrence:Code and Other Laws of Cyberspace} + +In popularizing this message, it didn’t hurt that Lessig, an experienced classroom lecturer, is a poised and spellbinding performer. On the tech and copyright circuit, in fact, he has become something of a rock star. With his expansive forehead and wire glasses, Lessig looks every bit the professor he is. Yet in his signature black jeans and sport jacket, delivering punchy one-liners punctuated by arresting visuals projected on a big screen behind him, Lessig makes a powerful impression. He’s a geek-chic techie, intellectual, legal activist, and showman all rolled into one. + +From the beginning, Lessig and his colleagues wondered, How far can the sharing ethic be engineered? Just how far can the idea of free culture extend? As it turns out, quite far. At first, of course, the free culture project was applied mostly to Web-based text and music. But as we see in chapters 5 through 12, the technologies and ethic of free culture have rapidly taken root in many creative sectors of society — video, music, books, science, education — and even business and international arts and culture. + +!{/{Remix culture.}/}! 
Thanks to digital technologies, musicians can sample verbatim snippets of other musicians’ work in their own works, producing “remixes” that blend sounds from a number of copyrighted songs. It’s all patently illegal, of course, unless you’re wealthy enough to pay for the rights to use a sample. But that hasn’t stopped artists. +={music:remixes+2;remix works+2} + +In fact, the underground remix scene has become so robust that even established artists feel obliged to engage with it to bolster their street cred. With a wink and a nudge from record labels, major rap stars like Jay-Z and Eminem have released instrumental tracks of their records in the hope and expectation that remix /{auteurs}/ will recycle the tracks. Record labels have quietly relied on mixtapes — personalized compilations of tracks — to gain exposure and credibility.~{ See, e.g., Joanna Demers, /{Steal This Music: How Intellectual Property Law Affects Musical Creativity}/ (Athens: University of Georgia Press, 2006); Kelefa Sanneh, “Mixtapes Mix in Marketing,” New York Times, July 20, 2006. }~ To help an illegal social art go legit, many artists are using Creative Commons licenses and public-domain sound clips to build a legal body of remix works. + +In the video world, too, the remix impulse has found expression in its own form of derivative creativity, the mashup. From underground remakes of /{Star Wars}/ films to parodies of celebrities, citizen-amateurs are taking original video clips and mixing them with other images, pop music tracks, and their own narrations. When Alaska senator Ted Stevens compared the Internet to a “series of tubes,” video clips of his rambling speech were mashed up and set to a techno dance beat. Beyond this playful subculture, serious filmmakers are using CC licenses on their works to develop innovative distribution systems that attract large audiences and earn money.
Machinima animations — a filmmaking technique that uses computer game action sequences, shot with in-game cameras and then edited together — are pioneering a new market niche, in part through their free distribution under a CC license. +={Machinima animations;Stevens, Ted} + +!{/{Open business.}/}! One of the most surprising recent developments has been the rise of “open business” models. Unlike traditional businesses that depend upon proprietary technology or content, a new breed of businesses see lucrative opportunities in exploiting open, participatory networks. The pioneer in this strategy was IBM, which in 2000 embraced GNU/Linux, the open-source computer operating system, as the centerpiece of its service and consulting business.~{ Steve Lohr, “IBM to Give Free Access to 500 Patents,” /{New York Times}/, July 11, 2005. See also Steven Weber, /{The Success of Open Source Software}/ (Cambridge, Mass.: Harvard University Press, 2004), pp. 202–3. See also Pamela Samuelson, “IBM’s Pragmatic Embrace of Open Source,” /{Communications of the ACM}/ 49, no. 21 (October 2006). }~ Dozens of small, Internet-based companies are now exploiting open networks to build more flexible, sustainable enterprises. +={GNU/Linux:IBM, and;IBM:GNU/Linux, and;open business models+1} + +The key insight about many open-platform businesses is that they no longer look to copyright or patent law as tools to assert market control. Their goal is not to exclude others, but to amass large communities. Open businesses understand that exclusive property rights can stifle the value creation that comes with mass participation, and so they strive to find ways to “honor the commons” while making money in socially acceptable forms of advertising, subscriptions, or consulting services.
The brave new economics of “peer production” is enabling forward-thinking businesses to use social collaboration among thousands, or even millions, of people to create social communities that are the foundation for significant profits. /{BusinessWeek}/ heralded this development in a major cover story in 2005, “The Power of Us,” and called sharing “the net’s next disruption.”~{ Robert D. Hof, “The Power of Us: Mass Collaboration on the Internet Is Shaking Up Business,” /{BusinessWeek}/, June 20, 2005, pp. 73–82. }~ + +!{/{Science}/}! as a commons. The world of scientific research has long depended on open sharing and collaboration. But increasingly, copyrights, patents, and university rules are limiting the flow of scientific knowledge. The resulting gridlock of rights in knowledge is impeding new discoveries and innovation. Because of copyright restrictions and software incompatibilities, scientists studying genetics, proteins, and marine biology often cannot access databases containing vital research. Or they cannot easily share physical samples of lab samples. When the maker of Golden Rice, a vitamin-enhanced bioengineered rice, tried to distribute its seeds to millions of people in poor countries, it first had to get permissions from seventy patent holders and obtain six Material Transfer Agreements (which govern the sharing of biomedical research substances).~{ Interview with John Wilbanks, “Science Commons Makes Sharing Easier,” /{Open Access Now}/, December 20, 2004, available at http://www.biomedcen tral.com/openaccess/archive/?page=features&issue=23.}~ +={Wilbanks, John+1;Science Commons:CC Commons spinoff, and+1} + +The problem of acquiring, organizing, and sharing scientific knowledge is becoming more acute, paradoxically enough, as more scientific disciplines become dependent on computers and the networked sharing of data. 
To help deal with some of these issues, the Creative Commons in 2005 launched a new project known as the Science Commons to try to redesign the information infrastructure for scientific research. The basic idea is to “break down barriers to sharing that are hindering innovation in the sciences,” says John Wilbanks, executive director of Science Commons. Working with the National Academy of Sciences and other research bodies, Wilbanks is collaborating with astronomers, archaeologists, microbiologists, and medical researchers to develop better ways to make vast scientific literatures more computer-friendly, and databases technically compatible, so that they can be searched, organized, and used more effectively. + +!{/{Open education and learning.}/}! A new class of knowledge commons is poised to join free and open-source software, the Creative Commons and Wikipedia as a coherent social movement. The new groundswell goes by the awkward name “Open Educational Resources,” or OER.~{ See, e.g., Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, “A Review of the Open Educational Resources (OER) Movement: Achievements, Challenges and New Opportunities,” February 2007, available at http://www .oerderves.org/?p=23.}~ One of the earlier pioneers of the movement was the Massachusetts Institute of Technology which has put virtually all of its course materials on the Web, for free, through its OpenCourseWare initiative. The practice has now spread to scores of colleges and universities around the world, and inspired a broader set of OER initiatives: digital repositories for articles, reports, and data; open-access scholarly journals that bypass expensive commercial publishers; and collaborative Web sites for developing teaching materials. There are wikis for students and scholars working together, sites to share multimedia presentations, and much more. 
+={education:OER movement+1;Open Educational Resources (OER) movement+1;Wikipedia:social movement, as+1;Creative Commons (CC):social movement, as+1} + +The OER movement has particular importance for people who want to learn but don’t have the money or resources — scholars in developing countries, students struggling to pay for their educations, people in remote or rural locations, people with specialized learning needs. OER is based on the proposition that it will not only be cheaper or perhaps free if teachers and students can share their materials through the Web, it will also enable more effective types of learning. So the OER movement is dedicated to making learning tools cheaper and more accessible. The revolutionary idea behind OER is to transform traditional education — teachers imparting information to passive students — into a more learner-driven process facilitated by teachers. Self-directed, socially driven learning supplants formal, hierarchical modes of teaching. + +!{/{The international sharing economy.}/}! Shortly after the first CC licenses were released in 2002, dozens of exceptionally capable volunteers — from Japan, Finland, Brazil, South Africa, and other countries — came knocking on the door of CC. How can we adapt the American CC licenses to our respective national legal systems? they asked. This unexpected turn prompted the Creative Commons to inaugurate Creative Commons International, based in Berlin, Germany, to supervise the complicated task of “porting” the U.S. licenses to other legal jurisdictions. To date, CC affiliates in forty-seven nations have adapted the U.S. licenses to their legal systems, and another seventeen have porting projects under way. +={Creative Commons International+1} + +The volunteers include avant-garde artists in Croatia, free software programmers in the Netherlands, South Korean judges, Italian law professors, South African musicians, Malaysian citizen-journalists, Bulgarian filmmakers, and Taiwanese songwriters.
The passionate international licensing movement has even been embraced by the Brazilian government, which has proclaimed itself the first Free Culture Nation. As usage of the licenses spreads, they are effectively becoming the default international legal structure of the sharing economy. + +2~ A New Type of Emergent Democracy? + +Peter Suber, a leading champion of open-access scholarly publishing, once explained to me why a disparate, rambunctious crowd of commoners spread around the globe might wish to work together to do something about their plight. “People are taking back their culture,” Peter said. “People who have not been served by the current law have quietly endured it until they saw that they didn’t have to.”~{ Interview with Peter Suber, June 28, 2006. }~ The Creative Commons has become both a symbol and a tool for people to reclaim creativity and culture from the mass-media leviathans. The licenses and the organization have become instruments to advance a participatory, sharing economy and culture. +={Suber, Peter} + +How far can it go? Will it significantly affect conventional politics and government? Can it bring market forces and social needs into a more positive alignment? + +This book is about the struggle to imagine this new world and push it as far as it can go. It is, in one sense, a history, but “history” suggests that the story is over and done. The truth is that the commons movement is tremendously robust and expansive right now. The early history about free software, the public domain, and the Creative Commons is simply a necessary foundation for understanding the propulsive logic of what is happening. + +The story told in these pages is not entirely new; it has been told in fragments and through the restless lens of journalism. But it has not been told in its larger conceptual and historical sweep. 
That’s partly because most of its players are usually seen in isolation from one another, and not put in the context of the larger open-platform revolution. It’s also because the free culture movement, notwithstanding its vigor, is generally eclipsed by the big-money corporate developments that are ostensibly more important. But that is precisely the problem: conventional economics does not understand the actual significance of open platforms and the commons. We need to understand what the online commons represent: a powerful sociotechnological paradigm that is reordering some basic dynamics of creative practice, culture, politics, and everyday life. + +I am no bystander in this story, it must be said, but a commoner who has grappled with the quandaries of copyright law and the public domain for nearly twenty years. In 2001, after co-founding Public Knowledge, a Washington advocacy group to defend the public’s stake in copyright and Internet policies, I went on to write books on the market enclosure of myriad commons and on the absurd expansions of copyright and trademark law. Over the course of this work, I discovered how a commons analysis can help us understand the digital revolution. It can help us see that it is not just about technological innovation, but about social and legal innovations. Reading Elinor Ostrom and Yochai Benkler, in particular — two leading theorists of the commons — I came to realize that social communities, and not just markets, must be recognized as powerful vehicles for creating value. I realized that many basic assumptions about property rights, as embedded in copyright law and neoclassical economics, fail to take account of the generative power of online communities. +={Public Knowledge} + +How then shall we create the commons and protect it? That question lies at the core of this book and the history of the commoners in cyberspace.
I am mostly interested in exploring how the Creative Commons has galvanized a variety of interrelated crusades to build a digital republic of, by, and for the commoners. One reason why a small licensing project has grown into a powerful global brand is that, at a time of mass-media dominance and political stalemate, free culture offers an idealistic alternative vision. Something you can do. A movement in which everyone can play some useful role. The free culture movement stands for reclaiming culture by making it yourself and for reviving democracy by starting in your own digital backyard. CC stands for personal authenticity and diversity in a world of stale, mass-marketed product. It stands for good fun and the joys of sharing. + +Put the CC logo on your blog or music CD or video, and you too can belong to a movement that slyly sticks it to Big Media without getting into an ugly brawl. Don’t get mad, the CC community seems to whisper. Just affiliate with a growing virtual nation of creative renegades. Transcend a rigged game by migrating to a commons of your own making. Build therefore your own world, in the manner of Henry David Thoreau — then imagine its embrace by many others. Imagine it radiating into conventional politics with a refreshing ethic of open accountability and earned rewards, a contempt for coercive business practices and governmental abuses, and an insistence upon transparency, participation, and the consent of the governed. You may be an entrepreneur who just wants to build a profitable business, or a scientist who just wants to find better ways to research Huntington’s disease. The commons has some solutions in these areas, too. This big-tent movement is unabashedly ecumenical. + +This is the vision now exploding around the world anyway. The recurring question in its earliest days, and now, remains — How can we build it out? /{Can}/ it be built out? And how far? For the commoners, just asking the question is halfway to answering it. 
+ +:B~ PART I + +:C~ Harbingers of the Sharing Economy + +1~intro_i [Intro] -# + +The rise of the sharing economy had its roots among the renegades living on the periphery of mainstream culture. At the time, they were largely invisible to one another. They had few ways of making common cause and no shared language for even naming the forces that troubled them. It was the 1990s, after all, a time of alluring mercantile fantasies about the limitless possibilities of the laissez-faire “information superhighway.” Even for those who could pierce the mystifications, the new technologies were so new, powerful, and perplexing that it was difficult to understand their full implications. + +The renegades, while sharing a vision of technological progress, were disturbed by many on-the-ground realities. A small network of hackers, for example, was enraged to learn that software was becoming a closed, proprietary product. Companies could prohibit interested individuals from tinkering with their own, legally purchased software. On both creative and political grounds, this development was odious to Richard Stallman, a brilliant programmer who soon hatched a dream of building a protected kingdom of “free software,” the subject of chapter 1. +={software:proprietary;Stallman, Richard} + +Meanwhile, a loose community of legal scholars and tech activists was becoming alarmed by the antisocial, anti-democratic tendencies of copyright law and digital technology. Scholars such as Lawrence Lessig, James Boyle, and Hal Abelson began to realize that copyright law and software code were acquiring unsuspected powers to redesign our political and social order. They also began to understand the ways in which the public domain is not a wasteland, as conventional minds had long supposed, but a highly generative zone of culture. This intellectual journey is described in chapter 2. 
+={Abelson, Hal;Boyle, James;Lessig, Lawrence+1} + +Finally, it was becoming painfully apparent to yet another amorphous band of renegades — artists, musicians, writers, scientists, educators, citizens — that copyright law and technological controls were artificially restricting their creative freedoms. With scant public attention, the music, film, and publishing industries were using their clout to protect their archaic business models at the expense of innovation and the commons. This onslaught ultimately provoked one exemplary commoner, Eric Eldred, to team up with legal scholar Lawrence Lessig to mount an unprecedented constitutional challenge to copyright law, the focus of chapter 3. + +None of these surges of innovative dissent was well funded or particularly promising. For the most part, they were improvisational experiments undertaken by public-spirited individuals determined to vindicate their visions for a better society. With the benefit of hindsight, we can now see that while many of these initiatives were only partially successful, each was indispensable to the larger, later task of imagining and building a digital republic to secure basic human freedoms, the subject of Part II. + +1~ 1 IN THE BEGINNING WAS FREE SOFTWARE +={Stallman, Richard+34;General Public License (GPL)+1} + +/{Richard Stallman's mythic struggle to protect the commons of code set the viral spiral in motion.}/ + +The struggle to imagine and invent the software commons, which later set in motion a viral spiral now known as free culture, began with Richard Stallman, a brilliant, eccentric MIT computer programmer. Stallman’s history as a hacker and legal innovator has by now become the stuff of legend. As one of the first people to confront the deep tensions between proprietary control and the public domain in software development, Stallman has achieved that rare pinnacle in the high-tech world, the status of celebrity geek. 
Besides his programming prowess, he is renowned for devising the GNU General Public License, more commonly known as the GPL, an ingenious legal mechanism to protect shared software code. +={free culture+33} + +Stallman — or RMS, as he likes to be called — has become an iconic figure in the history of free culture in part because he showed courageous leadership in protecting the commons well before anyone else realized that there was even a serious problem. He was a lone voice in the wilderness for at least ten years before the Internet became a mass medium, and so has earned enormous credibility as a leader on matters of free culture. He has also been reviled by some as an autocratic zealot with bad manners and strident rhetoric. + +It is perhaps fitting that Stallman could be mistaken for an Old Testament prophet. He is a shaggy, intense, and fiercely stubborn guy. On his Web site, visitors can find a gag photo of him posed as Saint IGNUcius, with his hand raised in mock genuflection and his head encircled by a gold aureole (held in place by two admiring acolytes). He has been known to deliver lectures barefoot, sleep on the couch in a borrowed office for weeks at a time, and excoriate admirers for using taboo phrases like “intellectual property” and “copyright protection.” Stallman explains that “intellectual property” incorrectly conflates three distinct bodies of law — copyright, patent, and trademark — and emphasizes individual property rights over public rights. “Copyright protection” is misleading, he says, because it implies a positive, necessary act of /{defending}/ something rather than an acquisitive, aggressive act of a monopolist. Stallman considers /{content}/ to be a disparaging word, better replaced by “works of authorship.” He has even made a list of fourteen words that he urges people to avoid because of their politically misleading valences.~{ Joshua Gray, editor, /{Free Software Free Society: Selected Essays of Richard M.
Stallman}/ (Boston: GNU Press, 2002), pp. 190–91. }~ +={authorship;intellectual property} + +Even though Stallman frequently speaks to august academic and scientific gatherings, and meets with the heads of state in developing countries, he resembles a defiant hippie, Yet for his visionary role in developing free software and the free software philosophy, Stallman is treated as if he were a head of state . . . which, in a way, he is. His story has irresistible mythological resonances — the hero’s journey through hardship and scorn, later vindicated by triumph and acclaim. But for many, including his most ardent admirers, Stallman’s stubborn idealism can also be supremely maddening. +={free software} + +His first encounter with the creeping ethic of proprietary control, in the late 1970s, is an oft-told part of his story. The Xerox Corporation had donated an experimental laser printer to the MIT Artificial Intelligence Lab, where Stallman was then a graduate student. The printer was constantly jamming, causing frustration and wasting everyone’s time. Stallman wanted to devise a software fix but he discovered that the source code was proprietary. Determined to find out who was responsible and force them to fix it, he tracked down a computer scientist at Carnegie Mellon University who had supposedly written the code — but the professor refused to help him; he had signed a nondisclosure agreement with Xerox prohibiting him from sharing the code. +={Xerox Corporation+1;software:proprietary+1} + +Stallman considered Xerox’s lockup of code a profound moral offense that violated the integrity of the hacker community. (Among practitioners, /{hacker}/ is a term of respect for an ingenious, resourceful programmer, not an accusation of criminality.) Not only did it prevent people from fixing their own equipment and software, the nondisclosure agreement flouted the Golden Rule. It prohibited sharing with one’s neighbor. 
The proprietary ethic was not just immoral, by Stallman’s lights, but a barrier to developing great software. +={hackers:use of term} + +By the late 1970s, he had developed a breakthrough text editor, Emacs, in collaboration with a large community of programmers. “Everybody and his brother was writing his own collection of redefined screen-editor commands, a command for everything he typically liked to do,” Stallman wrote. “People would pass them around and improve them, making them more powerful and more general. The collections of redefinitions gradually became system programs in their own right.”~{ Sam Williams, /{Free as in Freedom: Richard Stallman’s Crusade for Free Software}/ (Sebastopol, CA: O’Reilly & Associates 2002), pp. 76–88. }~ Emacs was one of the first software projects to demonstrate the feasibility of large-scale software collaboration and the deep well of innovative ideas that it could yield. Emacs enabled programmers to add new features with great ease, and to constantly upgrade and customize the program with the latest improvements. The Emacs experiment demonstrated that /{sharing}/ and /{interoperability}/ are vital principles for a flourishing online commons. +={Emacs+2;Stallman, Richard:Emacs, and+2} + +Two problems quickly emerged, however. If people did not communicate their innovations back to the group, divergent streams of incompatible code would produce a Tower of Babel effect. Second, if the code and its derivations were not shared with everyone, the usefulness of the program would slowly decline. The flow of innovation would dissipate. + +To solve these problems, Stallman invented a user contract that he called the “Emacs Commune.” It declared to all users that Emacs was “distributed on a basis of communal sharing, which means that all improvements must be given back to me to be incorporated and distributed.” He enforced the provisions of the contract with an iron hand. 
As Stallman biographer Sam Williams writes, when the administrators for the MIT Laboratory for Computer Science instituted a new password system — which Stallman considered an antisocial power grab — he “initiated a software ‘strike,’ refusing to send lab members the latest version of Emacs until they rejected the security system on the lab’s computers. The move did little to improve Stallman’s growing reputation as an extremist, but it got the point across: commune members were expected to speak up for basic hacker values.” +={Williams, Sam} + +Stallman was groping for a way to sustain the hacker ethic of community and sharing in the face of new types of top-down control. Some programmers were beginning to install code that would turn off access to a program unless money was paid. Others were copyrighting programs that had been developed by the community of programmers. Bill Gates, as an undergraduate at Harvard in the late 1970s, was nearly expelled for using publicly funded labs to create commercial software. He was forced to put his code into the public domain, whereupon he left the university to found an obscure Albuquerque company called Micro-Soft. +={authorship:community access;Gates, Bill;hackers:use of term+5} + +Software was simply becoming too lucrative for it to remain a shared resource — an attitude that enraged Stallman. He was determined to preserve the integrity of what we would now call the software commons. It was an immense challenge because copyright law makes no provisions for community ownership of creative work beyond “joint authorship” among named individuals. Stallman wanted to devise a way to ensure that all the talent and innovation created by commoners would /{stay}/ in the commons. The idea that an outsider — a university administrator, software entrepreneur, or large company — could intrude upon a hacker community and take its work was an appalling injustice to Stallman. 
+={authorship:joint;copyright law:community authorship vs.;commons:copyright law vs.} + +Yet this was precisely what was happening to the hacker community at MIT’s AI Lab in the early 1980s. It was slowly disintegrating as one programmer after another trooped off to join commercial software ventures; the software itself was becoming annexed into the marketplace. Software for personal computers, which was just then appearing on the market, was sold as a proprietary product. This meant that the source code — the deep design architecture of the program that operated everything — was inaccessible.~{ Steven Levy, /{Hackers: Heroes of the Computer Revolution}/ (New York: Delta, 1993), pp. 425, 427. }~ Perhaps most disturbing to Stallman at the time was that the leading mainframe operating system, Unix, was locking up its source code. Unix had been developed by AT&T with generous federal funding, and had been generally available for free within academic computing circles. At the time, most mainframe software was given away to encourage buyers to purchase the computer hardware. But when the Department of Justice broke up AT&T in 1984 to spur competition, it also enabled AT&T to enter other lines of business. Naturally, the company was eager to maximize its profits, so in 1985 it began to charge a licensing fee for Unix. +={AT&T;Unix;software:source code for} + +Stallman grieved at the disintegration of the hacker community at the AI Lab as closed software programs inexorably became the norm. As he wrote at the time: + +_1 The people remaining at the lab were the professors, students, and non-hacker researchers, who did not know how to maintain the system, or the hardware, or want to know. Machines began to break and never be fixed; sometimes they just got thrown out. Needed changes in software could not be made. The non-hackers reacted to this by turning to commercial systems, bringing with them fascism and license agreements. 
I used to wander through the lab, through the rooms so empty at night where they used to be full, and think, “Oh my poor AI lab! You are dying and I can’t save you.” + +Stallman compared himself to Ishi, “the last survivor of a dead [Native American] culture. And I don’t really belong in the world anymore. And in some ways I feel I ought to be dead.” + +Stallman decided to leave MIT — why stay? — but with a brash plan: to develop a free software operating system that would be compatible with Unix. It would be his brave, determined effort to preserve the hacker ethic. He dubbed his initiative the GNU Project, with “GNU” standing for “GNU’s Not Unix” — a recursive hacker’s pun. He also started, in 1985, the Free Software Foundation to help develop GNU software projects and distribute them for free to anyone. (The foundation now occupies a fifth-floor office on a narrow commercial street in downtown Boston.) +={Unix;free software;Free Software Foundation;GNU Project+3;Stallman, Richard:free software, and|GNU Project, and+3} + +The Emacs Commune experience had taught Stallman about the limits of informal social norms in protecting the software commons. It also revealed the difficulties of being the central coordinator of all code changes. This time, in developing a set of software programs for his GNU Project, Stallman came up with a better idea — a legally enforceable license. The goal was to ensure that people could have free access to all derivative works and share and reuse software. The licensing rights were based on the rights of ownership conferred by copyright law. +={Emacs;Stallman, Richard:Emacs, and;authorship:community access;copyright law:community authorship vs.+2|licensing rights+2} + +Stallman called his license the GNU General Public License, or GPL. He puckishly referred to it as “copyleft,” and illustrated it with a reverse copyright symbol (a backward c in a circle). 
Just as programmers pride themselves on coming up with ingenious hacks to solve a software problem, so the GPL is regarded as a world-class hack around copyright law. Copyright law has no provisions for protecting works developed by a large community of creators. Nor does it offer a way to prevent works from being made proprietary. Indeed, that’s the point of copyright law — to create private property rights. +={software:proprietary;copyright law:property rights, and;General Public License (GPL)+14;property rights, and copyright law;Stallman, Richard:GPL, and+14} + +The GPL bypasses these structural limitations of copyright law by carving out a new zone of collective ownership. A work licensed under the GPL permits users to run any program, copy it, modify it, and distribute it in any modified form. The only limitation is that any derivative work must also be licensed under the GPL. This provision of the GPL means that the license is /{automatically}/ applied to any derivative work, and to any derivative of a derivative, and so on — hence its viral nature.~[* Stallman told me he considers it “a common calumny to compare the GNU GPL to a virus. That is not only insulting (I have a virus infection in my throat right now and it is no fun), it is also inaccurate, because the GPL does not spread like a virus. It spreads like a spider plant: if you cut off a piece and plant it over here, it grows over here.]~ The GPL ensures that the value created by a given group of commoners shall stay within the commons. To guarantee the viral power of the license, users of GPL’d works cannot modify the licensing terms. No one has to pay to use a GPL’d work — but as a condition for using it, people are legally obliged to license any derivative versions under the GPL. 
In this way, a GPL’d work is born and forever protected as “shareable.” +={collective ownership;General Public License (GPL):viral nature of;software:user freedoms;Stallman, Richard:freedom, and} + +Version 1.0 of the GPL was first published in 1989. It was significant, writes Sam Williams, because it “demonstrated the intellectual similarity between legal code and software code. Implicit within the GPL’s preamble was a profound message: instead of viewing copyright law with suspicion, hackers should view it as yet another system begging to be hacked.”~{ Williams, /{Free as in Freedom}/, p. 127. }~ The GPL also served to articulate, as a matter of law, the value of collaborative work. A universe of code that might previously have been regarded as part of the “public domain” — subject to free and unrestricted access — could now be seen in a subtly different light. +={authorship:community access;Williams, Sam;public domain: GPL, and} + + +A GPL’d work is not part of the public domain, because the public domain has no rules constraining how a work may be used. Works in the public domain are open to anyone. The GPL is similar, but with one very important restriction: no private appropriation is allowed. Any follow-on uses must remain free for others to use (a provision that some property rights libertarians regard as “coercive”). Works in the public domain, by contrast, are vulnerable to privatization because someone need only add a smidgen of “originality” to the work and she would own a copyright in the resulting work. A GPL’d work and its derivatives stay free forever — because anyone who tries to privatize a GPL’d work is infringing on the license. 
+ +For Stallman, the GPL became the symbol and tool for enacting his distinct political vision of “freedom.” The license rests on four kinds of freedoms for users of software (which he lists using computer protocols): +={software:user freedoms+4;Stallman, Richard:freedom, and+5} + +_1 Freedom 0: The freedom to run the program for any purpose; + +_1 Freedom 1: The freedom to study how the program works, and to adapt it to your needs. (Access to the source code is a precondition for this); +={authorship:community access} + +_1 Freedom 2: The freedom to redistribute copies so you can help your neighbor; and + +_1 Freedom 3: The freedom to improve the program, and release your improvements to the public, so that the whole community benefits. (Access to the source code is a precondition for this.) +={authorship:community access} + +Stallman has become an evangelist for the idea of freedom embodied in all the GNU programs. He refuses to use any software programs that are not “free,” and he has refused to allow his appearances to be Webcast if the software being used was not “free.” “If I am to be an honest advocate for free software,” said Stallman, “I can hardly go around giving speeches, then put pressure on people to use nonfree software. I’d be undermining my own cause. And if I don’t show that I take my principles seriously, I can’t expect anybody else to take them seriously.”~{ Stallman at MIT forum, “Copyright and Globalization in the Age of Computer Networks,” April 19, 2001, available at http://media-in-transition.mit .edu/forums/copyright/transcript.html. }~ +={Stallman, Richard:free software, and+2} + +Stallman has no problems with people making money off software. He just wants to guarantee that a person can legally use, copy, modify, and distribute the source code. There is thus an important distinction between software that is commercial (possibly free) and software that is proprietary (never free). 
Stallman tries to explain the distinction in a catchphrase that has become something of a mantra in free software circles: /{“free as in ‘free speech,’ not as in ‘free beer.’”}/ The point is that code must be freely accessible, not that it should be free of charge. (This is why “freeware” is not the same as free software. Freeware may be free of charge, but it does not necessarily make its source code accessible.) +={freeware vs. free software;software:proprietary|source code for} + +Eben Moglen, a professor of law at Columbia University and general counsel for the Free Software Foundation since 1994, calls the provisions of the GPL “elegant and simple. They respond to the proposition that when the marginal cost of goods is zero, any nonzero cost of barbed wire is too high. That’s a fact about the twentyfirst century, and everybody had better get used to it. Yet as you know, there are enormous cultural enterprises profoundly committed to the proposition that more and more barbed wire is necessary. And their basic strategy is to get that barbed wire paid for by the public everywhere.”~{ Eben Moglen, “Freeing the Mind: Free Software and the Death of Proprietary Culture,” June 29, 2003, available at http://emoglen.law/columbia.edu/publi cations/maine-speech.html. }~ +={Moglen, Eben;Free Software Foundation} + +The GPL truly was something new under the sun: a legally enforceable tool to vouchsafe a commons of software code. The license is based on copyright law yet it cleverly turns copyright law against itself, limiting its reach and carving out a legally protected zone to build and protect the public domain. In the larger scheme of things, the GPL was an outgrowth of the “gift economy” ethic that has governed academic life for centuries and computer science for decades. What made the GPL different from these (abridgeable) social norms was its legal enforceability. 
+={gift economy;Internet:gift economy of+1;General Public License (GPL):legal enforceability of} + +The GPL might well have remained an interesting but arcane curiosity of the software world but for two related developments: the rise of the Internet in the 1990s and software’s growing role as core infrastructure in modern society. As the computer and Internet revolutions have transformed countless aspects of daily life, it has become evident that software is not just another product. Its design architecture is seminally important to our civic freedoms and democratic culture. Or as Lawrence Lessig famously put it in his 1999 book /{Code}/, “code is law.” Software can affect how a business can function, how information is organized and presented, and how individuals can think, connect with one another, and collaborate. Code invisibly structures people’s relationships, and thus serves as a kind of digital constitutional order. As an economic force, software has become as critical as steel or transportation in previous eras: a building block for the basic activities of the economy, businesses, households, and personal life. +={Lessig, Lawrence:Code and Other Laws of Cyberspace;code:as law;free culture:Internet, of the+2;law:code as;Internet:rise of;software:core infrastructure, as} + +Stallman’s atavistic zeal to preserve the hacker community, embodied in the GPL, did not immediately inspire others. In fact, most of the tech world was focused on how to convert software into a marketable product. Initially, the GPL functioned like a spore lying dormant, waiting until a more hospitable climate could activate its full potential. Outside of the tech world, few people knew about the GPL, or cared.~[* The GPL is not the only software license around, of course, although it was, and remains, the most demanding in terms of protecting the commons of code. 
Other popular open-source licenses include the MIT, BSD, and Apache licenses, but each of these permit, but do not require, that the source code of derivative works also be freely available. The GPL, however, became the license used for Linux, a quirk of history that has had far-reaching implications.]~ And even most techies were oblivious to the political implications of free software. +={hackers:community of} + +Working under the banner of the Free Software Foundation, Stallman continued through the 1980s and 1990s to write a wide number of programs needed to build a completely free operating system. But just as Lennon’s music was better after finding McCartney, Stallman’s free software needed to find Linus Torvalds’s kernel for a Unix-like operating system. (A kernel is the core element of an operating system that controls how the various applications and utilities that comprise the system will run.) +={Free Software Foundation;Torvalds, Linus+1;Unix+1} + +In 1991, Torvalds was a twenty-one-year-old computer science student at the University of Helsinki, in Finland. Frustrated by the expense and complexity of Unix, and its inability to work on personal computers, Torvalds set out to build a Unix-like operating system on his IBM AT, which had a 33-megahertz processor and four megabytes of memory. Torvalds released a primitive version of his program to an online newsgroup and was astonished when a hundred hackers responded within a few months to offer suggestions and additions. Over the next few years, hundreds of additional programmers joined the project, which he named “Linux” by combining his first name, “Linus,” with “Unix.” The first official release of his program came in 1994.~{ One useful history of Torvalds and Linux is Glyn Moody, /{Rebel Code: Inside Linux and the Open Source Revolution}/ (Cambridge, MA: Perseus, 2001). 
}~ +={hackers:community of+3;Linux:development of+7} + +The Linux kernel, when combined with the GNU programs developed by Stallman and his free software colleagues, constituted a complete computer operating system — an astonishing and unexpected achievement. Even wizened computer scientists could hardly believe that something as complex as an operating system could be developed by thousands of strangers dispersed around the globe, cooperating via the Internet. Everyone assumed that a software program had to be organized by a fairly small group of leaders actively supervising the work of subordinates through a hierarchical authority system — that is, by a single corporation. Yet here was a virtual community of hackers, with no payroll or corporate structure, coming together in a loose, voluntary, quasi-egalitarian way, led by leaders who had earned the trust and respect of some highly talented programmers. + +The real innovation of Linux, writes Eric S. Raymond, a leading analyst of the technology, was “not technical, but sociological”: +={Linux:sociological effect of+1} + +_1 Linux was rather casually hacked on by huge numbers of volunteers coordinating only through the Internet. Quality was maintained not by rigid standards or autocracy but by the naively simple strategy of releasing every week and getting feedback from hundreds of users within days, creating a sort of rapid Darwinian selection on the mutations introduced by developers. To the amazement of almost everyone, this worked quite well.~{ Eric S. Raymond, “A Brief History of Hackerdom,” http://www.catb.org/ ~est/writings/cathedral-bazaar/hacker-history/ar01s06.html.}~ + +The Free Software Foundation had a nominal project to develop a kernel, but it was not progressing very quickly. 
The Linux kernel, while primitive, “was running and ready for experimentation,” writes Steven Weber in his book /{The Success of Open Source}/: “Its crude functionality was interesting enough to make people believe that it could, with work, evolve into something important. That promise was critical and drove the broader development process from early on.”~{ Steven Weber, /{The Success of Open Source}/ (Cambridge, MA: Harvard University Press, 2004), p. 100. }~ +={Weber, Steven:The Success of Open Source;Free Software Foundation} + +There were other powerful forces driving the development of Linux. Throughout the 1990s, Microsoft continued to leverage its monopoly grip over the operating system of personal computers, eventually attracting the attention of the U.S. Department of Justice, which filed an antitrust lawsuit against the company. Software competitors such as Hewlett-Packard, Sun Microsystems, and IBM found that rallying behind an open-source alternative — one that was legally protected against being taken private by anyone else— offered a terrific way to compete against Microsoft. +={Hewlett-Packard;IBM:open source, and;Microsoft:antitrust lawsuit against|competition against+2} + +Meanwhile, the once-free Unix software program was becoming a fragmented mess. So many different versions of Unix were being sold that users were frustrated by the proliferation of incompatible proprietary versions. In the words of a Sun Microsystems executive at the time, users were unhappy with the “duplication of effort around different implementations, leading to high prices; poor compatibility; and worst of all, slower development as each separate Unix vendor had to solve the same kinds of problems independently. Unix has become stagnant. . . .”~{ Williams, /{Free as in Freedom}/, p. 100.}~ +={Unix+1;Sun Microsystems} + +Given these problems, there was great appeal in a Unix-like operating system with freely available source code. 
Linux helped address the fragmentation of Unix implementations and the difficulties of competing against the Microsoft monopoly. Knowing that Linux was GPL’d, hackers, academics, and software companies could all contribute to its development without fear that someone might take it private, squander their contributions, or use it in hostile ways. A commons of software code offered a highly pragmatic solution to a market dysfunction. +={General Public License (GPL):Linux, and;GNU/Linux+5;Linux:GPL, and} + +Stallman’s GNU Project and Torvalds’s Linux software were clearly synergistic, but they represented very different styles. The GNU Project was a slower, more centrally run project compared to the “release early and often” developmental approach used by the Linux community. In addition, Stallman and Torvalds had temperamental and leadership differences. Stallman has tended to be more overbearing and directive than Torvalds, who does not bring a political analysis to the table and is said to be more tolerant of diverse talents.~{ Torvalds included a brief essay, “Linux kernel management style,” dated October 10, 2004, in the files of the Linux source code, with the annotation, “Wisdom passed down the ages on clay tablets.” It was included as an epilogue in the book /{Open Life: The Philosophy of Open Source}/, by Henrik Ingo, and is available at http://www.openlife.cc/node/43. }~ +={Torvalds, Linus;GNU Project;Stallman, Richard:GNU Project, and} + +So despite their natural affinities, the Free Software Community and the Linux community never found their way to a grand merger. Stallman has applauded Linux’s success, but he has also resented the eclipse of GNU programs used in the operating system by the Linux name. This prompted Stallman to rechristen the program “GNU/Linux,” a formulation that many people now choose to honor. 
+ +Yet many hackers, annoyed at Stallman’s political crusades and crusty personal style, committed their own linguistic raid by renaming “free software” as “open source software,” with a twist. As GNU/Linux became more widely used in the 1990s, and more corporations began to seriously consider using it, the word /{free}/ in “free software” was increasingly seen as a problem. The “free as in free speech, not as in free beer” slogan never quite dispelled popular misconceptions about the intended sense of the word /{free}/. Corporate information technology (IT) managers were highly wary about putting mission-critical corporate systems in the hands of software that could be had for /{free}/. Imagine telling the boss that you put the company’s fate in the hands of a program you downloaded from the Internet for free! +={free software:open source software, as+6;software:open source+6;free software:uses of term+6} + +Many corporate executives clearly recognized the practical value of free software; they just had no interest in joining Stallman’s ideological crusade or being publicly associated with him. They did not necessarily want to become champions of the “four freedoms” or the political vision implicit in free software. They simply wanted code that works well. As Eric Raymond wrote: “It seemed clear to us in retrospect that the term ‘free software’ had done our movement tremendous damage over the years. Part of this stemmed from the well-known ‘free speech/free beer’ ambiguity. Most of it came from something worse — the strong association of the term ‘free software’ with hostility to intellectual property rights, communism, and other ideas hardly likely to endear themselves to an MIS [management information systems] manager.”~{ Eric S. Raymond, “The Revenge of the Hackers,” in Chris DiBona, Sam Ockman, and Mark Stone, eds., /{Open Sources: Voices from the Open Source Revolution}/ (Sebastopol, CA: O’Reilly & Associates, 1999), p. 212. 
}~ +={Stallman, Richard:free software, and+5;Raymond, Eric S.} + +One response to this issue was the rebranding of free software as “open-source” software. A number of leading free software programmers, most notably Bruce Perens, launched an initiative to set forth a consensus definition of software that would be called “opensource.” At the time, Perens was deeply involved with a community of hackers in developing a version of Linux known as the Debian GNU/Linux distribution. Perens and other leading hackers not only wanted to shed the off-putting political dimensions of “free software,” they wanted to help people deal with the confusing proliferation of licenses. A lot of software claimed to be free, but who could really tell what that meant when the terms were so complicated and legalistic? +={Debian GNU/Linux distribution;Perens, Bruce} + +The Open Source Initiative, begun in 1998, helped solve this problem by enumerating criteria that it considered significant in judging a program to be “open.”~{ http://www.opensource.org. }~ Its criteria, drawn from the Debian community, helped standardize and stabilize the definition of open-source software. Unlike the GPL, permissive software licenses such as BSD and MIT /{allow}/ a program to be freely copied, modified, and distributed but don’t /{require}/ it. A programmer can choose to make a proprietary derivative without violating the license. +={Open Source Initiative+2} + +The Open Source Initiative has focused more on the practical, technical merits of software than on the moral or political concerns that have consumed Stallman. Free software, as Stallman conceived it, is about building a cohesive moral community of programmers dedicated to “freedom.” The backers of open-source software are not necessarily hostile to those ideals but are more interested in building reliable, marketable software and improving business performance. 
As Elliot Maxwell described the free software/open source schism: +={Maxwell, Elliot+1} + +_1 [S]upporters of the Open Source Initiative were willing to acknowledge a role for proprietary software and unwilling to ban any link between open-source software and proprietary software. Richard Stallman aptly characterized the differences: “We disagree on the basic principles but agree more or less on the practical recommendations. So we can and do work together on many specific projects.”~{ Elliot Maxwell, citing Wikipedia entry on “Open Source Movement,” in “Open Standards Open Source and Open Innovation,” in /{Innovations: Technology, Governance, Globalization}/ 1, no. 3 (Summer 2006), p. 134, note 56. }~ + +The philosophical rift between free software and open-source software amounts to a “friendly schism,” a set of divergent approaches that has been bridged in some respects by language.~{ Richard Stallman has outlined his problems with the “open source” definition of software development in an essay, “Why ‘Open Source’ Misses the Point of Free Software,” http://www.gnu.org/philosophy/open-source-misses-thepoint.html. }~ Observers often use the acronym FOSS to refer to both free software and open-source software, or sometimes FLOSS — the L stands for the French word /{libre}/, which avoids the double meaning of the English word /{free}/. Whatever term is used, free and open-source software has become a critical tool for making online marketplaces more competitive, and for creating open, accessible spaces for experimentation. 
In his classic essay, “The Cathedral and the Bazaar,” Eric Raymond explains how the licenses help elicit important noneconomic, personal energies: +={FOSS/FLOSS+3;free software:FOSS/FLOSS+3;Raymond, Eric S.+1;Linux:sociological effect of+1} + + +_1 The Linux world behaves in many respects like a free market or an ecology, a collection of selfish agents attempting to maximize utility which in the process produces a selfcorrecting spontaneous order more elaborate and efficient than any amount of central planning could have achieved. . . . The utility function Linux hackers are maximizing is not classically economic, but is the intangible of their own ego satisfaction and reputation among other hackers.~{ Eric Raymond, “The Cathedral and the Bazaar,” available at http://www .catb.org/~esr/writings/cathedral-bazaar/cathedral-bazaar/ar01s11.html.}~ + +It turns out that an accessible collaborative process, FOSS, can elicit passions and creativity that entrenched markets often cannot. In this respect, FOSS is more than a type of freely usable software; it reunites two vectors of human behavior that economists have long considered separate, and points to the need for new, more integrated theories of economic and social behavior. +={free software:economic effects of+1} + +FOSS represents a new breed of “social production,” one that draws upon social energies that neoclassical economists have long discounted or ignored. It mobilizes the personal passions and moral idealism of individuals, going beyond the overt economic incentives that economists consider indispensable to wealth creation. The eighteenth-century economist Adam Smith would be pleased. He realized, in his 1776 book /{The Wealth of Nations}/, that people are naturally given to “truck, barter and exchange” — but he also recognized, in his earlier /{The Theory of Moral Sentiments}/, written in 1759, that people are motivated by deep impulses of human sympathy and morality. 
Neoclassical economists have long segregated these as two divergent classes of human behavior, regarding altruism and social sympathies as subordinate to the rational, utility-maximizing, self-serving behavior. FOSS embodies a new synthesis — and a challenge to economists to rethink their crude model of human behavior, /{Homo economicus}/. Free software may have started as mere software, but it has become an existence proof that individual and collective goals, and the marketplace and the commons, are not such distinct arenas.~{ I am grateful to Nicholas Gruen for this insight, taken from his essay “Geeks Bearing Gifts: Open Source Software and Its Enemies,” in /{Policy}/ 21, no. 2 (Winter 2005), pp. 39–48. }~ They are tightly intertwined, but in ways we do not fully understand. This is a golden thread that will reappear in later chapters. +={Smith, Adam:The Theory of Moral Sentiments;social production+6} + +Red Hat, a company founded in 1993 by Robert Young, was the first to recognize the potential of selling a custom version (or “distribution”) of GNU/Linux as a branded product, along with technical support. A few years later, IBM became one of the first large corporations to recognize the social realities of GNU/Linux and its larger strategic and competitive implications in the networked environment. In 1998 IBM presciently saw that the new software development ecosystem was becoming far too variegated and robust for any single company to dominate. It understood that its proprietary mainframe software could not dominate the burgeoning, diversified Internet-driven marketplace, and so the company adopted the open-source Apache Web server program in its new line of WebSphere business software. +={Red Hat;Young, Robert;GNU/Linux:IBM, and+1|Red Hat, and;IBM:GNU/Linux, and+1;Apache Web server;open source software:functions of+2} + +It was a daring move that began to bring the corporate and open-source worlds closer together.
Two years later, in 2000, IBM announced that it would spend $1 billion to help develop GNU/Linux for its customer base. IBM shrewdly realized that its customers wanted to slash costs, overcome system incompatibilities, and avoid expensive technology “lock-ins” to single vendors. GNU/Linux filled this need well. IBM also realized that GNU/Linux could help it compete against Microsoft. By assigning its property rights to the commons, IBM could eliminate expensive property rights litigation, entice other companies to help it improve the code (they could be confident that IBM could not take the code private), and unleash a worldwide torrent of creative energy focused on GNU/Linux. Way ahead of the curve, IBM decided to reposition itself for the emerging networked marketplace by making money through tech service and support, rather than through proprietary software alone.~{ Andrew Leonard, “How Big Blue Fell for Linux,” Salon.com, September 12, 2000, available at http://www.salon.com/tech/fsp/2000/09/12/chapter_7_part_one.print.html. The competitive logic behind IBM’s moves is explored in Pamela Samuelson, “IBM’s Pragmatic Embrace of Open Source,” /{Communications of the ACM}/ 49, no. 21 (October 2006), and Robert P. Merges, “A New Dynamism in the Public Domain,” /{University of Chicago Law Review}/ 71, no. 183 (Winter 2004). }~ +={Microsoft:competition against} + +It was not long before other large tech companies realized the benefits of going open source. Amazon and eBay both saw that they could not affordably expand their large computer infrastructures without converting to GNU/Linux. GNU/Linux is now used in everything from Motorola cell phones to NASA supercomputers to laptop computers.
In 2005, /{BusinessWeek}/ magazine wrote, “Linux may bring about the greatest power shift in the computer industry since the birth of the PC, because it lets companies replace expensive proprietary systems with cheap commodity servers.”~{ Steve Hamm, “Linux Inc.,” /{BusinessWeek}/, January 31, 2005. }~ As many as one-third of the programmers working on open-source projects are corporate employees, according to a 2002 survey.~{ Cited by Elliot Maxwell in “Open Standards Open Source and Open Innovation,” note 80, Berlecon Research, /{Free/Libre Open Source Software: Survey and Study — Firms’ Open Source Activities: Motivations and Policy Implications}/, FLOSS Final Report, Part 2, at www.berlecon.de/studien/downloads/200207FLOSS_Activities.pdf. }~ +={Amazon;eBay} + +With faster computing speeds and cost savings of 50 percent or more on hardware and 20 percent on software, GNU/Linux has demonstrated the value proposition of the commons. Open source demonstrated that it can be cheaper and more efficacious to collaborate in the production of a shared resource based on common standards than to strictly buy and own it as private property. +={commons:value proposition of;value:creation of+1:see also Great Value Shift} + +But how does open source work without a conventional market apparatus? The past few years have seen a proliferation of sociological and economic theories about how open-source communities create value. One formulation, by Rishab Ghosh, compares free software development to a “cooking pot,” in which you can give a little to the pot yet take a lot — with no one else being the poorer.
“Value” is not measured economically at the point of transaction, as in a market, but in the nonmonetary /{flow}/ of value that a project elicits (via volunteers) and generates (through shared software).~{ Rishab Aiyer Ghosh, “Cooking Pot Markets and Balanced Value Flows,” in Rishab Aiyer Ghosh, ed., /{CODE: Collaborative Ownership and the Digital Economy}/ (Cambridge, MA: MIT Press, 2005), pp. 153–68. }~ Another important formulation, which we will revisit later, comes from Harvard law professor Yochai Benkler, who has written that the Internet makes it cheap and easy to access expertise anywhere on the network, rendering conventional forms of corporate organization costly and cumbersome for many functions. Communities based on social trust and reciprocity are capable of mobilizing creativity and commitment in ways that market incentives often cannot — and this can have profound economic implications.~{ See, e.g., Benkler, “Coase’s Penguin, or Linux and the Nature of the Firm,” /{Yale Law Journal}/ 112, no. 369 (2002); Benkler, “ ‘Sharing Nicely’: On Shareable Goods and the Emergence of Sharing as a Modality of Economic Production,” Yale Law Journal 114, no. 273 (2004).}~ Benkler’s analysis helps explain how a global corps of volunteers could create an operating system that, in many respects, outperforms software created by a well-paid army of Microsoft employees. +={Benkler, Yochai:open networks, on;FOSS/FLOSS;free software:FOSS/FLOSS;Ghosh, Rishab;open source software:economic implications of|uses of term+4} + +A funny thing happened to free and open-source software as it matured. It became hip. It acquired a cultural cachet that extends well beyond the cloistered precincts of computing. “Open source” has become a universal signifier for any activity that is participatory, collaborative, democratic, and accountable. 
Innovators within filmmaking, politics, education, biological research, and drug development, among other fields, have embraced the term to describe their own attempts to transform hidebound, hierarchical systems into open, accessible, and distributed meritocracies. Open source has become so much of a cultural meme — a self-replicating symbol and idea — that when the Bikram yoga franchise sought to shut down unlicensed uses of its yoga techniques, dissident yoga teachers organized themselves into a nonprofit that they called Open Source Yoga Unity. To tweak the supremacy of Coca-Cola and Pepsi, culture jammers even developed nonproprietary recipes for a cola drink and beer called “open source cola” and “open source beer.”~{ Open Source Yoga Unity, http://www.yogaunity.org; open-source cola, http://alfredo.octavio.net/soft_drink_formula.pdf; open-source beer, Vores OI (Danish for “Our Beer”), http://en.wikipedia.org/wiki/Vores_%C3%981 . See also http://freebeer.org/blog and http://www.project21.ch/freebeer. }~ +={free software:uses of term+5} + +Stallman’s radical acts of dissent in the 1980s, regarded with bemusement and incredulity at the time, have become, twenty-five years later, a widely embraced ideal. Small-/{d}/ democrats everywhere invoke open source to lambaste closed and corrupt political systems and to express their aspirations for political transcendence. People invoke open source to express a vision of life free from overcommercialization and corporate manipulation. The term enables one to champion bracing democratic ideals without seeming naïve or flaky because, after all, free software is solid stuff. Moreover, despite its image as the software of choice for granola-loving hippies, free and open-source software is entirely compatible with the commercial marketplace. How suspect can open source be when it has been embraced by the likes of IBM, Hewlett-Packard, and Sun Microsystems? 
+={Stallman, Richard:influence of|free software, and+4;Hewlett-Packard;IBM:open source, and;Sun Microsystems} + +The appeal of “openness” has become so great that it is sometimes difficult to recognize that /{limits}/ on openness are not only necessary but desirable. The dark side of openness is the spam that clogs the Internet, the ability to commit fraud and identity theft, and the opportunities for disturbed adults to prey sexually upon children. Still, the virtues of an open environment are undeniable; what is more difficult is negotiating the proper levels of openness for a given realm of online life. +={openness:limitations of} + +Nearly twenty years after the introduction of the GPL, free software has expanded phenomenally. It has given rise to countless FOSS software applications, many of which are major viral hits such as Thunderbird (e-mail), Firefox (Web browser), Ubuntu (desktop GNU/Linux), and Asterisk (Internet telephony). FOSS has set in motion, directly or indirectly, some powerful viral spirals such as the Creative Commons licenses, the iCommons/free culture movement, the Science Commons project, the open educational resource movement, and a new breed of open-business ventures. Yet Richard Stallman sees little connection between these various “open” movements and free software; he regards “open” projects as too vaguely defined to guarantee that their work is truly “free” in the free software sense of the term. “Openness and freedom are not the same thing,” said Stallman, who takes pains to differentiate free software from open-source software, emphasizing the political freedoms that lie at the heart of the former.~{ Interview with Richard Stallman, January 21, 2008. }~ +={FOSS/FLOSS;free software:FOSS/FLOSS|expansion of;Stallman, Richard:influence of|freedom, and} + +Any revolution is not just about new tools and social practices, however. It is also about developing new ways of understanding the world.
People must begin to /{see}/ things in a new perspective and /{talk}/ with a new vocabulary. In the 1990s, as Disney, Time Warner, Viacom, and other media giants realized how disruptive the Internet might be, the public was generally oblivious that it might have a direct stake in the outcome of Internet and copyright policy battles. Big Media was flexing its muscles to institute all sorts of self-serving, protectionist fixes — copy-protection technologies, broader copyright privileges, one-sided software and Web licenses, and much more — and most public-interest groups and civic organizations were nowhere to be seen. + +Fortunately, a small but fierce and keenly intelligent corps of progressive copyright scholars were beginning to discover one another in the 1990s. Just as the hacker community had had to recognize the enclosure of its commons of software code, and embrace the GPL and other licenses as defensive remedies, so progressive copyright scholars and tech activists were grappling with how to defend against a related set of enclosures. The relentless expansion of copyright law was eroding huge swaths of the public domain and fair use doctrine. Tackling this problem required asking a question that few in the legal or political establishments considered worth anyone’s time — namely, What’s so valuable about the public domain, anyway? + +1~ 2 THE DISCOVERY OF THE PUBLIC DOMAIN +={public domain+93} + +/{How a band of irregulars demonstrated that the public domain is enormously valuable after all.}/ + +For decades, the public domain was regarded as something of a wasteland, a place where old books, faded posters, loopy music from the early twentieth century, and boring government reports go to die. It was a dump on the outskirts of respectable culture. If anything in the public domain had any value, someone would sell it for money. Or so goes the customary conception of the public domain.
+ +Jack Valenti, the longtime head of the Motion Picture Association of America, once put it this way: “A public domain work is an orphan. No one is responsible for its life. But everyone exploits its use, until that time certain when it becomes soiled and haggard, barren of its previous virtues. Who, then, will invest the funds to renovate and nourish its future life when no one owns it?”~{ Jack Valenti, “A Plea for Keeping Alive the U.S. Film Industry’s Competitive Energy,” testimony on behalf of the Motion Picture Association of America to extend the term of copyright protection, Senate Judiciary Committee, September 20, 1995, at http://instructors.cwrl.utexas.edu/~martin/Valenti.pdf. }~ (Valenti was arguing that longer copyright terms would give film studios the incentive to digitize old celluloid films that would otherwise enter the public domain and physically disintegrate.) +={Valenti, Jack} + +One of the great, unexplained mysteries of copyright law is how a raffish beggar grew up to be King Midas. How did a virtually ignored realm of culture — little studied and undertheorized — become a subject of intense scholarly interest and great practical importance to commoners and businesses alike? How did the actual value of the public domain become known? The idea that the public domain might be valuable in its own right — and therefore be worth protecting — was a fringe idea in the 1990s and before. So how did a transformation of legal and cultural meaning occur? +={public domain:emergence of+4|copyright law, and+3} + +Unlike Richard Stallman’s crusade to create a sustainable public domain of code,~[* Free software constitutes a “sustainable public domain” because the General Public License protects the code and its derivatives from private appropriation yet otherwise makes the code free for anyone to use.
The public domain, by contrast, is vulnerable to private appropriation in practice if a company has sufficient market power (e.g., Disney’s appropriation of fairy tales) or if it uses the public domain to make derivative works and then copyrights them (e.g., vendors who mix government data with proprietary enhancements).]~ the discovery of the public domain for cultural works was not led by a single protagonist or group. It emerged over time through a loose network of legal scholars, techies, activists, and some businesses, who were increasingly concerned about worrisome expansions of copyright and patent law. Slowly, a conversation that was occurring in a variety of academic and tech communities began to intensify, and then coalesce into a more coherent story. +={copyright law: expansion of} + +Scholarship about copyright law is not exactly gripping stuff. But it has played an important role in the viral spiral. Before anyone could begin to imagine how an online commons could be structured and protected, someone needed to explain how intellectual property law had become “uncontrolled to the point of recklessness” — as law professor David Lange put it in 1981, well before the proprietarian explosion of the late 1980s and 1990s. +={Lange, David;copyright law:scholarship about} + +Fortunately, a new breed of public-spirited professors was reaching a critical mass just as the Internet was becoming culturally important. These professors, collaborating with programmers and activists, were among the first to understand the ways in which copyright law, historically an arcane backwater of law, was starting to pose serious threats to democracy-loving citizens and Internet users. 
The full complexity of this legal literature over the past generation cannot be unpacked here, but it is important to understand how progressive copyright scholarship played a critical role in identifying dangerous trends in law and technology — and in constructing a new narrative for what copyright law should be. + +This legal scholarship reconceptualized the public domain — then a vague notion of nonproperty — and developed it into an affirmative theory. It gave the public domain sharper definition and empirical grounding. Thinkers like Yochai Benkler (Harvard Law School), Lawrence Lessig (Stanford Law), and James Boyle (Duke Law) developed bracing new theories that recognize the power of social communities, and not just the individual, in the creative process. Others, such as Julie Cohen (Georgetown Law Center) and Pamela Samuelson (Boalt Hall), have respectively explored the need to develop a new social theory of creative practice~{ Julie E. Cohen, “Copyright, Commodification and Culture: Locating the Public Domain,” in Lucie Guibault and P. Bernt Hugenholtz, eds. /{The Future of the Public Domain: Identifying the Commons in Information Law}/ (The Netherlands: Kluwer Law International, 2006), pp. 121–66. }~ and the theoretical challenges of “mapping” the public domain.~{ Pamela Samuelson, “Challenges in Mapping the Public Domain,” in Guibault and Hugenholtz, eds. /{The Future of the Public Domain}/, pp. 7–26. }~ All of this thinking, mostly confined to scholarly workshops, law reviews, and tech journals, served as a vital platform for imagining the commons in general and the Creative Commons in particular. +={Boyle, James;Cohen, Julie;Lessig, Lawrence;Samuelson, Pamela} + +2~ The Elusive Quest for “Balance” +={copyright law:balance of public and private rights+19|public domain vs.+19;public domain:copyright law, and+19} + +Historically, copyright has been regarded as a “bargain” between the public and authors.
The public gives authors a set of monopoly rights to help them sell their works and earn rewards for their hard work. In return, the public gets the marketable output of creators— books, films, music — and certain rights of free access and use. The primary justification of copyright law is not to protect the fortunes of authors; it is to promote new creative works and innovation. By giving authors a property right in their works — and so helping them to sell those works in the marketplace — copyright law aims to promote the “progress of human knowledge.” +={property rights:copyright law, and;copyright law:property rights, and} + +That’s the author’s side of the bargain. The public’s stake is to have certain limited rights to use copyrighted works. Under the “fair use” doctrine (or “fair dealing” in some countries), people are entitled to excerpt copyrighted works for noncommercial purposes such as journalism, scholarship, reviews, and personal use. People are also entitled to resell the physical copies of copyrighted works such as books and videos. (This right is granted under the “first sale doctrine,” which enables libraries and DVD rental stores to exist.) The public also has the right to use copyrighted works for free after the term of a copyright has expired — that is, after a work has “entered the public domain.” This general scheme is said to establish a balance in copyright law between the private rights of authors and the needs of the public and future authors. +={copyright law:fair use doctrine, and|first sale doctrine, and;fair use doctrine:copyright law, and;first sale doctrine} + +This “balance” has been more rhetorical than real, however. For decades, critics have complained that the public’s side of the copyright bargain is being abridged. Content industries have steadily expanded their rights under copyright law at the expense of the public’s modest access rights. 
+ +What is notable about the long history of seeking “balance” in copyright law is the singular failure of critics to make much headway (until recently) in redressing the problem. The public’s interests in copyright law — and those of authors’ — have never been given that much attention or respect. From the authors of eighteenth-century England, whose formal rights were in practice controlled by booksellers, to the rhythm-and-blues singers of the 1940s whose music was exploited for a pittance by record labels, to academics whose copyrights must often be ceded to commercial journals, authors have generally gotten the short end of the stick. No surprise here. Business practices and copyright policy have usually been crafted by the wealthiest, most politically connected players: book publishers, film studios, record labels, broadcasters, cable operators, news organizations. The public’s lack of organized political power was reflected in its lack of a coherent language for even describing its own interests in copyright law. + +For most of the twentieth century, the forging of copyright law was essentially an insider contest among various copyright-dependent industries for market advantage. Congress hosted a process to oversee the squabbling and negotiation, and nudged the players now and again. This is what happened in the fifteen-year run-up to congressional enactment of the Copyright Act of 1976, for example. For the most part, Congress has preferred to ratify the compromises that industry players hammer out among themselves. The unorganized public has been treated as an ignorant bystander. +={Copyright Act (1976)+3;copyright law:revision of (1976)+3} + +Naturally, this has strengthened the hand of commercial interests. Copyright disputes could be argued within a congenial intellectual framework and closely managed by a priesthood of lawyer-experts, industry lobbyists, and friendly politicians. 
The interests of citizens and consumers, blessedly absent from most debates, could be safely bracketed as marginal. + +But letting industries negotiate their own solutions has its own problems, as Professor Jessica Litman has pointed out: “Each time we rely on current stakeholders to agree on a statutory scheme, they produce a scheme designed to protect themselves against the rest of us. Its rigidity leads to its breakdown; the statute’s drafters have incorporated too few general principles to guide courts in effecting repairs.”~{ Jessica Litman, /{Digital Copyright}/ (Amherst, NY: Prometheus, 2000), p. 62. }~ By letting the affected industries negotiate a series of fact-specific solutions, each reflecting that moment in history, Congress has in effect let copyright law become an agglomeration of complex and irregular political compromises — or, as some might say, a philosophically incoherent mess. +={copyright law:expansion of+8;Litman, Jessica} + +Perhaps because it is so attentive to its industry benefactors, Congress has generally regarded the fair use doctrine and the public domain as a sideshow. Under the Copyright Act of 1976, for example, fair use is set forth only as an affirmative defense to accusations of copyright infringement, not as an affirmative right. Moreover, fair use is defined by four general statutory guidelines, which courts have proceeded to interpret in wildly inconsistent ways. In real life, Lawrence Lessig has quipped, fair use amounts to “the right to hire a lawyer.” +={Lessig, Lawrence:fair use, on;copyright law:fair use doctrine, and;fair use doctrine:copyright law, and} + +Congress has shown a similarly low regard for the public domain. After extending the term of copyright law eleven times since 1961, the copyright monopoly now lasts for an author’s lifetime plus seventy years (ninety-five years for corporations).
For Congress, writes Professor Tyler Ochoa, “allowing works to enter the public domain was something to be condemned, or at least only grudgingly tolerated, rather than something to be celebrated.”~{ Tyler Ochoa, “Origins and Meanings of the Public Domain,” /{Dayton Law Review}/ 28, no. 215 (2002). }~ Congress’s most hostile act toward the public domain — and to the public’s rights of access — was the elimination of the registration requirement for copyright protection.~{ Lawrence Lessig explains the impact of eliminating the copyright registration requirement in Lessig, /{Free Culture}/ (New York: Penguin, 2004), pp. 222–23. }~ Since 1978, copyright holders have not had to formally register their works in order to receive protection. Doodle on a scratch pad, record your guitar strumming, and it’s automatically copyrighted. +={copyright law:automatic+1;Ochoa, Tyler} + +Sounds great . . . but this provision had especially nasty consequences once the digital revolution kicked into high gear in the 1990s, because every digital byte was born, by default, as a form of property. Automatic copyright protection dramatically reversed the previous default, where most everything was born in the public domain and was free to use unless registered. Today, anyone wishing to reuse a work legally has to get permission and possibly pay a fee. To make matters worse, since there is no longer a central registry of who owns what copyrighted works, it is often impossible to locate the copyright holder. Such books, films, and images are known as “orphan works.” +={copyright law:"orphan works"} + +Thirty years ago, the idea of throwing a net of copyright over all information and culture was not alarming in the least. As Jessica Litman recalled, “When I started teaching in 1984, we were at what was about to be the crest of a high-protectionist wave. 
That is, if you looked at the scholarship being written then, people were writing about how we should expand copyright protection, not only to cover useful articles and fashions and semiconductor chips and computer programs, but also recombinant DNA. The Chicago School of scholarship was beginning to be quite influential. People were reconceiving copyright in Chicago Law and Economics terms, and things like fair use were seen to be ‘free riding.’ ”~{ Interview with Jessica Litman, November 16, 2006. }~ +={Litman, Jessica+1} + +Yet the effects of this protectionist surge, at least for the short term, were muted for a number of reasons. First, corporate lobbying on copyright issues was extremely low-key. “I started going to congressional hearings in 1986,” said Litman, “and no one was there. There were no members of Congress; there was no press. The witnesses would come and they’d talk, and staffers would take notes. And that would be it.”~{ Ibid. }~ The big-ticket lobbying — receptions, slick reports, legislative junkets, private movie screenings with Jack Valenti — did not really begin to kick in until the late 1980s and early 1990s, when trade associations for every conceivable faction stepped up their Washington advocacy. When the Internet’s commercial implications became clear in the mid-1990s, copyright-dependent industries ratcheted up their campaign contributions and lobbying to another level entirely. +={Valenti, Jack} + +The protectionist surge in copyright law in the 1980s was mitigated by two stalwart public servants: Representative Robert Kastenmeier of Wisconsin, the chair of the House judiciary subcommittee that oversaw copyright legislation, and Dorothy Schrader, the longtime general counsel of the U.S. Copyright Office. Both considered it their job to protect the public from grasping copyright industries. 
When Kastenmeier lost his reelection bid in 1990 and Schrader retired in 1994, the film, music, broadcast, cable, and publishing industries would henceforth have staunch allies — sometimes their former lawyer-lobbyists — in key congressional staff positions and copyright policy jobs. Government officials no longer saw their jobs as protecting consumers from overbearing, revenue-hungry media industries, but as helping copyright owners chase down and prosecute “pirates.” Copyright law was recast as a form of industrial policy — a way to retain American jobs and improve the U.S. balance of trade — not as an instrument that affects social equity, consumer rights, and democratic values. +={Kastenmeier, Robert;Schrader, Dorothy;piracy} + +Ironically, the mercantilist view of copyright was gaining ground at precisely the time when the public’s stake in copyright law was growing. An explosion of consumer electronics in the 1980s was giving the public new reasons to care about their fair use rights and the public domain. The introduction of the videocassette recorder, the proliferation of cable television, personal computers, software and electronics devices, and then the introduction of the Web in 1993 all invited people to control their own creative and cultural lives. The new media meant that the baroque encrustations of copyright law that had accumulated over decades were now starting to interfere with people’s daily activities. +={fair use doctrine:copyright law, and+1;copyright law:fair use doctrine, and+1|mercantilist view of+2} + +Yet rather than negotiate a new copyright bargain to take account of the public’s needs and interests, copyright industries stepped up their demands on Congress to ram through even stronger copyright, trademark, and patent privileges for themselves. Their basic goal was, and generally remains, a more perfect control over all downstream uses of works.
Content industries generally do not concede that there is any presumptive “free use zone” of culture, notwithstanding the existence of the fair use doctrine. Works that citizens may regard as fair-use entitlements industry often regards as chunks of information that no one has yet figured out how to turn into marketable property. + +Most content industries, then and now, do not see any “imbalance” in copyright law; they prefer to talk in different terms entirely. They liken copyrighted works to personal property or real estate, as in “and you wouldn’t steal a CD or use my house without permission, would you?” A copyrighted work is analogized to a finite physical object. But the essential point about works in the digital age is that they can’t be “used up” in the same way that physical objects can. They are “nondepletable” and “nonrival,” as economists put it. A digital work can be reproduced and shared for virtually nothing, without depriving another person of it. +={property rights:copyright law, and+1;copyright law:property rights, and} + +Nonetheless, a new narrative was being launched — copyrighted works as property. The idea of copyright law reflecting a policy bargain between the public and authors/corporations was being supplanted by a new story that casts copyright as property that is nearly absolute in scope and virtually perpetual in term. In hindsight, for those scholars who cared enough to see, a disquieting number of federal court cases were strengthening the hand of copyright holders at the expense of the public. James Boyle, in a much-cited essay, called this the “second enclosure movement” — the first one, of course, being the English enclosure movement of common lands in medieval times and into the nineteenth century.~{ James Boyle, “The Second Enclosure Movement and the Construction of the Public Domain,” /{Law and Contemporary Problems}/ 66 (Winter–Spring 2003), pp.
33–74, at http://www.law.duke.edu/shell/cite.pl?66+Law+&+Contemp.+ Probs.+ 33+ (WinterSpring+2003). }~ +={Boyle, James:enclosure movement, on;commons:enclosure of+1;enclosure movement+1;copyright law:enclosure movement, and+1} + +Enclosure took many forms. Copyright scholar Peter Jaszi recalls, “Sometime in the mid-1980s, the professoriate started getting worried about software copyright.”~{ Interview with Peter Jaszi, October 17, 2007. }~ It feared that copyrights for software would squelch competition and prevent others from using existing code to innovate. This battle was lost, however. Several years later, the battle entered round two as copyright scholars and programmers sought to protect reverse-engineering as fair use. This time, they won.~{ /{Sega Enterprises v. Accolade}/, 977 F.2d 1510 (9th Cir. 1993). }~ +={Jaszi, Peter;fair use doctrine:reverse engineering, and;software:copyright of;copyright law:digital age, in} + +Then, in 1985, the U.S. Supreme Court ruled that it was not fair use for the /{Nation}/ magazine to excerpt three hundred words from President Ford’s 200,000-word memoir. The /{Nation}/ had acquired a copy of Ford’s book before its publication and published an article of highlights, including a handful of quotations. The material, derived from Ford’s official duties as president, was of obvious value to the democratic process. But by a 6-3 margin the Court held that the /{Nation}/ had violated Ford’s copyright.~{ /{Harper & Row v. Nation Enterprises}/, 471 U.S. 539 (1985). }~ The proprietary tilt of copyright law only intensified in the following years. Companies claimed copyrights for all sorts of dubious forms of “originality” — the page numbers of federal court decisions, the names and numbers in telephone directories, and facts compiled in databases. 
+={Ford, Gerald R.;Nation (magazine)} + +2~ The Great Expansion of Intellectual Property +={intellectual property:protection of+14;copyright law:expansion of+14} + +These expansions of proprietary control in the 1980s proved to be a prelude to much more aggressive expansions of copyright, patent, and trademark law in the 1990s. Congress and the courts were granting property rights to all sorts of things that had previously been considered unowned or unownable. The Supreme Court had opened this door in 1980 when it recognized the patentability of a genetically modified bacterium. This led to ethically and economically dubious patents for genes and life-forms. Then businesses began to win patents for “business methods” — ideas and theoretical systems — that would otherwise be in the public domain. Mathematical algorithms, if embedded in software, could now be owned. Amazon.com’s patent on “one-click shopping” on its Web site became the symbol of this trend. Boat manufacturers won a special /{sui generis}/ (“in a class by itself ”) form of protection for the design of boat hulls in 1998. Celebrities and talent agencies prevailed upon state legislatures to extend the scope of ownership of celebrity names and likenesses, which had long been considered in the public domain. +={Amazon;copyright law:proprietary rights;software:copyright of+1;software:copyright of+1;property rights:copyright law, and} + +Companies developed still other strategies to assert greater proprietary control over works. Software companies began to rely upon mass-market licenses — often referred to as “shrink wrap” contracts and “click-through” Web agreements — to expand their rights at the expense of consumers and the public domain. 
Various computer companies sought to enact a model state law that, in Samuelson’s words, would “give themselves more rights than intellectual property law would do and avoid the burdens of public interest limitations.”~{ Samuelson, “Digital Information, Digital Networks, and the Public Domain,” p. 92. }~ Consumers could in effect be forced to surrender their fair use rights, the right to criticize the product or their right to sue, because of a “contract” they ostensibly agreed to. +={Samuelson, Pamela;copyright law:fair use doctrine, and;fair use doctrine:copyright law, and} + +Trademarks, originally designed to help people identify brands and prevent fraud in the marketplace, acquired a new power in 1995 — the ability to control public meanings. For years, large corporations had wanted to extend the scope of their trademark protection to include “dilution” — a fuzzy concept that would prohibit the use of a trademark without permission, even for legitimate public commentary or parody, if it “dilutes” the recognized public associations and meanings of a trademark. For a decade or more, Kastenmeier had prevented antidilution legislation from moving forward. After Kastenmeier left Congress, the trademark lobby succeeded in getting Congress to enact the legislation. This made it much easier for Mattel to threaten people who did parodies of Barbie dolls. The /{Village Voice}/ could more credibly threaten the /{Cape Cod Voice}/ for trademark infringement. Wal-Mart could prevent others from using “its” smiley-face logo (itself taken from the cultural commons).~{ See, e.g., David Bollier, /{Brand Name Bullies: The Quest to Own and Control Culture}/ (New York: Wiley, 2005).}~ +={Kastenmeier, Robert;trademarks:dilution of} + +The election of Bill Clinton as president in 1992 gave content industries new opportunities to expand their copyright privileges. 
The Clinton administration launched a major policy effort to build what it called the National Information Infrastructure (NII), more commonly known as the Information Superhighway. Today, of course, we call it the Internet. A task force of industry heavyweights was convened to determine what policies should be adopted to help build the NII.~{ Jessica Litman has an excellent historical account of the NII campaign in her book /{Digital Copyright}/ (Amherst, NY: Prometheus, 2000). }~ Vice President Al Gore cast himself as a visionary futurist and laid out astonishing scenarios for what the NII could deliver: access to every book in the Library of Congress, the ability of doctors to share medical information online, new strides against inequality as everyone goes online. +={Clinton, Bill;Gore, Al;Information Superhighway+6;Internet:rise of+1;National Information Infrastructure (NII)+6} + +The NII project was a classic case of incumbent industries trying to protect their profit centers. Executives and lobbyists associated with broadcasting, film, and music were being asked how to structure the Information Superhighway. Predictably, they came up with fantasies of digital television with five hundred channels, programs to sell products, and self-serving scenarios of even stronger copyright protection and penalties. Few had any inkling of the transformative power of open networks or the power of the sharing economy — and if they did, the possibilities certainly were not appealing to them. + +One part of the NII campaign was a working group on intellectual property headed by Bruce Lehman, a former congressional staffer, lobbyist for the software industry, and commissioner of patents and trademarks. The Lehman panel spent two years developing a sweeping set of copyright policies for the Information Superhighway. When the panel’s report was released in September 1995, anyone who cared about open culture and democracy was livid. 
The White Paper, as it was called, recommended a virtual elimination of fair use rights in digital content and broader rights over any copyrighted transmissions. It called for the elimination of first-sale rights for digitally transmitted documents (which would prevent the sharing of digital files) and endorsed digital rights management systems for digital works (in order to monitor and prosecute illegal sharing). The White Paper even sought to reinterpret existing law so that transient copies in the random-access memory of computers would be considered illegal unless they had a license — essentially outlawing Web browsing without a license. With visions of Soviet-style indoctrination, the document also recommended an ambitious public education program to teach Americans to properly respect copyright laws. +={Lehman, Bruce;White Paper [on copyright]+4;Clinton administration:White Paper+4;first sale doctrine;copyright:first sale, and|fair use doctrine, and;fair use doctrine:copyright law, and+1|White Paper vs.+5} + +Litman wrote a revealing history of the misbegotten NII project in her book /{Digital Copyright}/. Her chapter title “Copyright Lawyers Set Out to Colonize Cyberspace” says it all.~{ Litman, /{Digital Copyright}/, pp. 89–100. }~ Samuelson alerted the readers of /{Wired}/ about the outrageous proposals of the White Paper in her devastating January 1996 article “The Copyright Grab.”~{ Pamela Samuelson, “The Copyright Grab,” /{Wired}/, January 1996. }~ If the NII proposals are enacted, warned Samuelson, “your traditional user rights to browse, share or make private noncommercial copies of copyrighted works will be rescinded. Not only that, your online service provider will be forced to snoop through your files, ready to cut you off and turn you in if it finds any unlicensed material there. 
The White Paper regards digital technology as so threatening to the future of the publishing industry that the public must be stripped of all the rights copyright law has long recognized — including the rights of privacy. Vice President Al Gore has promised that the National Information Infrastructure will dramatically enhance public access to information; now we find out that it will be available only on a pay-per-use basis.”~{ Ibid. }~ +={Litman, Jessica;Samuelson, Pamela;Gore, Al} + +The White Paper was not just an effort by Old Media to domesticate or eliminate the freedoms emerging on the Information Superhighway; it sought to set the stage for the internationalization of strict copyright norms, so that American-style copyright law would prevail around the world. To counter this effort, American University law professor Peter Jaszi convened a group of law professors, library organizations, and computer and consumer electronics makers, who promptly organized themselves as the Digital Future Coalition (DFC), the first broad-based coalition in support of the public’s stake in copyright law. +={Jaszi, Peter;Digital Future Coalition (DFC)+1;copyright law:international} + +The DFC attacked the White Paper as a copyright-maximalist nightmare and sought to rally civil liberties groups, Internet service providers, and electronics manufacturers. With modest industry support, the DFC was largely responsible for slowing progress on legislation that would have enacted Lehman’s proposals. As domestic opposition grew, Lehman shrewdly decided to push for a new global copyright treaty that would embody similar principles. In the end, however, the World Intellectual Property Organization demurred. +={Lehman, Bruce;World Intellectual Property Organization} + +By that time, however, the terms of debate had been set, and there was serious congressional momentum to adopt some variant of the White Paper agenda. 
The ultimate result, enacted in October 1998, was the Digital Millennium Copyright Act (DMCA), the crowning achievement of the copyright-maximalist decade. It contained dozens of highly specific provisos and qualifications to satisfy every special pleader. The law in effect authorized companies to eliminate the public’s fair use rights in digital content by putting a “digital lock” around the content, however weak. Circumventing the lock, providing the software to do so, or even telling someone how to do so became a criminal offense. +={copyright:fair use doctrine, and+1;fair use doctrine:copyright law, and+1;Digital Millennium Copyright Act (DMCA) [1998]+4;copyright law:DMCA+4} + +The DMCA has been roundly denounced by software programmers, music fans, and Internet users for prohibiting them from making personal copies, fair use excerpts, and doing reverse engineering on software, even with legally purchased products. Using digital rights management systems sanctioned by the DMCA, for example, many CDs and DVDs are now coded with geographic codes that prevent consumers from operating them on devices on other continents. DVDs may contain code to prevent them from running on Linux-based computers. Digital journals may “expire” after a given period of time, wiping out library holdings unless another payment is made. Digital textbooks may go blank at the end of the school year, preventing their reuse or resale. + +Critics also argue that the DMCA gives large corporations a powerful legal tool to thwart competition and interoperability. Some companies programmed garage door openers and printer cartridges so that the systems would not accept generic replacements (until a federal court found this behavior anticompetitive). Naturally, this sort of behavior, which the DMCA facilitates, lets companies avoid open competition on open platforms with smaller companies and entrepreneurs. 
It also gives companies a legal pretext for bullying Web site owners into taking down copyrighted materials that may in fact be legal to use. + +In her excellent history of the political run-up to the DMCA, Litman notes, “There is no overarching vision of the public interest animating the Digital Millennium Copyright Act. None. Instead, what we have is what a variety of different private parties were able to extract from each other in the course of an incredibly complicated four-year multiparty negotiation.”~{ Litman, /{Digital Copyright}/, pp. 144–45. }~ The DMCA represents a new frontier of proprietarian control — the sanctioning of technological locks that can unilaterally override the copyright bargain. Companies asked themselves, Why rely on copyrights alone when technology can embed even stricter controls into the very design of products? +={Litman, Jessica} + +The year 1998 was an especially bad year for the public domain. Besides enacting the trademark dilution bill and DMCA, the Walt Disney Company and other large media corporations succeeded in their six-year campaign to enact the Sonny Bono Copyright Term Extension Act.~{ See Wikipedia entry for the Copyright Term Extension Act, at http://en.wikipedia.org/wiki/Sonny_Bono_Copyright_Term_Extension_Act. See also /{Eldred v. Ashcroft}/, 537 U.S. 186 (2003), F. 3d 849 (2001). }~ The legislation, named after the late House legislator and former husband of the singer Cher, retroactively extended the terms of existing copyrights by twenty years. As we will see in chapter 3, this law became the improbable catalyst for a new commons movement. +={trademarks:dilution of;Walt Disney Company;Copyright Term Extension Act} + +2~ Confronting the Proprietarian Juggernaut +={copyright law:property rights, and+13;property rights:copyright law, and+13} + +If there was ever a need for independent scholarship on copyright law and activism to challenge the new excesses, this was such a time. 
Fred von Lohmann, senior staff attorney for the Electronic Frontier Foundation in San Francisco, recalls, “Peggy Radin taught the first cyber-law class at Stanford Law School in 1995, and I was her research assistant. And at the end of that semester, I had read everything that had ever been written about the intersection of the Internet and the law — not just in the legal literature, but in almost all the literature. It filled about two boxes, and that was it. That was all there was.”~{ Interview with Fred von Lohmann, March 20, 2006. }~ +={Lohmann, Fred von;Radin, Margaret Jane} + +In about a dozen years, those two boxes of literature have grown into many shelves and countless filing cabinets of case law and commentary. Much of the legal scholarship was the fruit of a new generation of copyright professors who rose to the challenge of the time. An earlier generation of copyright scholars — Melville Nimmer, Alan Latman, Paul Goldstein — were highly respected titans, but they also enjoyed busy consulting practices with the various creative industries that they wrote about. Protecting the public domain was not their foremost concern. +={Goldstein, Paul;Latman, Alan;Nimmer, Melville} + +By the 1980s, as law schools became more like graduate schools and less like professional schools, copyright commentary began to get more scholarly and independent of the industries it studied. People like Pamela Samuelson, Peter Jaszi, Jerome H. Reichman, Jessica Litman, L. Ray Patterson, and Wendy Gordon were among this cohort, who were soon joined in the 1990s by a new wave of thinkers such as James Boyle, Lawrence Lessig, Julie Cohen, Niva Elkin-Koren, and Yochai Benkler. Still others, such as Rosemary Coombe and Keith Aoki, approached copyright issues from cross-cultural and globalization perspectives. These scholars were frankly hostile to the large copyright industries, and greatly concerned with how the law was harming democracy, science, culture, and consumers. 
+={Litman, Jessica;Samuelson, Pamela;Jaszi, Peter;Benkler, Yochai;Boyle, James;Cohen, Julie;Coombe, Rosemary;Elkin-Koren, Niva;Gordon, Wendy;Lessig, Lawrence;Patterson, L. Ray;Reichman, Jerome H.;Aoki, Keith} + +A number of activist voices were also coming forward at this time to challenge the proprietarian juggernaut. As the Internet became a popular medium, ordinary people began to realize that the new copyright laws were curtailing their creative freedoms and free speech rights. The obscure complexities of copyright law started to become a far more public and political issue. The pioneering activist organization was the Electronic Frontier Foundation. EFF was founded in 1990 by tech entrepreneur Mitch Kapor, the famed inventor of the Lotus 1-2-3 spreadsheet in the 1980s; John Perry Barlow, Grateful Dead lyricist and hacker; and John Gilmore, a leading privacy/cryptography activist and free software entrepreneur. +={Barlow, John Perry+4;Gilmore, John;Kapor, Mitch;Electronic Frontier Foundation (EFF)} + +The organization was oriented to hackers and cyberlibertarians, who increasingly realized that they needed an organized presence to defend citizen freedoms in cyberspace. (Barlow adapted the term /{cyberspace}/ from science-fiction writer William Gibson in 1990 and applied it to the then-unnamed cultural life on the Internet.) Initially, the EFF was concerned with hacker freedom, individual privacy, and Internet censorship. It later went through some growing pains as it moved offices, changed directors, and sought to develop a strategic focus for its advocacy and litigation. In more recent years, EFF, now based in San Francisco, has become the leading litigator of copyright, trademark, and Internet free expression issues. It also has more than ten thousand members and spirited outreach programs to the press and public. +={Gibson, William;cyberspace:use of term} + +John Perry Barlow was an important visionary and popularizer of the time. 
His March 1994 article “The Economy of Ideas” is one of the most prophetic yet accessible accounts of how the Internet was changing the economics of information. He astutely realized that information is not a “product” like most physical property, but rather a social experience or form of life unto itself. “Information is a verb, not a noun,” he wrote. “Freed of its containers, information obviously is not a thing. In fact, it is something that happens in the field of interaction between minds or objects or other pieces of information. . . . Sharks are said to die of suffocation if they stop swimming, and the same is nearly true of information.”~{ John Perry Barlow, “The Economy of Ideas,” /{Wired}/, March 1994, at http://www.wired.com/wired/archive/2.03/economy.ideas.html.}~ + +Instead of the sober polemics of law professors, Barlow — a retired Wyoming cattle rancher who improbably doubled as a tech intellectual and rock hipster — spiced his analysis of information with colorful metaphors and poetic aphorisms. Comparing information to DNA helices, Barlow wrote, “Information replicates into the cracks of possibility, always seeking new opportunities for /{Lebensraum}/.” Digital information, he said, “is a continuing process more like the metamorphosing tales of prehistory than anything that will fit in shrink-wrap.” + +Since hyperbole is an occupational reflex among cyberjournalists, Barlow’s /{Wired}/ piece bore the obligatory subtitle, “Everything you know about intellectual property is wrong.” Yet reading Barlow more than a decade later confirms that, posturing aside, he /{was}/ on to the big story of our time: “Notions of property, value, ownership and the nature of wealth itself are changing more fundamentally than at any time since the Sumerians first poked cuneiform into wet clay and called it stored grain. Only a very few people are aware of the enormity of this shift, and fewer of them are lawyers or public officials.”~{ Ibid. 
}~ + +With a nod to Professor Samuelson, Barlow was prescient enough to compare the vulnerability of indigenous peoples to the coming dispossession of Internet communities: “Western countries may legally appropriate the music, designs and biomedical lore of aboriginal people without compensation to their tribes of origins since those tribes are not an ‘author’ or ‘investors.’ But soon most information will be generated collaboratively by the cyber-tribal hunter-gatherers of cyberspace. Our arrogant legal dismissal of the rights of ‘primitives’ will soon return to haunt us.” +={Samuelson, Pamela} + +No account of cyberactivism in the 1990s is complete without mention of James Love, a feisty advocate with a brilliant strategic mind and an extraordinary ability to open up broad new policy fronts. For example, Love, as director of the Ralph Nader–founded Consumer Project on Technology, worked with tech activist Carl Malamud to force the U.S. Securities and Exchange Commission to put its EDGAR database of corporate filings online in 1994, at a time when the SEC was planning to give the data to private vendors to sell. By prevailing at the SEC, Love and Malamud set an important precedent that government agencies should post their information on the Internet for free. A few years later, in 1997, Love convened a conference to assess Microsoft’s troubling monopoly power, an event that emboldened the Department of Justice to launch its antitrust lawsuit against the company. Love later played a key role in persuading an Indian drugmaker to sell generic HIV/AIDS drugs to South Africa, putting Big Pharma on the defensive for its callous patent and trade policies and exorbitant drug prices. Love’s timely gambit in 1996 to organize broader advocacy for the public domain failed, however. He co-founded the Union for the Public Domain, with a board that included Richard Stallman, but the project never developed a political following or raised much money. 
+={Love, James;EDGAR database;Malamud, Carl;Microsoft:antitrust lawsuit against} + +The American Library Association was the largest and best-funded advocate on copyright issues in the 1990s, but its collaborations with other Washington allies tended to be modest, and its grassroots mobilization disappointing. Libraries are respected in the public mind precisely because they are stable, apolitical civic institutions — that is, not activists. Despite its valuable presence on copyright and Internet policy issues, the library lobby was temperamentally disinclined to get too far ahead of the curve. +={American Library Association;libraries} + +By the end of the decade, a muscular, dissenting discourse about copyright law was starting to take shape. On one side was a complicated body of industry-crafted copyright law that claimed imperial powers to regulate more and more aspects of daily life — your Web site, your music CDs, your electronic devices, your computer practices. On the other side were ordinary people who loved how the Internet and digital devices were empowering them to be creators and publishers in their own right. They just wanted to indulge their natural human urge to share, tinker, reuse, and transform culture. +={copyright law:expansion of} + +The dissent of the progressive copyright scholars and activists, though pungent, was hardly insurrectionist. These critics were reformers, not bomb throwers. Most objected to the overreaching scope and draconian enforcement of copyright law, not to its philosophical foundations. They generally argued that the problem wasn’t copyright law per se, but the misapplication and overextension of its core principles. + +2~ A New Story About the Public Domain +={copyright law:public domain vs.+15;public domain:copyright law, and+15|emergence of+21} + +One of the most notable outgrowths of all this activity was the development of a new story about the public domain. 
Scholars took a range of legal doctrines that were scattered among the sprawling oeuvre of copyright law and consolidated them under one banner, /{the public domain}/. The new framing helped give the public’s rights in cultural works a new moral standing and intellectual clarity. + +Even though copyright law has existed for three centuries, the term “public domain” did not surface in a U.S. Supreme Court decision until 1896. The public domain was first mentioned in U.S. copyright law in 1909, and while it occasionally merited passing reference or discussion in later decades, the concept was not the subject of a significant law review article until 1981. That article was “Recognizing the Public Domain,” by Professor David Lange.~{ David Lange, “Recognizing the Public Domain,” /{Law and Contemporary Problems}/ 44 (Autumn 1981). }~ “David’s article was an absolutely lovely piece that sunk without a trace,” recalls Jessica Litman. “When a bunch of us discovered [Lange’s article] in the late 1980s, it had been neither cited nor excerpted nor reprinted nor anything — because nobody was looking for a defense of the public domain. People were looking for arguments for extending copyright protection. David was ahead of his time.” + +The main reason that the public domain was ignored was that it was generally regarded as a nullity. “Public domain in the fields of literature, drama, music and art is the other side of the coin of copyright,” wrote M. William Krasilovsky in 1967.~{ M. William Krasilovsky, “Observations on the Public Domain,” /{Bulletin of the Copyright Society}/ 14, no. 205 (1967). }~ “It is best defined in negative terms.” Edward Samuels wrote that the public domain “is simply whatever remains after all methods of protection are taken into account.”~{ Edward Samuels, “The Public Domain in Copyright Law,” /{Journal of the Copyright Society}/ 41, no. 137 (1993), p. 138. 
}~ + +Lange himself acknowledged this conventional wisdom when he wrote that the public domain “amounts to a dark star in the constellation of intellectual property.” He took issue with this history, however, and insisted upon the affirmative value of the public domain. Lange dredged up a number of “publicity rights” cases and commentary to shed light on the problem: Bela Lugosi’s widow and son claimed that they, not Universal Pictures, should own the rights to the character Dracula. Representatives of the deceased Marx Brothers sought to stop a Broadway production spoofing 1930s musicals from using the Marx Brothers’ characters. DC Comics, owner of a trademark in the Superman character, sued to prevent a group of Chicago college students from calling their newspaper /{The Daily Planet}/. And so on. +={DC Comics;Lugosi, Bela;Marx Brothers+1} + +From such examples, Lange drove home a commonsense lesson about the derivative nature of creativity: we all depend on others to generate “new” works. Groucho, Chico, and Harpo Marx couldn’t “invent” their stage personas until, in classic vaudevillian tradition, they had adapted jokes and shtick from their peers. “In time,” Groucho wrote in his memoirs, “if [a comedian] was any good, he would emerge from the routine character he had started with and evolve into a distinct personality of his own. This has been my experience and also that of my brothers, and I believe this has been true of most of the other comedians.” + +To which Lange added, “Of course, what Groucho is saying in this passage is that although he and his brothers began as borrowers they ended as inventors. . . . It is a central failing in the contemporary intellectual property literature and case law that that lesson, so widely acknowledged, is so imperfectly understood.”~{ Lange, “Recognizing the Public Domain,” p. 162. 
}~ + +In example after example, Lange made the point that “as access to the public domain is choked, or even closed off altogether, the public loses too: loses the rich heritage of its culture, the rich presence of new works derived from that culture, and the rich promise of works to come.” Lange warned that “courts must dispel” the “impression of insubstantiality” from which the public domain suffers. Nothing will be resolved, he warned, “until the courts have come to see the public domain not merely as an unexplored abstraction but as a field of individual rights as important as any of the new property rights.” + +2~ What Is “Authorship”? +={authorship+13;copyright law:authorship, and+13|originality, and+13} + +Besides honoring the public domain, copyright reformers sought to develop a second, more subversive narrative. They questioned the very idea of individual “authorship” and “originality,” two central pillars of copyright law. The standard moral justification for granting authors exclusive rights in their works is the personal originality that they supposedly show in creating new works. But can “originality” and “authorship” be so neatly determined? What of the role of past generations and creative communities in enabling the creation of new works? Don’t we all, in the words of Isaac Newton, stand on the shoulders of giants? +={Newton, Isaac} + +The idea that sharing, collaboration, and adaptation may actually be important to creativity, and not merely incidental, was a somewhat daring theme in the early 1990s, if only because it had little recognition in copyright scholarship. While this line of analysis preceded the Internet, the arrival of the World Wide Web changed the debate dramatically. Suddenly there was a powerful, real-life platform for /{collective}/ authorship. 
Within fifteen years, sharing and collaboration has become a standard creative practice, as seen in Wikipedia, remix music, video mashups, machinima films, Google map mashups, social networking, and much else. +={World Wide Web:collective authorship, and;authorship:collective} + +Of course, in the early 1990s, the promise of online networks was only dimly understood. But for Jessica Litman, the tightening noose of proprietary control had troubling implications for fair use and the ability of people to create and share culture: “Copyright law was no longer as open and porous as it had been, so I felt compelled to try to defend the open spaces that nobody was paying attention to.” Litman published a major article on the public domain in 1990, instigating a fresh round of interest in it and establishing lines of analysis that continue to this day.~{ Jessica Litman, “The Public Domain,” /{Emory Law Journal}/ 39, no. 965 (Fall 1990). }~ +={Litman, Jessica;copyright law:fair use doctrine, and;fair use doctrine:copyright law, and} + +She made the then-startling claim, for example, that “the very act of authorship in /{any}/ medium is more akin to translation and recombination than it is to creating Aphrodite from the foam of the sea. Composers recombine sounds they have heard before; playwrights base their characters on bits and pieces drawn from real human beings and other playwrights’ characters. . . . This is not parasitism; it is the essence of authorship. And, in the absence of a vigorous public domain, much of it would be illegal.” Litman argued that the public domain is immensely important because all authors depend upon it for their raw material. Shrink the public domain and you impoverish the creative process. + +The problem, said Litman, is that copyright law contains a structural contradiction that no one wants to acknowledge. 
The law requires “originality” in order for a work to be protected — but it cannot truly determine what is “original.” If authors could assert that their works were entirely original, and courts conscientiously enforced this notion, copyright law would soon collapse. Everyone would be claiming property rights in material that had origins elsewhere. Shakespeare’s estate might claim that Leonard Bernstein’s /{West Side Story}/ violates its rights in /{Romeo and Juliet}/; Beethoven would prevent the Bee Gees from using the opening chords of his Fifth Symphony. + +When one person’s copyright claims appear to threaten another person’s ability to create, the courts have historically invoked the public domain in order to set limits on the scope of copyright protection. In this backhanded way, the public domain helps copyright law escape from its own contradictions and ensures that basic creative elements remain available to all. As Litman explained: + +_1 Because we have a public domain, we can permit authors to avoid the harsh light of a genuine search for provenance, and thus maintain the illusion that their works are indeed their own creations. We can tolerate the grant of overbroad and overlapping deeds through the expedient assumption that each author took her raw material from the commons, rather than from the property named in prior deeds.~{ Litman, “The Public Domain,” p. 1012. }~ + +In effect, copyright law sets up a sleight of hand: it invites authors to plunder the commons with the assurance that their borrowings will be politely ignored — but then it declares the resulting work of authorship “original” and condemns any further follow-on uses as “piracy.” This roughly describes the early creative strategy of the Walt Disney Company, which built an empire by rummaging through the public domain of fairy tales and folklore, adding its own creative flourishes, and then claiming sole ownership in the resulting characters and stories. 
+={authorship+3;Walt Disney Company} + +As Litman unpacked the realities of “authorship,” she showed how the idea of “originality” serves as a useful fiction. Any author must draw upon aspects of culture and recombine them without ever being able to identify the specific antecedents, she pointed out. Judges, for their part, can never really make a rigorous factual determination about what is “original” and what is taken from the public domain. In reality, said Litman, authorship amounts to “a combination of absorption, astigmatism and amnesia.” The public domain is vague and shifting precisely because it must constantly disguise the actual limits of individual “originality.” + +English professor Martha Woodmansee and law professor Peter Jaszi helped expose many of the half-truths about “authorship” and “originality.” Their 1994 anthology of essays, /{The Construction of Authorship}/, showed how social context is an indispensable element of “authorship,” one that copyright law essentially ignores.~{ Martha Woodmansee and Peter Jaszi, eds., /{The Construction of Authorship: Textual Appropriation in Law and Literature}/ (Durham, NC: Duke University Press, 1994). }~ Thus, even though indigenous cultures collectively create stories, music, and designs, and folk cultures generate works in a collaborative fashion, copyright law simply does not recognize such acts of collective authorship. And so they go unprotected. They are vulnerable to private appropriation and enclosure, much as Stallman’s hacker community at MIT saw its commons of code destroyed by enclosure. +={Jaszi, Peter;Woodmansee, Martha;commons:enclosure of;enclosure movement} + +Before the Internet, the collaborative dimensions of creativity were hardly given much thought. An “author” was self-evidently an individual endowed with unusual creative skills. 
As the World Wide Web and digital technologies have proliferated, however, copyright’s traditional notions of “authorship” and “originality” have come to seem terribly crude and limited. The individual creator still matters and deserves protection, of course. But when dozens of people contribute to a single entry of Wikipedia, or thousands contribute to an open-source software program, how then shall we determine who is the “author”?~{ Henry Miller writes: “We carry within us so many entities, so many voices, that rare indeed is the man who can say he speaks with his own voice. In the final analysis, is that iota of uniqueness which we boast of as ‘ours’ really ours? Whatever real or unique contribution we make stems from the same inscrutable source whence everything derives. We contribute nothing but our understanding, which is a way of saying — our acceptance.” Miller, /{The Books in My Life}/ (New York: New Directions), p. 198. }~ By the lights of copyright law, how shall the value of the public domain, reconstituted as a commons, be assessed?~{ Rufus Pollock, “The Value of the Public Domain,” report for Institute for Public Policy Research, London, July 2006, at http://www.rufuspollock.org/economics/papers/value_of_public_domain.ippr.pdf. }~ +={World Wide Web:collective authorship, and;creativity:collaborative} + +The Bellagio Declaration, the outgrowth of a conference organized by Woodmansee and Jaszi in 1993, called attention to the sweeping deficiencies of copyright law as applied. One key point stated, “In general, systems built around the author paradigm tend to obscure or undervalue the importance of the ‘public domain,’ the intellectual and cultural commons from which future works will be constructed.
Each intellectual property right, in effect, fences off some portion of the public domain, making it unavailable to future creators.”~{ See James Boyle, /{Shamans, Software, and Spleens: Law and the Construction of the Information Society}/ (Cambridge, MA: Harvard University Press, 1995), p. 192. }~ +={Jaszi, Peter;Woodmansee, Martha;Bellagio Declaration;Boyle, James:Shamans, Software and Spleens, by+1;intellectual property:public domain vs.} + +Another fusillade of flaming arrows engulfed the fortress of “authorship” and “originality” in 1996, when James Boyle published /{Shamans, Software, and Spleens}/. With sly wit and deep analysis, this landmark book identified many of the philosophical paradoxes and absurdities of property rights in indigenous knowledge, software, genes, and human tissue. Boyle deftly exposed the discourse of IP law as a kind of Möbius strip, a smooth strip of logic that confusingly turns back on itself. “If a geography metaphor is appropriate at all,” said Boyle, “the most likely cartographers would be Dali, Magritte and Escher.”~{ James Boyle, “A Theory of Law and Information: Copyright, Spleens, Blackmail and Insider Trading,” /{California Law Review}/ 80, no. 1413 (1992), at http://www.law.duke.edu/boylesite/law&info.htm. }~ +={copyright law:philosophical challenges to} + +2~ “You Have No Sovereignty Where We Gather” +={copyright law:philosophical challenges to+13} + +The deconstruction of copyright law over the past twenty years has been a significant intellectual achievement. It has exposed the copyright law’s philosophical deficiencies, showed how social practice deviates from it, and revealed the antisocial effects of expanding copyright protection. Critics knew that it would be impossible to defend the fledgling cyberculture without first documenting how copyright law was metastasizing at the expense of free expression, creative innovation, consumer rights, and market competition. 
+ +But as the millennium drew near, the tech-minded legal community — and law-minded techies — knew that critiques and carping could only achieve so much. A winnable confrontation with copyright maximalists was needed. A compelling counternarrative and a viable long-term political strategy had to be devised. And then somehow they had to be pushed out to the wider world and made real. + +That task was made easier by the intensifying cultural squeeze. The proprietarian lockdown was starting to annoy and anger people in their everyday use of music, software, DVDs, and the Web. And the property claims were growing more extreme. The American Society of Composers, Authors and Publishers had demanded that Girl Scout camps pay a public performance license for singing around the campfire. Ralph Lauren challenged the U.S. Polo Association for ownership of the word /{polo}/. McDonald’s succeeded in controlling the Scottish prefix Mc as applied to restaurants and motels, such as “McVegan” and “McSleep.”~{ These examples can be found in Bollier, /{Brand Name Bullies}/. }~ +={Lauren, Ralph} + +The mounting sense of frustration fueled a series of conferences between 1999 and 2001 that helped crystallize the disparate energies of legal scholarship into something resembling an intellectual movement. “A number of us [legal scholars] were still doing our own thing, but we were beginning to get a sense of something,” recalls Yochai Benkler. “It was no longer Becky Eisenberg working on DNA sequences and Pamela Samuelson on computer programs and Jamie Boyle on ‘environmentalism for the ’Net’ and me working on spectrum on First Amendment issues,” said Benkler.
“There was a sense of movement.”~{ Interview with Yochai Benkler, February 7, 2006.}~ (“Environmentalism for the ’Net” was an influential piece that Boyle wrote in 1998, calling for the equivalent of an environmental movement to protect the openness and freedom of the Internet.)~{ James Boyle, “A Politics of Intellectual Property: Environmentalism for the Net,” /{Duke Law Journal}/ 47, no. 1 (October 1997), pp. 87–116, at http://www.law.duke.edu/boylesite/Intprop.htm. }~ +={Boyle, James+1;Benkler, Yochai+1;Eisenberg, Rebecca;Samuelson, Pamela} + +“The place where things started to get even crisper,” said Benkler, “was a conference at Yale that Jamie Boyle organized in April 1999, which was already planned as a movement-building event.” That conference, Private Censorship/Perfect Choice, looked at the threats to free speech on the Web and how the public might resist. It took inspiration from John Perry Barlow’s 1996 manifesto “A Declaration of the Independence of Cyberspace.” It is worth quoting at length from Barlow’s lyrical cri de coeur — first published in /{Wired}/ and widely cited — because it expresses the growing sense of thwarted idealism among Internet users, and a yearning for greater self-determination and self-governance among commoners. Barlow wrote: +={Barlow, John Perry+6} + +_1 Governments of the Industrial World, you weary giants of flesh and steel, I come from Cyberspace, the new home of Mind. On behalf of the future, I ask you of the past to leave us alone. You are not welcome among us. You have no sovereignty where we gather. + +_1 We have no elected government, nor are we likely to have one, so I address you with no greater authority than that with which liberty itself always speaks. I declare the global social space we are building to be naturally independent of the tyrannies you seek to impose on us. You have no moral right to rule us nor do you possess any methods of enforcement we have true reason to fear.
+ +_1 Governments derive their just powers from the consent of the governed. You have neither solicited nor received ours. We did not invite you. You do not know us, nor do you know our world. Cyberspace does not lie within your borders. Do not think that you can build it, as though it were a public construction project. You cannot. It is an act of nature and it grows itself through our collective actions. + +_1 You have not engaged in our great and gathering conversation, nor did you create the wealth of our marketplaces. You do not know our culture, our ethics, or the unwritten codes that already provide our society more order than could be obtained by any of your impositions. + +_1 You claim there are problems among us that you need to solve. You use this claim as an excuse to invade our precincts. Many of these problems don’t exist. Where there are real conflicts, where there are wrongs, we will identify them and address them by our means. We are forming our own Social Contract. This governance will arise according to the conditions of our world, not yours. Our world is different. + +As Barlow made clear, the Internet was posing profound new questions — not just about politics, but about the democratic polity itself. What would be the terms of moral legitimacy and democratic process in cyberspace? Would the new order be imposed by a Congress beholden to incumbent industries and their political action committees, or would it be a new social contract negotiated by the commoners themselves? In posing such questions, and doing it with such rhetorical panache, Barlow earned comparisons to Thomas Jefferson. + +The stirrings of a movement were evident in May 2000, when Benkler convened a small conference of influential intellectual property scholars at New York University Law School on “A Free Information Ecology.” This was followed in November 2001 by a large gathering at Duke Law School, the first major conference ever held on the public domain.
It attracted several hundred people and permanently rescued the public domain from the netherworld of “nonproperty.” People from diverse corners of legal scholarship, activism, journalism, and philanthropy found each other and began to reenvision their work in a larger, shared framework. +={Benkler, Yochai;public domain:conferences about+1} + +Over three decades, copyright scholarship had become more incisive, impassioned, and focused on the public good — but much of the talk remained within the rarefied circles of the academy. What to do about the disturbing enclosures of the cultural commons remained a vexing, open question. The 1990s saw an eclectic smattering of initiatives, from EFF lawsuits and visionary manifestos to underfunded advocacy efforts and sporadic acts of hacker mischief and civil disobedience. All were worthwhile forms of engagement and exploratory learning. None were terribly transformative. Free software was growing in popularity in the 1990s, but its relevance to broader copyright struggles and the Internet was not yet recognized. Congress and the courts remained captive to the copyright-maximalist worldview. The idea of organizing a counter-constituency to lay claim to the public domain and forge a new social contract for cyberspace was a fantasy. Copyright law was just too obscure to excite the general public and most creators and techies. The commoners were too scattered and diverse to see themselves as an insurgent force, let alone imagine they might create a movement. + +1~ 3 WHEN LARRY LESSIG MET ERIC ELDRED + +/{A constitutional test case becomes the seed for a movement.}/ + +Once the value of the public domain became evident, and a few visionaries realized that the commons needed to be protected somehow, an important strategic question arose: Which arena would offer the best hope for success — politics, culture, technology, or law? + +The real answer, of course, was all of the above. 
Building a new digital republic would require a wholesale engagement with the politics of effecting democratic change and the challenges of building a cultural movement. It would require the invention of a shared technological infrastructure, and the development of legal tools to secure the commons. All were intertwined. But as a practical matter, anyone who aspired to stop the mass-media-driven expansions of copyright law had to choose where to invest his or her energy. In the mid-1990s, Lawrence Lessig decided that the greatest leverage would come through law. +={Lessig, Lawrence:law in contemporary context, and+2} + +Lessig, usually referred to as Larry, had the knowledge, talent, and good timing to conceptualize the politics of digital technologies at a ripe moment, the late 1990s, when the World Wide Web was exploding and people were struggling to understand its significance. However, Lessig was not content to play the sage law professor dispensing expertise at rarefied professional and scholarly gatherings; he aimed to become a public intellectual and highbrow activist. Through a punishing schedule of public speaking and a series of high-profile initiatives starting in 1998 and 1999, Lessig became a roving demigod-pundit on matters of the Internet, intellectual property, and cultural freedom. + +In the course of his frequent travels, he had a particularly significant rendezvous at the Starbucks on Church Street in Cambridge, Massachusetts. It was November 1998. A month earlier, Congress had enacted the Sonny Bono Copyright Extension Act. Lessig was eager to meet with one Eric Eldred, a retired navy contractor, to see if he would agree to be a plaintiff in the first federal case to challenge the constitutionality of the copyright clause. +={Copyright Term Extension Act;Eldred, Eric:Lessig, and;Lessig, Lawrence:Eldred, and} + +Eldred was a book enthusiast and computer programmer who had reached the end of his rope. 
Three years earlier, in 1995, he had launched a simple but brilliant project: a free online archive of classic American literature. Using his PC and a server in his home in New Hampshire, Eldred posted the books of Nathaniel Hawthorne, Henry James, Wallace Stevens, and dozens of other great authors whose works were in the public domain. Eldred figured it would be a great service to humanity to post the texts on the World Wide Web, which was just beginning to go mainstream. + +Eldred had previously worked for Apollo Computer and Hewlett-Packard and was experienced in many aspects of computers and software. In the late 1980s, in fact, he had developed a system that enabled users to post electronic text files and then browse and print them on demand. When the World Wide Web arrived, Eldred was understandably excited. “It seemed to me that there was a possibility of having a system for electronic books that was similar to what I had done before. I was interested in experimenting with this to see if it was possible.”~{ Interview with Eric Eldred, August 1, 2006; Daren Fonda, “Copyright Crusader,” /{Boston Globe Magazine}/, August 29, 1999, available at http://www.boston.com/globe/magazine/8-29/featurestory1.shtml; and Eric Eldred, “Battle of the Books: The Ebook vs. the Antibook,” November 15, 1998, at http://www.eldritchpress.org/battle.html. }~ + +So Eldred set out to build his own archive of public-domain books: “I got books from the library or wherever, and I learned how to do copyright research and how to scan books, do OCR [optical character recognition] and mark them up as HTML [the programming language used on the Web],” he said. “I just wanted to make books more accessible to readers.”~{ Interview with Eric Eldred, August 1, 2006. }~ + +Eldred didn’t realize it at the time, but his brave little archive, Eldritch Press, embodied a dawning cultural archetype — the self-published digital work meant to be freely shared with anyone in the world, via the Internet.
Thanks to the magic of “network effects” — the convenience and value that are generated as more people join a network — Eldred’s Web site was soon receiving more than twenty thousand hits a day. A growing community of book lovers came together through the site. They offered annotations to the online books, comments, and links to foreign translations and other materials. In 1997, the National Endowment for the Humanities considered the site so educational and exemplary that it formally cited Eldritch Press as one of the top twenty humanities sites on the Web. +={Eldritch Press+12;Internet:communication system, as+3} + +Although it was only a one-person project, Eldritch Press was not just an idiosyncratic innovation. The convergence of telecommunications, personal computers, and software in the 1990s, otherwise known as the Internet, was facilitating an explosion of new genres of public expression. We are still grappling with how this new type of media system is different from broadcasting and other mass media. But we do know this: it invites mass participation because the system doesn’t require a lot of capital or professional talent to use. The system favors decentralized interactivity over centralized control and one-way communication. Ordinary people find it relatively inexpensive and versatile. Since everyone has roughly the same access and distribution capacities, the Internet is perhaps the most populist communication platform and egalitarian marketplace in human history. +={Internet:mass participation in} + +This was not the goal of the computer scientists who invented the Internet, of course. Working under the auspices of the U.S. military, they were chiefly concerned with building a communications system that would allow academic researchers to share computerized information cheaply and easily. 
The idea was that intelligence and innovation would arise from the “edges” of a “dumb” network, and not be controlled by a centralized elite in the manner of broadcasting or book publishing. The Internet — a network of networks — would be a platform open to anyone who used a shared set of freely accessible “protocols,” or standardized code, for computer hardware and software.~[* The Internet protocols that enable different computers and networks to connect despite their differences is TCP/IP, which stands for Transmission Control Protocol/Internet Protocol. These protocols enabled the commons known as the Internet to emerge and function, and in turn to host countless other commons “on top” of it.]~ +={Internet:protocols of+1} + +What was radically new about the network architecture was its freedom: No special qualifications or permissions were needed to communicate or “publish.” No one needed to pay special fees based on usage. Anyone could build her own innovative software on top of the open protocols. It is a measure of the system’s power that it has spawned all sorts of innovations that were not foreseen at the outset: in the 1990s, the World Wide Web, instant messaging, peer-to-peer file sharing, and Web logs, and, in the 2000s, podcasts, wikis, social networking software, and countless other applications. The open, shared protocols of the Internet provided an indispensable communications platform for each of these innovations to arise. +={free culture:Internet, of the;Internet:architecture of} + +In building his online archive, Eric Eldred was part of this new cultural cohort of innovators. He not only shared Richard Stallman’s dream — to build an open, sharing community. He also came to share Stallman’s contempt for the long arm of copyright law. The problem, in Eldred’s case, was the corporate privatization of large portions of the public domain.
In the 1990s, the Walt Disney Company was worried that its flagship cartoon character, Mickey Mouse, would enter the public domain and be freely available for anyone to use. Mickey, originally copyrighted in 1928, was nearing the end of his seventy-five-year term of copyright and was due to enter the public domain in 2003. +={copyright law:public domain vs.+6;Eldred, Eric:public domain, and;public domain:copyright law, and+6|privatization of+2;Stallman, Richard:influence of;Walt Disney Company} + +Disney led a concerted campaign to extend the term of copyrights by twenty years. Under the new law, all works copyrighted after January 1, 1923, would be privately controlled for another twenty years. Corporations would be able to copyright their works for ninety-five years instead of seventy-five years, and the works of individual authors would be a private monopoly for the author’s lifetime plus seventy years. Thousands of works that were expected to enter the public domain in 1999 and following years would remain under copyright until 2019 and beyond. + +Congress readily enacted this twenty-year giveaway of monopoly rights on a unanimous vote, and without any public hearings or debate. Disney was the most visible beneficiary of the law, prompting critics to dub it the Mickey Mouse Protection Act. But its more significant impact was to deprive Americans of access to an estimated four hundred thousand cultural works from the 1920s and 1930s. Books by Sherwood Anderson, music by George Gershwin, poems by Robert Frost, and tens of thousands of other works would remain under private control for no good reason. The law was the eleventh time in the course of four decades that Congress had extended the term of copyright protection. 
American University law professor Peter Jaszi complained that copyright protection had become “perpetual on the installment plan.” +={Copyright Term Extension Act+6;Jaszi, Peter;copyright law:expansion of|purpose of+3} + +The law was astonishingly inefficient and inequitable as well. To preserve the property rights of the 2 percent of works from this period that still had commercial value, the law also locked up the remaining 98 percent of works (whose owners are often unknown or unable to be located in order to grant permissions). Indeed, it was these “orphan works” — works still under copyright but not commercially available, and with owners who often could not be found — that represent an important “feedstock” for new creativity. The Sonny Bono Act showered a windfall worth billions of dollars to the largest entertainment businesses and authors’ estates. +={copyright law:orphan works} + +At a more basic level, the copyright term extension showed contempt for the very rationale of copyright law. Copyrights are intended as an inducement to authors to create works. It is a government grant of monopoly property rights meant to help authors earn money for producing books, music, film, and other works. But, as Lessig pointed out, “You can’t incent a dead person. No matter what we do, Hawthorne will not produce any more works, no matter how much we pay him.” Jack Valenti replied that longer copyright terms would give Hollywood the incentive to preserve old films from deteriorating and make them available. + +The copyright term extension act privatized so many of the public domain books on the Eldritch Press Web site, and so offended Eldred’s sense of justice, that in November 1998 he decided to close his site in protest. The new law meant that he would not be able to add any works published since 1923 to his Web site until 2019. “I can no longer accomplish what I set out to do,” said Eldred.~{ Ibid. 
}~ +={Eldred, Eric:public domain, and|Lessig, and+3;Lessig, Lawrence+3:Eldred, and+3} + +As luck had it, Larry Lessig was looking for an Everyman of the Internet. Lessig, then a thirty-seven-year-old professor at Harvard Law School, was looking for a suitable plaintiff for his envisioned constitutional test case. He had initially approached Michael S. Hart, the founder of Project Gutenberg, the first producer of free electronic books. At the time, the project had nearly six thousand public-domain books available online. (It now has twenty thousand books; about 3 million books are downloaded every month.) Hart was receptive to the case but had his own ideas about how the case should be argued. He wanted the legal complaint to include a stirring populist manifesto railing against rapacious copyright holders. Lessig demurred and went in search of another plaintiff.~{ Richard Poynder interview with Lawrence Lessig, “The Basement Interviews: Free Culture,” April 7, 2006, p. 26, available at http://poynder.blogspot.com/2006/03/basement-interviews.html. See also Steven Levy, “Lawrence Lessig’s Supreme Showdown,” /{Wired}/, October 2002, pp. 140–45, 154–56, available at http://www.wired.com/wired/archive/10.10/lessig.html. Project Gutenberg is at http://www.gutenberg.org. }~ +={Hart, Michael S.;Project Gutenberg} + +After reading about Eldred’s protests in the /{Boston Globe}/, and meeting with him over coffee, Lessig asked Eldred if he would be willing to be the plaintiff in his envisioned case. Eldred readily agreed. As a conscientious objector and draft resister during the Vietnam War, he was ready to go to great lengths to fight the Sonny Bono Act. “Initially, I volunteered to violate the law if necessary and get arrested and go to jail,” Eldred said. “But Larry told me that was not necessary.” A good thing, because under the No Electronic Theft Act, passed in 1997, Eldred could be charged with a felony.
“I could face jail, fines, seizure of my computer, termination of my Internet service without notice — and so all the e-books on the Web site could be instantly lost,” he said. +={No Electronic Theft Act (1997)} + +It was the beginning of a landmark challenge to the unchecked expansion of copyright law. The case would turbocharge Lessig’s unusual career and educate the press and public about copyright law’s impact on democratic culture. Most significantly, it would, in time, spur the growth of an international free culture movement. +={copyright law:expansion of;Eldred, Eric:copyright law, and} + +2~ Larry Lessig’s Improbable Journey +={Lessig, Lawrence+49} + +Since Lessig looms so large in this story, it is worth pausing to understand his roots. Raised by culturally conservative, rock-ribbed Republican parents in central Pennsylvania, Lessig was a bright kid with a deep enthusiasm for politics. “I grew up a right-wing lunatic Republican,” Lessig told journalist Steven Levy, noting that he once belonged to the National Teen Age Republicans, ran a candidate’s unsuccessful campaign for the Pennsylvania state senate, and attended the 1980 Republican National Convention, which nominated Ronald Reagan for president. Larry’s father, Jack, was an engineer who once built Minuteman missile silos in South Dakota (where Lessig was born in 1961), and who later bought a steel-fabrication company in Williamsport, Pennsylvania.~{ Wikipedia entry, at http://en.wikipedia.org/wiki/Lessig; Levy, “Lawrence Lessig’s Supreme Showdown.” }~ +={Lessig, Lawrence:background of+4} + + +Lessig initially thought he would follow in his father’s footsteps, and so he went to the University of Pennsylvania to earn degrees in economics and management. Later, studying philosophy at Trinity College in Cambridge, England, he faced growing doubts about his deep-seated libertarian worldview.
Hitchhiking through Eastern Bloc countries, Lessig gained a new appreciation for the role of law in guaranteeing freedom and making power accountable. “There were many times when people in Eastern Europe would tell me stories about the history of the United States that I had never been taught: things like the history of how we treated Native Americans; and the history of our intervention in South America; and the nature of our intervention in South East Asia,” Lessig told Richard Poynder in 2006. “All of those were stories that we didn’t tell ourselves in the most accurate and vivid forms.” These experiences, said Lessig, “opened up a channel of skepticism in my head.”~{ Poynder interview with Lessig, April 7, 2006. }~ + +Lessig’s sister Leslie once told a reporter that Larry came back from Cambridge a very different person: “His views of politics, religion, and his career had totally flipped.”~{ Levy, “Lawrence Lessig’s Supreme Showdown.” }~ No longer aspiring to be a businessman or a philosopher, Lessig set his sights on law and entered the University of Chicago Law School in 1986. He transferred the next year to Yale Law School (to be near a girlfriend), groomed himself to be a constitutional law scholar, and graduated in 1989. + +Although he now considered himself a liberal, Lessig spent the next two years in the service of two of the law’s most formidable conservatives. He clerked for circuit court judge Richard Posner in 1988–89, followed by a year clerking for Supreme Court justice Antonin Scalia during the 1990–91 term. His educational odyssey complete, the thirty-year-old Lessig settled into the life of a tenured law professor at the University of Chicago Law School. +={Posner, Richard;Scalia, Antonin} + +One of Lessig’s early scholarly concerns — adjudication — was not exactly a warm-up for tub-thumping activism. But it did curiously prefigure his later interest in using law as a tool to effect political change. 
In a 1993 law review article, Lessig wondered how courts should interpret the law when public sentiment and practice have changed. If a judge is going to be true to the original meaning of a law, Lessig argued, he must make a conscientious “translation” of the law by taking account of the contemporary context. A new translation of the law is entirely justified, and should supplant an old interpretation, Lessig argued, if prevailing social practices and understandings have changed. The important thing in interpreting law, therefore, is “fidelity in translation.”~{ Lawrence Lessig, “Fidelity in Translation,” /{Texas Law Review}/ 71, no. 1165 (May 1993). }~ +={law:social change, and+8;Lessig, Lawrence:law in contemporary context, and+8} + +Lessig elaborated on this theme in a 1997 article that spent twenty-seven dense pages pondering how two different Supreme Courts, separated by nearly a century, could look to identical words in the Constitution and reach precisely opposite conclusions.~[* The Erie ruling held that federal common law, previously recognized by the U.S. Supreme Court in 1842, was unconstitutional.]~ It is not as if one Court or the other was unprincipled or wrong, Lessig wrote. Rather, any court must take account of contemporary social norms and circumstances in “translating” an old law for new times. Lessig called this dynamic the "/{Erie}/-effect," a reference to the U.S. Supreme Court’s 1938 ruling in /{Erie Railroad Co. v. Tompkins}/. The /{Erie}/-effect is about the emergence of “a kind of contestability about a practice within a legal institution,” which prompts “a restructuring of that practice to avoid the rhetorical costs of that contestability.”~{ Lawrence Lessig, “Erie-Effects of Volume 110: An Essay on Context in Interpretive Theory,” /{Harvard Law Review}/ 110, no. 1785 (1997). }~ +={Erie Railroad Co. v.
Tompkins+1} + +Lessig described how an /{Erie}/-effect might be exploited to catalyze a political shift (paraphrased here): /{identify}/ a socially contested law, aim to /{force}/ the conflicting social practice into the foreground by /{inflaming}/ conventional discourse, and then /{argue}/ for a change in legal interpretation in order to relieve the contestability that has been alleged.~{ Ibid., p. 1809. }~ If the conflict between the law and actual social practice can be made vivid enough, a court will feel pressure to reinterpret the law. Or the court will defer to the legislature because the very contestability of the law makes the issue a political question that is inappropriate for a court to resolve. One notable instance of the /{Erie}/-effect in our times, Lessig pointed out, was the successful campaign by feminist law scholar Catherine MacKinnon to define sexual harassment in the workplace as a form of illegal discrimination. The point was to transform popular understanding of the issue and then embody it in law. +={MacKinnon, Catherine} + +Lessig was not especially focused on tech issues until he ran across Julian Dibbell’s article “A Rape in Cyberspace,” which appeared in the /{Village Voice}/ in December 1993.~{ Julian Dibbell, “A Rape in Cyberspace: How an Evil Clown, a Haitian Trickster Spirit, Two Wizards, and a Cast of Dozens Turns a Database into a Society,” /{Village Voice}/, December 21, 1993, pp. 36–42, reprinted in Mark Stefik, /{Internet Dreams: Archetypes, Myths, and Metaphors}/ (Cambridge, MA: MIT Press, 1997), pp. 293–315, Dibbell quote at p. 296. }~ The piece described the social havoc that ensued in an online space, LambdaMOO, hosted at Xerox Palo Alto Research Center. One pseudonymous character “raped” another in the virtual space, using cruel words and graphic manipulations. The incident provoked an uproar among the thousand members of LambdaMOO, and had real emotional and social consequences. 
Yet, as Dibbell pointed out, “No bodies touched. Whatever physical interaction occurred consisted of a mingling of electronic signals sent from sites spread out between New York City and Sydney, Australia.” +={Dibbell, Julian:"A Rape in Cyberspace"+1;law:cyberspace, of+3} + +For Lessig, the LambdaMOO “rape” had an obvious resonance with Catherine MacKinnon’s arguments in her 1993 book /{Only Words}/. Does a rape in cyberspace resemble the harms inflicted on real women through pornography? Lessig saw intriguing parallels: “I really saw cyberspace as a fantastic opportunity to get people to think about things without recognizing the political valences. That’s all I was interested in; it was purely pedagogical.”~{ Interview with Lawrence Lessig, March 20, 2006. }~ +={MacKinnon, Catherine} + +To explore the issues further, Lessig developed one of the first courses on the law of cyberspace. He taught it in the spring semester of 1995 at Yale Law School, where he was a visiting professor, and later at the University of Chicago and Harvard law schools. During the Yale class, an exchange with a student, Andrew Shapiro, jarred his thinking in a new direction: “I was constantly thinking about the way that changing suppositions of constitutional eras had to be accounted for in the interpretation of the Constitution across time. Andrew made this point about how there’s an equivalent in the technical infrastructure [of the Internet] that you have to think about. And then I began to think about how there were norms and law and infrastructure — and then I eventually added markets into this — which combine to frame what policymaking is in any particular context.”~{ Ibid. }~ +={Shapiro, Andrew} + +This line of analysis became a central theme of Lessig’s startling first book, /{Code and Other Laws of Cyberspace}/, published in 1999.~{ Lessig, /{Code and Other Laws of Cyberspace}/ (New York: Basic Books, 1999). 
}~ /{Code}/ took on widespread assumptions that the Internet would usher in a new libertarian, free-market utopia. Cyberlibertarian futurists such as Alvin Toffler, Esther Dyson, George Gilder, and John Gilmore had routinely invoked cyberspace as a revolutionary force that would render government, regulation, and social welfare programs obsolete and unleash the transformative power of free markets.~{ Esther Dyson, George Gilder, George Keyworth, and Alvin Toffler, “Cyberspace and the American Dream: A Magna Carta for the Knowledge Age,” Progress and Freedom Foundation, August 1994, available at http://www.pff .org/issues-pubs/futureinsights/fil.2magnacarta.html. }~ In the libertarian scenario, individual freedom can flourish only if government gets the hell out of the way and lets individuals create, consume, and interact as they see fit, without any paternalistic or tyrannical constraints. Prosperity can prevail and scarcity disappear only if meddling bureaucrats and politicians leave the citizens of the Internet to their own devices. As Louis Rossetto, the founder and publisher of /{Wired}/, bluntly put it: “The idea that we need to worry about anyone being ‘left out’ is entirely atavistic to me, a product of that old economics of scarcity and the 19th century social thinking that grew out of it.”~{ David Hudson, interview with Louis Rossetto, “What Kind of Libertarian,” /{Rewired}/ (Macmillan, 1997), p. 255. }~ +={code:law, as+4;law:code as+4;Lessig, Lawrence:Code and Other Laws of Cyberspace+4;Dyson, Esther;Gilder, George;Gilmore, John;Rossetto, Louis;Toffler, Alvin;Internet:architecture of+2|freedom of+1;cyberspace: economic effects of} + +Lessig was more wary. In /{Code}/, he constructed a sweeping theoretical framework to show how freedom on the Internet must be actively, deliberately constructed; it won’t simply happen on its own. 
Inspired by conversations with computer programmer Mitch Kapor, who declared that “architecture is politics” in 1991, Lessig’s book showed how software code was supplanting the regulatory powers previously enjoyed by sovereign nation-states and governments. The design of the Internet and software applications was becoming more influential than conventional sources of policymaking — Congress, the courts, federal agencies. /{Code is law}/, as Lessig famously put it. +={Kapor, Mitch} + +What was worrisome, Lessig warned, was how relatively small changes in software code could alter the “architecture of control” governing the Internet. The current architecture was not necessarily stable and secure, in other words. Moreover, any future changes were likely to be animated by private, commercial forces and not publicly accountable and democratic ones. Lessig illustrated this point with a disarmingly simple drawing of a dot representing an individual, whose range of behaviors is affected by four distinct forces: software architecture, the market, law, and social norms. Each of these factors conspires to regulate behaviors on the Internet, Lessig argued — and commercial forces would clearly have the upper hand. + +/{Code}/ was a powerful and sobering rebuttal to libertarian assumptions that “keeping government out” would safeguard individual freedom. Its analysis quickly became the default conceptual model for talking about governance on the Internet. It helped situate many existing policy debates — Internet censorship, digital privacy, copyright disputes — in a larger political and policy framework. Although many readers did not share Lessig’s pessimism, /{Code}/ helped expose an unsettling truth — that a great many legislators, federal agencies, and courts were largely oblivious to the regulatory power of software code. 
They didn’t have a clue about the technical structures or social dynamics affecting life on the Internet, let alone how existing law would comport with this alien domain. +={Internet:freedom of} + +/{Code}/ was widely praised and widely read. But it was only one project of that period that catapulted Lessig to international prominence. In the mid-1990s, Charles Nesson, a bold-thinking, highflying evidence professor at Harvard Law School, was organizing the Berkman Center for Internet & Society. The new project aspired to study “the most difficult and fundamental problems of the digital age,” and show public-interest leadership in addressing them. Nesson, who had become modestly famous for his role in the W. R. Grace litigation chronicled in Jonathan Harr’s /{A Civil Action}/, recruited Lessig to be the Berkman Center’s marquee star in 1997. It was an irresistibly prestigious and visible perch. +={Berkman Center for Internet & Society;Nesson, Charles} + +This was demonstrated within months, when Judge Penfield Jackson tapped Lessig to be a “special master” in one of the most important antitrust cases in a generation, /{U.S. v. Microsoft}/.~{ Steven Levy, “The Great Liberator,” /{Wired}/, October 2002, and Poynder interview with Lessig, April 7, 2006. }~ Lessig’s assignment was to sift through the welter of technical claims and counterclaims in the case and produce a report with recommendations to the court. The government alleged that Microsoft had abused its monopoly power in its sales of its operating system and Web browser, particularly in “bundling” the browser with the Windows operating system. +={Jackson, Penfield;Microsoft:antitrust lawsuit against+1;Lessig, Lawrence:Microsoft lawsuit, and+1} + +Microsoft soon raised questions about Lessig’s neutrality as a special master. Among other objections, the company cited his book’s claim that software code is political and a passage that said Microsoft was “absolutely closed” compared to an open-standards body. 
}~ It also dredged up an e-mail in which Lessig facetiously equated using Microsoft’s Internet Explorer with “selling one’s soul.” After nearly eight weeks on the job, the Court of Appeals, citing a technicality, took Lessig off the case, to his enduring disappointment. He has been deeply frustrated by the implication that he had been removed for bias (the court made no such finding) and by his abrupt banishment from a plum role in a landmark case. 
={Lessig, Lawrence:Code and Other Laws of Cyberspace}

2~ Waging the /{Eldred}/ Case

Back at the Berkman Center, however, there were plenty of opportunities to influence the digital future. The center was a hothouse of venturesome ideas and eccentric visionaries. It was a place where John Perry Barlow could drop by to talk with Lessig and Berkman co-founder Jonathan Zittrain, one of the early cyberlaw experts. The center drew upon the ideas of intellectual property guru William (Terry) Fisher; Charles Nesson, who specialized in launching Big Ideas; and a self-renewing batch of bright law students eager to make their mark on a hip and emerging field of law. Richard Stallman at nearby MIT was an occasional visitor, as was MIT computer scientist Hal Abelson, who combined deep technical expertise with an appreciation of the social and democratic implications of digital technologies. It was during this time, in 1998, that Lessig and Abelson jointly taught The Law of Cyberspace: Social Protocols at Harvard Law School. The class was an attempt to make sense of some novel legal quandaries exploding on the Internet, such as computer crime, identity authentication, digital privacy, and intellectual property. 
+={Berkman Center for Internet & Society+5;Nesson, Charles;Abelson, Hal:cyberlaw, and;Barlow, John Perry;Fisher, William (Terry);Stallman, Richard;Zittrain, Jonathan;Internet:legal quandaries arising on;law:cyberspace, of}

While nourished by the work of his academic colleagues, Lessig was determined to come up with ingenious ways to /{do something}/ about the distressing drift of copyright law. It was important to take the offensive. Notwithstanding the pessimism of /{Code}/, Lessig’s decidedly optimistic answer was to gin up a constitutional challenge to copyright law. Many legal experts and even sympathetic colleagues were skeptical. Peter Jaszi, a leading intellectual property law professor at American University, told a reporter at the time, “It’s not so much that we thought it was a terrible idea but that it was just unprecedented. Congress has been extending copyright for 180 years, and this is the first time someone said it violated the Constitution.”~{ David Streitfeld, “The Cultural Anarchist vs. the Hollywood Police State,” /{Los Angeles Times Magazine}/, September 22, 2002, p. 32. }~ Others worried that an adverse ruling could set back the larger cause of copyright reform. 
={Jaszi, Peter;Lessig, Lawrence:Code and Other Laws of Cyberspace;law:social change, and+3;copyright law:expansion of}

In the spirit of the commons, Lessig and his Berkman Center colleagues decided that the very process for mounting the /{Eldred}/ lawsuit would be different: “Rather than the secret battles of lawyers going to war, we will argue this case in the open. This is a case about the commons; we will litigate it in the commons. Our arguments and strategy will be developed online, in a space called ‘openlaw.org.’ Key briefs will be drafted online, with participants given the opportunity to criticize the briefs and suggest other arguments. . . . 
Building on the model of open source software, we are working from the hypothesis that an open development process best harnesses the distributed resources of the Internet community. By using the Internet, we hope to enable the public interest to speak as loudly as the interests of corporations.”~{ Lawrence Lessig, “Commons Law,” June 24, 1999, posted on www.intellectu alcapital.com/issues/issue251/item5505.asp, and Open Law archive at http:// cyber.law.harvard.edu/openlaw. }~ +={Eldred v. Reno/Eldred v. Ashcroft+28;Lessig, Lawrence:Eldred v. Reno, and+28|law in contemporary context, and+1} + +Emulating the open-source development model was a nice touch, and perhaps useful; dozens of people around the world registered at the Openlaw site and posted suggestions. Some of the examples and legal critiques were used in developing the case, and the model was later used by lawyers in the so-called DeCSS case, in which a hacker broke the encryption of a DVD. But it turns out that open, distributed creativity has its limits in the baroque dance of litigation; it can’t work when secrecy and confidentiality are important, for example. + +The case, /{Eldred v. Reno}/ — later renamed /{Eldred v. Ashcroft}/ when the Bush II administration took office — was filed in federal district court in Washington, D.C., on January 11, 1999.~{ /{Eldred v. Reno}/ (later, Eldred v. Ashcroft), 537 U.S. 186 (2003), affirming 239 F. 3d 372. }~ The complaint argued that the Copyright Term Extension Act violated Article 1, section 8, clause 8, of the Constitution, which provides that copyright protection shall be of limited duration. It also argued that the Term Extension Act violated the free speech clause of the First Amendment. In some respects, the case could never have been waged without the foundation of legal scholarship produced in the 1990s, which rehearsed a great many of the arguments presented to the Court. 
In opposition were motion picture studios, the music industry, and book publishers. They argued that Congress had full authority under the Constitution to extend copyright terms, as it had done since the beginning of the republic. +={copyright law:expansion of;Copyright Term Extension Act;Copyright Clause, U.S. Constitution} + +In October 1999, the U.S. District Court brusquely dismissed the case without even holding a trial. Lessig and his Berkman colleagues were not entirely surprised, and quickly set about filing an appeal with the U.S. Court of Appeals for the District of Columbia Circuit. Going beyond the Openlaw experiment at Berkman, they enlisted the support of several lawyers at Jones, Day, Reavis & Pogue. On appeal, Lessig was allowed to argue the case personally to a panel of judges. But once again, in February 2001, the case was dismissed. Lessig considered it a significant victory that it was a 2-1 ruling, however, which meant that a further appeal was possible. Lessig was also encouraged that the dissenter had been the court’s most conservative member, Judge David Sentelle. Lessig requested that the full circuit court hear the case — a petition that was also rejected, this time after picking up support from a liberal dissenter, Judge David Tatel. +={Jones, Day, Reavis & Pogue+4;Sentelle, David;Tatel, David} + +Normally, this would have been the end of the road for a case. Very few appeals court cases are accepted for review by the U.S. Supreme Court, particularly when the case has not even been argued at trial and no other courts have passed judgment on the statute. So it was quite surprising when the Supreme Court, in February 2002, accepted /{Eldred}/ for review and scheduled oral arguments for October 2002. +={Eldred v. Reno/Eldred v. Ashcroft:Supreme Court, and+11} + +At this point, Lessig realized he needed the advice and support of some experienced Supreme Court litigators. 
He enlisted help from additional lawyers at Jones, Day; Alan Morrison of Public Citizen Litigation Group; Kathleen Sullivan, the dean of Stanford Law School; and Charles Fried, a former solicitor general under President Reagan. Professor Peter Jaszi and the students of his law clinic drafted an amicus brief. 
={Morrison, Alan;Fried, Charles;Jaszi, Peter;Sullivan, Kathleen}

A key concern was how to frame the arguments. Attorney Don Ayer of Jones, Day repeatedly urged Lessig to stress the dramatic harm that the Bono Act was inflicting on free speech and free culture. But as Lessig later confessed, “I hate this view of the law. . . . I was not persuaded that we had to sell our case like soap.”~{ Lessig, “How I Lost the Big One,” /{Legal Affairs}/, March/April 2004, available at http://www.legalaffairs.org/issues/March-April-2004/story_lessig_marapr04.msp. }~ Lessig was convinced that the only way /{Eldred}/ could prevail at the Supreme Court would be to win over the conservative justices with a matter of principle. To Lessig, the harm was obvious; what needed emphasis was how the Sonny Bono Act violated “originalist” principles of jurisprudence. (Originalist judges claim to interpret the Constitution based on its “original” meanings in 1791, which includes a belief that Congress has strictly enumerated powers, not broad legislative discretion.) 
={Ayer, Don;law:originalist principles of+2}

“We tried to make an argument that if you were an originalist— in the way these conservative judges said they were in many other cases — then you should look to the original values in the Copyright Clause,” said Lessig. “And we argued that if you did that then you had to conclude that Congress had wildly overstepped its constitutional authority, and so the law should be struck down.”~{ Lessig interview with Richard Poynder, April 7, 2006, p. 25. 
}~ Flaunting the harm caused by the copyright term extension struck Lessig as showy and gratuitous; he considered the harm more or less self-evident. In the aftermath of a public debate that Lessig once had with Jack Valenti, a questioner on Slashdot, a hacker Web site, suggested that Lessig would be more persuasive if he asserted “a clear conception of direct harm . . . than the secondary harm of the copyright holders getting a really sweet deal.” Lessig conceded that such a focus “has been a weakness of mine for a long time. In my way of looking at the world, the point is a matter of principle, not pragmatics. . . . There are many others who are better at this pragmatism stuff. To me, it just feels insulting.”~{ “Lawrence Lessig Answers Your Questions,” Slashdot.org, December 21, 2001, Question 1, “The question of harm,” posted by “caduguid,” with Lessig response, available at http://interviews.slashdot.org/article.pl?sid=01/12/21/155221. }~ 
={copyright law:expansion of;Copyright Clause, U.S. Constitution;Valenti, Jack}

And so, despite warnings to the contrary, Lessig’s legal strategy relied on a call to uphold originalist principles. Having clerked for Justice Scalia and Judge Posner, Lessig felt that he understood the mind-set and sympathies of the conservative jurists. “If we get to the Supreme Court,” Lessig told Slashdot readers in December 2001, “I am certain that we will win. This is not a left/right issue. The conservatives on the Court will look at the framers’ Constitution— which requires that copyrights be granted for ‘limited times’ — and see that the current practice of Congress . . . makes a mockery of the framers’ plan. And the liberals will look at the effect of these never-ending copyrights on free speech, and conclude that Congress is not justified in this regulation of speech. 
The Supreme Court doesn’t give a hoot about Hollywood; they will follow the law.”~{ Lessig response to question 11, Slashdot.org, “Will the extension of copyright continue?” posed by “Artifice_Eternity,” available at http://interviews.slashdot.org/article.pl?sid=01/12/21/155221. }~ 
={Posner, Richard;Scalia, Antonin;Copyright Clause, U.S. Constitution;copyright law:expansion of+5;Copyright Term Extension Act+5}

Lessig took pride in the fact that thirty-eight amicus briefs were filed on behalf of /{Eldred}/. They included a wide range of authors, computer and consumer electronics companies, and organizations devoted to arts, culture, education, and journalism. Besides the usual suspects like the Free Software Foundation, Electronic Frontier Foundation, and Public Knowledge, supporting briefs were filed by fifteen economists including Kenneth Arrow and Milton Friedman, Phyllis Schlafly of the Eagle Forum, and the Intel Corporation.

At oral arguments, Lessig immediately confronted a skeptical bench. Justice Sandra Day O’Connor worried about overturning years of previous copyright term extensions. Justice William Rehnquist proposed, “You want the right to copy verbatim other people’s books, don’t you?” And when Justice Anthony Kennedy invited Lessig to expound upon the great harm that the law was inflicting on free speech and culture, Lessig declined the opportunity. He instead restated his core constitutional argument, that copyright terms cannot be perpetual. “This was a correct answer, but it wasn’t the right answer,” Lessig later confessed in a candid postmortem of the case. “The right answer was to say that there was an obvious and profound harm. Any number of briefs had been written about it. Kennedy wanted to hear it. And here was where Don Ayer’s advice should have mattered. This was a softball; my answer was a swing and a miss.”~{ See http://www.supremecourtus.gov/oral_arguments/argument_transcripts/01-618.pdf. 
See also Lessig, “How I Lost the Big One,” and Linda Greenhouse, “Justices Hear Arguments in Challenge to Copyrights,” /{New York Times}/, October 10, 2002. A number of Supreme Court opinions in the /{Eldred}/ case can be found at the Openlaw archive at http://cyber.law.harvard.edu/openlaw/eldredvreno. The /{Loyola Los Angeles Law Review}/ held a symposium on /{Eldred v. Ashcroft}/, available at http://llr.lls.edu/volumes/v36-issue1. }~ No justices spoke in defense of the Sonny Bono Act. 
={Ayer, Don;Kennedy, Anthony;O’Connor, Sandra Day;Rehnquist, William}

Yet they had clear reservations about the Supreme Court’s authority to dictate the length of copyright terms.

A few months later, on January 15, 2003, the Supreme Court announced its ruling: a 7-2 defeat for Eldred. The majority opinion, written by Justice Ruth Bader Ginsburg, did not even raise the “enumerated powers” argument or engage with originalist philosophy. “We are not at liberty to second-guess Congressional determinations and policy judgments of this order, however debatable or arguably unwise they may be,” Ginsburg wrote.~{ 537 U.S. 186 (2003). See also “Court Majority Says It Won’t Second-Guess Congress,” /{New York Times}/, January 16, 2003, p. A22. }~ She likewise ignored the idea that there is a “copyright bargain” between the American people and copyright holders, which entitles the public to certain rights of access to the public domain. As for copyright’s impact on free speech, Ginsburg invoked the fair use doctrine and the “idea/expression dichotomy” (the notion that ideas are freely available but expression can be copyrighted) as sufficient protections for the public. She ignored the fact that both doctrines were (and are) under fierce assault. 
+={Ginsburg, Ruth Bader;law:originalist principles of;fair use doctrine:copyright law, and;copyright law:fair use doctrine, and|balance of public and private rights} + +Justices Stephen Breyer and John Paul Stevens accepted Lessig’s arguments, and wrote separate dissents. Breyer — a respected scholar of copyright law since his famous 1970 essay “The Uneasy Case for Copyright”~{ Stephen Breyer, “The Uneasy Case for Copyright,” /{Harvard Law Review}/ 84, no. 281 (1970). }~ —agreed that copyright terms had effectively become perpetual, and that the law was therefore unconstitutional. Stevens complained that the majority decision reneged on the copyright bargain and made copyright law “for all intents and purposes judicially unreviewable.” +={Breyer, Stephen;Stevens, John Paul} + +In assessing the broad impact of the /{Eldred}/ ruling, copyright scholar Siva Vaidhyanathan cited law professor Shubha Ghosh’s observation that the /{Eldred}/ ruling had effectively “deconstitutionalized” copyright law. /{Eldred}/ pushed copyright law +={Ghosh, Shubha;Vaidhyanathan, Siva+1} + +_1 farther into the realm of policy and power battles and away from principles that have anchored the system for two centuries. That means public interest advocates and activists must take their battles to the public sphere and the halls of Congress. We can’t appeal to the Founders’ wishes or republican ideals. We will have to make pragmatic arguments in clear language about the effects of excessive copyright on research, teaching, art and journalism. And we will have to make naked mass power arguments with echoes of “we want our MP3” and “it takes an industry of billions to hold us back.”~{ Siva Vaidhyanathan, “After the Copyright Smackdown: What Next?” /{Salon}/, January 17, 2003, at http://www.salon.com/tech/feature/2003/01/17/copy right.print.html. }~ +={copyright law:balance of public and private rights} + +2~ A Movement Is Born +={Eldred v. Reno/Eldred v. 
Ashcroft:effects of+12} + +The /{Eldred}/ case had a paradoxical effect. Early on, Lessig had said, “We didn’t want to make it a big political cause. We just wanted to make it an extension of the existing Supreme Court jurisprudence, because we realized that the only way to win the case was to win the conservatives’ view, and the conservatives were not likely to be motivated by great attacks on media concentration.”~{ Interview with Poynder, April 7, 2006, p. 25. }~ The upshot of the Court’s ruling was to intensify the political battles over copyright law. While such resistance was already growing, the /{Eldred}/ ruling and the publicity surrounding it spawned a new generation of “copyfighters.” Lessig had wanted to protect the commons through law, only to find that the courts were unwilling to offer any help. Any answers would now have to be pursued through politics, culture, and technology — and ingenious uses of law where feasible. How to proceed in this uncharted territory became the next challenge, as we see in chapter 4. + +After four years of relentless work, Lessig was frustrated and dejected. “I had failed to convince [the Supreme Court] that the issue was important,” he wrote in a frank confessional, “and I had failed to recognize that however much I might hate a system in which the court gets to pick the constitutional values that it will respect, that is the system we have.”~{ Lessig, “How I Lost the Big One.” See also Lessig, /{Free Culture}/ (New York: Penguin, 2004), pp. 228–48. }~ For a constitutional law scholar, it was a rude awakening: constitutional originalists could not be taken at their word! Scalia and fellow justice Clarence Thomas had declined to stand behind their jurisprudential principles. +={law:originalist principles of+1;Scalia, Antonin;Thomas, Clarence} + +Yet Lessig had certainly been correct that /{Eldred}/ would not succeed unless it convinced the Court’s conservative majority. 
The fact that the originalist gambit failed was perhaps the strongest message of all: /{nothing}/ would convince this Court to rein in the excesses of copyright law. + +Even before the Supreme Court had delivered its ruling, Lessig admitted his misgivings about the power of law to solve copyright’s failings: “The more I’m in this battle, the less I believe that constitutional law on its own can solve the problem. If Americans can’t see the value of freedom without the help of lawyers, then we don’t deserve freedom.”~{ Lessig response to Question 11, “Cyberspace Amendment,” posed by “kzinti,” in Slashdot, available at http://interviews.slashdot.org/article.pl?sid=01/12/ 21/155221. }~ Yet mobilizing freedom-loving Americans to seek redress from Congress was also likely to be doomed. Hollywood film studios and record companies had showered some $16.6 million and $1.8 million, respectively, on federal candidates and parties in 1998. Legislators know who butters their bread, and the public was not an organized influence on this issue. No wonder a progressive copyright reform agenda was going nowhere. +={Copyright Term Extension Act+1;Eldred v. Reno/Eldred v. Ashcroft:Supreme Court, and;law:limited power of;copyright law:expansion of+1} + +Four years after the /{Eldred}/ ruling, Lessig had some second thoughts about the “Mickey Mouse” messaging strategy. Opponents of the copyright term extension, including Lessig, had often flaunted Mickey motifs in their dealings with the press and railed at the “Mickey Mouse Protection Act.” Yet in 2006, Lessig lamented to one interviewer that “the case got framed as one about Mickey Mouse. Whereas the reality is, who gives a damn about Mickey Mouse? The really destructive feature of the Sonny Bono law is the way it locks up culture that has no continuing commercial value at all. It orphaned culture. So by focusing on Mickey Mouse, the Court thought this was an issue of whether you believed in property or not. 
If, however, we had focused people on all the culture that is being lost because it is locked up by copyright, we might have succeeded.”~{ Interview with Poynder, April 7, 2006, pp. 26–27. }~ + +The lasting impact of the /{Eldred}/ case, ironically, may have less to do with the law than with the cultural movement it engendered. The lawsuit provided a powerful platform for educating the American people about copyright law. A subject long regarded as arcane and complicated was now the subject of prominent articles in the /{New York Times}/, /{Salon}/, computer magazines, wire services, and countless other publications and Web sites. A cover story for the /{Los Angeles Times}/'s Sunday magazine explained how the case could “change the way Hollywood makes money — and the way we experience art.” /{Wired}/ magazine headlined its profile of Lessig “The Great Liberator.” Lessig himself barnstormed the country giving dozens of presentations to librarians, technologists, computer programmers, filmmakers, college students, and many others. Even Lessig’s adversary at the district court level, Arthur R. Miller, a Harvard Law School professor, agreed, “The case has sparked a public discussion that wasn’t happening before.” +={Miller, Arthur R.} + +Lessig’s orations often provoked the fervor of a revival meeting — and led to more than a few conversions. This may appear surprising because Lessig, with his receding hairline and wireframe glasses, strikes an unprepossessing pose. In the professorial tradition, he can sometimes be didactic and patronizing. But on the stage, Lessig is stylish, poised, and mesmerizing. His carefully crafted talks are intellectual but entertaining, sophisticated but plainspoken— and always simmering with moral passion. He typically uses a customized version of Keynote, a Macintosh-based program similar to PowerPoint, to punctuate his dramatic delivery with witty visuals and quick flashes of words. 
(Experts in professional presentations have dubbed this style the “Lessig Method,” and likened it to the Takahashi Method in Japan because slides often use a single word, short quote, or photo.)~{ Garr Reynolds’s blog on professional presentation design, “The ‘Lessig Method’ of Presentation,” October 5, 2005, available at http://presentationzen.blogs.com/presentationzen/2005/10/the_lessig_meth.html. }~ + +More than a sidebar, Lessig’s public speaking has been an important aspect of his leadership in building a commons movement. His talks have helped some fairly sequestered constituencies in technical fields — computer programming, library science, Internet policy, copyright law — understand the larger political and cultural significance of their work. The results have sometimes been galvanizing. As one veteran hacker told me in 2006, “There’s a whole connoisseurship of Lessig talks. He’s a little past his peak right now — but there was a period where, like when he gave the lecture at OSCON [a conference of open-source programmers], when he was done, they wanted to start a riot. People were literally milling around, looking for things to smash. He was saying to these people who worked on open source, ‘There’s a larger world context to your work. The government is doing things — and you can stop them!’ ”~{ Interview with Aaron Swartz, October 10, 2006. }~ +={Lessig, Lawrence:public speaker, as} + +Following oral arguments before the Supreme Court, the movement — such as it was — had a rare gathering of its leaders. Public Knowledge co-hosted a luncheon for those who had aided the lawsuit. The diners spanned the worlds of libraries, computers, Internet publishing, public-interest advocacy, and many other fields. The event was held at Washington’s Sewall-Belmont House, where the National Woman’s Party once led the fight for women’s suffrage. 
This prompted Gigi Sohn, president of Public Knowledge, to declare, “We, too, are building a movement.”~{ Amy Harmon, “Challenge in Copyright Case May Be Just a Beginning,” /{New York Times}/, October 14, 2002. }~ +={Sohn, Gigi} + +So after arguing — and losing — before the U.S. Supreme Court, what does a copyright superstar do for an encore? + +A seed had already been planted at the Starbucks meeting four years earlier. Eldred recalls telling Lessig, “I think this case is very important, and I think you’re the right guy for this. But at the same time, I’d like to talk to you about something else. I really think that we need to start up some sort of a copyright conservancy, which would be sort of like a nature conservancy. It would allow people to donate books to the public domain; we could then take ownership of them. They could maybe have a tax deduction for them, and we could — instead of having the book privately owned — they would be in the public domain, maybe before the copyright term expired. We could sort of have an independent group maintain this conservancy, and allow the books to be put on the Internet for free.” +={copyright conservancy;Eldred, Eric:copyright conservancy, and|Lessig, and+1;Lessig, Lawrence:Eldred, and+1;Eldred, Eric:Lessig, and|public domain, and} + +Eldred remembers that Lessig “was sort of stunned. He didn’t have anything to say for a little while. We sort of looked at each other, and I think he was very shocked and surprised that I said that. And he said, ‘I don’t think we can do it until we’ve done the work on the copyright term extension act suit, but I promise to do it.’”~{ Interview with Eric Eldred, August 1, 2006. }~ + +:B~ PART II + +:C~ The Rise of Free Culture + +1~intro_ii [Intro] -# + +To the commoners seeking to build a new cultural universe, the failure of the /{Eldred}/ case in the U.S. Supreme Court was both depressing and liberating. 
It confirmed what the legal scholars of the 1990s had long suspected— that both Congress and the courts were captives to a backward-looking vision of copyright law. Government was tacitly committed to a world of centralized and commercial mass media managed by elite gatekeepers. That was not likely to change soon. +={Eldred v. Reno/Eldred v. Ashcroft:effects of:Supreme Court, and} + +As for helping build a new digital republic with a more open, democratic character, the Clinton administration made its intentions clear in its infamous White Paper. It wanted to convert the gift economy of the Internet into a wall-to-wall marketplace. It wanted to give sellers absolute control over content and limit the disruptions of innovative newcomers. The government, acting on behalf of the film, record, and book industries, had no desire to legitimize or fortify the sharing culture that was fast gaining a hold on the Internet. Quite the contrary: strengthening the public’s fair use rights, access to the public domain, and online free speech rights might interfere with the perceived imperatives of electronic commerce. /{Freedom}/ would therefore have to be defined as the freedom of consumers to buy what incumbents were selling, not as a robust civic freedom exercised by a sovereign citizenry. +={Clinton administration:White Paper;fair use doctrine:White Paper vs.;White Paper [on copyright];gift economy;Internet:gift economy of;free culture:Internet, of the} + +By the conclusion of /{Eldred}/, in 2003, it was clear that the copyright dissidents were not just confronting one policy battle or another; they were confronting an antiquated and entrenched worldview. While Lessig, Eldred, and the growing band of commoners realized that it was important to pay close attention to pending legislation and lawsuits, many of them also realized that the real challenge was to develop a new vision — and then try to actualize it. 
+ +={Eldred, Eric:copyright law, and} + +A more affirmative, comprehensive vision was needed to supersede the limited intellectual parameters of copyright law. Copyright law was a mode of property discourse, after all, and that discourse simply could not adequately express the aspirations of hackers, citizen-journalists, librarians, academics, artists, democrats, and others trying to secure open online spaces for themselves. The online insurgents acknowledged the great importance of fair use and the public domain, but they also considered such doctrines to be vestiges of an archaic, fraying legal order. It was time to salvage what was valuable from that order, but otherwise instigate a new language, a new aesthetic, a new legal regime, a new worldview. +={copyright law:property rights, and;property rights:copyright law, and} + +This meant venturing into risky, unknown territory. Law professors accustomed to working within the comfort of the academy would have to clamber onto public stages and set forth idealistic, politically inflected scenarios for Internet culture. Activists accustomed to rhetorical critiques would have to initiate pragmatic, results-driven projects. Free software hackers would have to invent new software and digital protocols. Volunteers would need to be enlisted and organized and funding secured to sustain bare-boned organizational structures. Wholly new constituencies would have to be imagined and mobilized and brought together into something resembling a new movement. Part II, The Rise of Free Culture, describes the building of this foundation from 2000 to 2005. + +1~ 4 INVENTING THE CREATIVE COMMONS + +/{A public-spirited cabal schemes for a way to legalize sharing.}/ + +Larry Lessig remembers his Starbucks conversation with Eric Eldred as a “crystallizing moment,” a revelation that the stakes in copyright reform were much higher than he had originally imagined. 
Both Lessig and Eldred obviously wanted to win the lawsuit and recognized its importance. But Eldred had made clear that he didn’t just want to roll back regressive laws; he wanted to develop an affirmative and sustainable alternative. +={copyright law:property rights, and+1;property rights:copyright law, and+1;Eldred, Eric:copyright law, and+2|Lessig, and+16;Lessig, Lawrence:Eldred, and+16} + +This got Lessig thinking: “So, okay — you get the Supreme Court to strike the laws down, but you still live in a world where people think that everything is property and has to be owned. If nobody has a political awareness about why the judicial response makes sense, then it’s a pretty empty result.”~{ Interview with Lawrence Lessig, March 20, 2006. }~ Throughout the /{Eldred}/ case, paradoxically enough, Lessig says he was “skeptical” of the traditional liberal strategy of seeking redress through the courts. +={Eldred v. Reno/Eldred v. Ashcroft:Supreme Court, and} + + +The turning point for him, Lessig recalled, was in recognizing that Eldred was not just a plaintiff in a test case but “someone trying to build a movement around a practice of making things available in a way that took advantage of the infrastructure of the Net.”~{ Ibid. }~ True, Eldritch Press resembled an old-style archive of canonical works. Yet Eldred’s goal all along had been to host an active social community of book lovers, not just provide a repository for old texts. The Web site’s real importance was in the social activity it represented — the fact that thousands of participant-readers could come together around a self-selected amateur eager to build a new type of social community and information genre. 
+={Eldritch Press;World Wide Web:social activity on+7} + +Lessig told me that when he recognized Eldred’s Web site as a new type of social practice, it helped define the challenge: “The question became a very technical, legal one: How could we instantiate that movement?” Lessig said he needed to find a way to “disambiguate the social practice.” By that bit of tech-legalese, he meant, How could the practices and values animating Eldred’s Web site be articulated in law, denoted on the Web, and thereby be seen for what they were: a new mode of social practice and cultural freedom? + +It helps to remember that in 1998 and the following years, the legality of sharing online works and downloading them was highly ambiguous. Prevailing legal discourse set forth a rather stark, dualistic world: either a work is copyrighted with “all rights reserved,” or a work is in the public domain, available to anyone without restriction. The mental categories of the time offered no room for a “constituency of the reasonable,” in Lessig’s words. +={copyright law:public domain vs.;public domain:copyright law, and} + +Copyright law made nominal provisions for a middle ground in the form of the fair use doctrine and the public domain. But Lessig realized that fair use was “just a terrible structure on which to build freedom. There are basically no bright lines; everything is a constant debate. Of course, we don’t want to erase or compromise or weaken [these doctrines] in any sense. But it’s very important to build an infrastructure that doesn’t depend upon four years of litigation.” Or as Lessig was wont to put it in his impassioned performances on the stump: “Fuck fair use.”~{ Robert S. Boynton, “Righting Copyright: Fair Use and Digital Environmentalism,” /{Bookforum}/, February/March 2005, available at http://www.robertboynton.com/articleDisplay.php?article_id=1. 
}~ +={copyright law:fair use doctrine, and+2;fair use doctrine:copyright law, and+2;Lessig, Lawrence:fair use, on+2} + +This was a theatrical flourish, of course. Back in Palo Alto, Lessig in 2001 had launched the Center for Internet & Society at Stanford Law School, which actively takes on lawsuits seeking to vindicate the public’s fair use rights, among other things. One notable case was against Stephen Joyce, the grandson of novelist James Joyce. As executor of the Joyce literary estate, Stephen Joyce steadfastly prevented dozens of scholars from quoting from the great writer’s archive of unpublished letters.~{ See, e.g., D. T. Max, “The Injustice Collector,” /{New Yorker}/, June 19, 2006, pp. 34ff. }~ (After losing a key court ruling in February 2007, the Joyce estate settled the case on terms favorable to a scholar who had been denied access to the Joyce papers.) +={Joyce, Stephen} + +But Lessig’s intemperance toward fair use has more to do with the almost subliminal void in legal discourse and political culture. There was no way to talk about the social behaviors exemplified by Eldred’s Web site except through crabbed, legalistic rules. The only available language, the default vocabulary, is copyright law and its sanctioned zones of freedom, such as fair use. Lessig wanted to open up a new, more bracing line of discourse. “We wanted to rename the social practice,” he said. It sounds embarrassingly grandiose to state it so bluntly, but in later years it became clear to Lessig and his loose confederation of colleagues that the real goal was to /{imagine and build a legal and technical infrastructure of freedom}/. +={Lessig, Lawrence:freedom, and|law in contemporary context, and+2} + +Initially, the goal was more exploratory and improvisational — an earnest attempt to find leverage points for dealing with the intolerable constraints of copyright law. 
Fortunately, there were instructive precedents, most notably free software, which by 2000, in its opensource guise, was beginning to find champions among corporate IT managers and the business press. Mainstream programmers and corporations started to recognize the virtues of GNU/Linux and opensource software more generally. Moreover, a growing number of people were internalizing the lessons of Code, that the architecture of software and the Internet really does matter. +={free software:open source software, as;GNU/Linux;software:open source;Internet:architecture of+1;Lessig, Lawrence:Code and Other Laws of Cyberspace} + +Even as he sought to prevail in /{Eldred}/, Lessig understood that enduring solutions could not be conferred by the U.S. Supreme Court; they had to be made real through people’s everyday habits. The commoners needed to build a new set of tools to actualize freedom on the Internet, and to develop a new language, a new epistemology, a new vision, for describing the value proposition of sharing and collaboration. The big surprise, as we will see in chapter 6, was the latent social energies poised to support this vision. +={Eldred v. Reno/Eldred v. Ashcroft+7:Supreme Court, and;Internet:freedom of;Lessig, Lawrence:Eldred v. Reno, and|freedom, and} + +2~ What If . . . ? + +Shortly after the /{Eldred}/ case was filed in January 1999, a number of Harvard Law students working with Lessig announced the formation of a new group, “Copyright’s Commons.”~{ The Copyright’s Commons Web site is now defunct but can be found at the Internet Archive’s Wayback Machine, at http://cyber.law.harvard.edu/cc. }~ Led by Jennifer Love and Ashley Morgan, Copyright’s Commons published a monthly Web newsletter that provided updates on the progress of the /{Eldred}/ case and miscellaneous news about the public domain. +={Love, Jennifer;Morgan, Ashley;Copyright’s Commons+3:See also Creative Commons;Creative Commons (CC):Copyright’s Commons, as+3;Eldred v. Reno/Eldred v. 
Ashcroft:Copyright’s Commons, and+3} + +Copyright’s Commons described itself as “a coalition devoted to promoting the public availability of literature, art, music, and film.” It was actually a named plaintiff in the /{Eldred}/ case. + +That spring, Copyright’s Commons announced a new project that it called the “counter-copyright [cc] campaign.” Billed as “an alternative to the exclusivity of copyright,” the campaign invited the general public to “show your support for the public domain by marking your work with a [cc] and a link to the Copyright’s Commons website. . . . If you place the [cc] icon at the end of your work, you signal to others that you are allowing them to use, modify, edit, adapt and redistribute the work that you created.” +={counter-copyright (cc) campaign} + +The project may have been an imaginative call to arms, but there was no infrastructure behind it except one Web page, and no background material except a Web link to the Open Source Initiative. Wendy Seltzer, a Harvard Law student at the time, recalled that the [cc] symbol produced by Copyright’s Commons “was supposed to be a public domain dedication, but nobody had yet gone through all of the thinking about what was actually required to put something into the public domain, and did this satisfy the ‘affirmative act’ requirements [of the law]? Part of the germ of the Creative Commons was thinking about what would it take to make this — the [cc] symbol — an actual, meaningful, legally binding statement.”~{ Interview with Wendy Seltzer, September 28, 2006. }~ +={Seltzer, Wendy} + +Lessig, in the meantime, was keeping a frenetic schedule. He was overseeing the progress of the /{Eldred}/ lawsuit; traveling to give speeches to dozens of conferences and forums every year; promoting his book Code; and writing a monthly column in the /{Industry Standard}/ until it went under with the tech bubble collapse in 2001. 
The year before, Kathleen Sullivan of Stanford Law School persuaded Lessig to join its faculty and supervise a new law clinic, the Center for Internet and Society.~{ Ross Hanig, “Luring Lessig to Stanford Law School,” /{Recorder}/, October 17, 2001, at http://www.law.com. }~ Along the way Lessig also got married to Bettina Neuefeind, a human rights lawyer. +={Sullivan, Kathleen;Neuefeind, Bettina;Center for Internet and Society;Lessig, Lawrence:Code and Other Laws of Cyberspace;Lessig, Lawrence:Eldred v. Reno, and+1} + +Work on /{Eldred}/ intensified after the district court dismissed the case in October 1999. Lessig embarked on a new round of legal strategizing with colleagues to prepare the appeals court brief, which was submitted in May 2000. Throughout this period, intellectual property (IP) thinkers and tech activists — especially those in the Lessig/Cambridge/Stanford axis — were highly attuned to the gathering storm in copyright and software policy. + +One of the most tumultuous developments was Napster, a homemade file-sharing software program that had become an international sensation. Released in June 1999, Napster was the creation of hacker Shawn Fanning, then a student at Northeastern University in Boston. Within a year, the free program had been downloaded by an estimated 70 million users, drawing fierce denunciations by the recording industry and Washington officials. Napster used centralized file directories on the Internet to connect users to music files on thousands of individual computers. By enabling people to download virtually any recorded music in existence, for free, it was as if the fabled “cosmic jukebox” had arrived. Of course, much of the copying was blatantly illegal. Yet consumers welcomed Napster as one of the few vehicles they had for thumbing their nose at a reactionary music industry that refused to offer digital downloads. 
The Recording Industry Association of America (RIAA) sued Napster in December 1999, and succeeded in shutting it down in July 2001.~{ Wikipedia entry, at http://en.wikipedia.org/wiki/Napster. }~ +={Fanning, Shawn;Napster+2;Recording Industry Association of America (RIAA)+1} + +The Napster craze intensified the polarized property discourse that Lessig and his colleagues were trying to transcend. Napster encouraged an either/or debate by suggesting that a song is either private property or contraband; there was no middle ground for fair use or the public domain. While the RIAA and acts like Metallica and Madonna railed against massive copyright infringements, defenders of Napster were quick to point out its promotional power. An album produced by the English rock band Radiohead, for example, was downloaded for free by millions of people before its release — a fact that many credit with pushing the album, Kid A, to the top of the Billboard CD sales chart. But such claims carried little weight against those defending what they considered their property rights. +={Radiohead;property rights:copyright law, and+2;copyright law:property rights, and+2|public domain vs.+1;public domain:copyright law, and+1} + +The controversy over Napster was clearly influential in shaping the debate over how to protect the public domain. Berkman Center co-director Jonathan Zittrain recalls, “If we’re trying to hang the hopes of the community on the right just to copy stuff, we’re going to lose — and maybe we should. [The issue] is actually about the right to manipulate the symbols and talismans of our culture” — what Professor Terry Fisher likes to call “semiotic democracy.”~{ Interview with Jonathan Zittrain, September 28, 2006. 
}~ +={Zittrain, Jonathan;Fisher, William (Terry);free culture:sharing ethic of+1;copyright law:sharing economy vs.+1;democracy:semiotic} + +The problem was that copyright discourse, at least in the hands of the record and film industries, refused to acknowledge that the sharing and reuse of works might be necessary, desirable, or legal. The concept did not compute. There was a conspicuous void in the prevailing terms of debate. So the challenge facing the Cambridge copyright cabal was really a riddle about epistemology, law, and culture rolled into one. How could a new type of free culture, independent of the market, be brought into existence? And how could the creative works of this imagined culture be made legally “shareable” instead of being automatically treated as private property? + +This was an unprecedented challenge. When culture was chiefly a set of analog media — books, records, film — there had been affirmative legal limits on the scope of copyright. Before 1978, the law regulated only commercial uses of a work and only works that had been formally registered, which meant that most works automatically remained in the public domain. Moreover, there was a natural, physical “friction” preventing copyright holders from over-controlling how a work could circulate and be used. When words were fixed in books and sounds embedded in vinyl, people could circulate those objects freely, without having to ask permission from copyright holders. In the digital world, however, the physical constraints of analog media disappeared. Copyright holders now claimed that every digital blip, however transient, constituted a “copyright event” subject to their unilateral control. In practice, this greatly weakened the rights a person could enjoy under the fair use doctrine. 
+={copyright law:public domain vs.+3;public domain:copyright law, and+3;fair use doctrine:copyright law, and+1|digital age, in+1;copyright law:digital age, in+1|fair use doctrine, and+1|limits on the scope of} + +In a sense, the entire legal and cultural framework for free culture needed to be reimagined so it could function in the digital environment. The terms of fair use essentially had to be renegotiated — an undertaking that copyright law had never had to tackle in the past. But how might that be achieved when both Congress and the courts were beholden to the copyright maximalists’ worldview? + +Such were the kinds of conversations that swirled around the Berkman Center, Harvard Law School, MIT, and a handful of progressive intellectual property circles. Such discussions had been going on for years, especially in the context of free software and public-domain scholarship, but now they were reaching the lay public. The Napster and /{Eldred}/ cases were vehicles for educating the press and the public, and Lessig’s book /{Code}/ was becoming must reading for anyone who cared about Internet governance and digital culture. +={Berkman Center for Internet & Society;Napster;Lessig, Lawrence:Code and Other Laws of Cyberspace} + +Amid this swirl of copyright controversy, MIT professor Hal Abelson had lunch with Lessig at the Harvard Faculty Club in July 2000. The two had co-taught a class on cyberlaw two years earlier and shared many interests in the confluence of copyright and technology. One topic that day was Eric Eldred’s idea of a copyright conservancy — a “land trust” for public-domain works. On August 1, 2000, Abelson sent Zittrain an e-mail: +={Abelson, Hal:copyright conservancy idea, and+5|cyberlaw, and+5;Eldred, Eric:copyright conservancy, and;Zittrain, Jonathan;Eldred v. Reno/Eldred v. 
Ashcroft:effects of;copyright conservancy+35} + +_1 /{Here’s an idea that we might be able to get going, and where the Berkman Center could help.}/ + +_1 /{Let’s set up a tax-free, charitable foundation to which artists and record label companies could donate the copyright for recorded music. I’m thinking of all the old music for which there isn’t currently an active market.}/ + +_1 /{The foundation would arrange for this stuff to be loaded for free onto the internet and give the public permission to use it. The artists and record labels get a tax writeoff. The RIAA and Napster hug and kiss, and everyone goes home happy.}/ +={Recording Industry Association of America (RIAA)} + +_1 /{What do you think?}/ + +_1 /{Hal}/ + +Zittrain loved the idea, and suggested that it might make a great clinical project for Harvard Law students that fall. But he wondered if the Copyright Clearinghouse Center — a licensing and permissions organization for music — already offered such a service (it didn’t). Lessig proposed that Stanford and Harvard law schools jointly develop the program. He immediately identified one glaring problem: it would be difficult to “establish a process for valuing gifts of copyrighted stuff that would be clearly understood and would be accepted by the IRS.” +={Zittrain, Jonathan+1} + +What ensued was a lengthy and irregular series of e-mail conversations and social encounters through which the idea was chewed over and refined. Lessig acted as the “supernode” among a small group of participants that initially included Zittrain, Eldred, Nesson, and Diane Cabell, a lawyer and administrator at the Berkman Center. Within a month, others were invited into the conversation: Richard Stallman; Duke Law professors James Boyle and Jerome H. Reichman; and documentary film producer Eric Saltzman, who had just become director of the Berkman Center. 
+={Berkman Center for Internet & Society;Boyle, James:CC formation, and;Cabell, Diane;Reichman, Jerome H.;Nesson, Charles;Saltzman, Eric;Stallman, Richard:Copyright’s Commons, and;Eldred, Eric:copyright conservancy, and} + +A digital archive for donated and public-domain works had great appeal. Just as land trusts acted as trustees of donated plots of land, so the Copyright’s Commons (as Lessig proposed that it be named) would be a “conservancy” for film, books, music, and other works that were either in the public domain or donated. Six weeks after Abelson’s original suggestion, Lessig produced a “Proposal for an Intellectual Property Conservancy” for discussion purposes.~{ Lawrence Lessig, “Proposal for the Intellectual Property Conservancy,” e-mail to ipcommons group, November 12, 2000. }~ He now called the concept “an IP commons” — “the establishment of an intellectual property conservancy to facilitate the collection and distribution under a GPL-like license of all forms of intellectual property.” As elaborated by two Harvard Law School students, Chris Babbitt and Claire Prestel, “The conservancy will attempt to bridge the gap between authors, corporate copyright holders and public domain advocates by providing a repository of donated works which we believe will create a more perfect ‘market’ for intellectual property.”~{ Chris Babbitt and Claire Prestel, “Memorandum to Michael Carroll, Wilmer Cutler Pickering, ‘IP Conservancy,’ ” October 24, 2000. }~ +={Abelson, Hal:copyright conservancy idea, and+2;Babbitt, Chris;Prestel, Claire;Copyright’s Commons+27;Creative Commons (CC):Copyright’s Commons, as+27;IP Commons+27;Lessig, Lawrence:Copyright’s Commons, and+27} + +Friendly critiques started arriving immediately. 
Stallman considered the proposal a “good idea overall,” but as usual he objected to the words, such as “intellectual property” and “copyright protection,” which he considered “propaganda for the other side.”~{ E-mail from Richard Stallman to Lessig, September 11, 2000. See also http://www.gnu.org/philosophy/words-to-avoid.html. Stallman suggested calling the project the “Copyright and Patent Conservancy.” }~ Abelson, a friend and colleague of Stallman’s at MIT, was not finicky about word choices, but he did believe that software donations should be directed to the Free Software Foundation, not to the envisioned project. FSF already existed, for one thing, but in addition, said Abelson, “It may be detrimental to have people initially associate this [new project] too closely with the FSF. . . . We need to craft a public position that will unify people. An FSF-style ‘let’s undo the effects of all those evil people licensing software’ is not what we want here.”~{ E-mail from Hal Abelson to Lessig, September 12, 2000. }~ Some people suggested attracting people to the conservancy by having “jewels” such as material from the estates of deceased artists. Another suggested hosting special licenses, such as the Open Audio License, a license issued by the Electronic Frontier Foundation in 2001 that lets musicians authorize the copying and reuse of their songs so long as credit is given and derivative songs can be shared. +={Stallman, Richard:Copyright’s Commons, and;Abelson, Hal:Free Software Foundation, and+1;Electronic Frontier Foundation (EFF);Free Software Foundation} + +The most difficult issue, said Abelson, was the economics of the project. The care and maintenance of donations, such as the master version of films, could be potentially huge expenses. Digitizing donated works could also be expensive. Finally, there were questions about the economic incentives to potential donors. Would people really wish to donate works that have significant cash value? 
+ +Answers to such questions were hardly self-evident, but there were encouraging signs. After Lessig gave a speech at the University of Michigan in September 2000, a man came up to him and announced, “I’m one of the people who benefited by the Mickey Mouse Protection Act.” It was Robert Frost, Jr., son of the great poet. Frost said, “I obviously need to check with my family, but we may be interested in becoming a contributor to your conservancy.”~{ E-mail from Lawrence Lessig to ipcommons group, September 8, 2000. }~ If Robert Frost’s estate could come forward with his literary legacy, perhaps there were others willing to do the same. +={Frost, Robert, Jr.;Lessig, Lawrence:public speaker, as;Copyright Term Extension Act} + +When Berkman Center director Eric Saltzman joined the conversation, he raised a series of difficult questions about the whole idea: +={Saltzman, Eric+1} + +_1 Why would a person or corp. donate copyrighted materials? Larry’s draft implies a benefit to the IP owner — does this mean broader Internet facilitated use, and not merely a tax deduction? Under what circumstances, if any, does the Conservancy charge for use of its IP? If a user modifies a story, say, producing a screenplay, to whom does that screenplay belong? Would a motion picture based upon that screenplay owe $$ to the Conservancy? If so, how much (this is the damages phase of the /{Rear Window}/ case)?~{ This case, /{Stewart v. Abend}/, 100 S. Ct. 1750 (1990), required the copyright owners of Alfred Hitchcock’s movie /{Rear Window}/ to pay damages to the author of a book upon which the film was based. Saltzman was concerned that the conservancy would be liable for any illicit derivative works. See Daniel A. Saunders, “Copyright Law’s Broken Rear Window: An Appraisal of Damage and Estimate of Repair,” /{California Law Review}/ 80, no. 1 (January 1992), pp. 179–245. 
}~ Wouldn’t a new, hopeful band prefer to allow free use of its song(s) on a commercially promoted site like MP3.com rather than the Conservancy site? All asking: How to make the Conservancy into a useful garden, not a well-meaning weed patch of unwanted, neglected IP?~{ E-mail to ipcommons group, September 18, 2000. }~ + +By early October 2001, some of these questions had been provisionally answered. For example: Only digital works would be accepted initially. No limitations or restrictions would be set on the use of donated works. Prospective academic partners would include the University of California at Berkeley, Duke, Harvard, MIT, and Stanford. Lessig suggested both Richard Stallman and Jack Valenti as possible board members. The central goal was to develop a new sort of noncommercial space in cyberspace for the sharing and reuse of music, visual art, film, literature, nonfiction, academic work, software, and science.~{ E-mail from Lawrence Lessig to ipcommons group, November 12, 2000. }~ +={Valenti, Jack;Stallman, Richard:Copyright’s Commons, and} + +But many questions still hung in the air. Could the free software ethic really translate to other creative genres? Would tax incentives elicit donations of works? Would independent appraisals of donated works be needed? How would the conservancy search the titles of works and get permissions clearances? + +For all of its brainpower and commitment, Lessig’s rump caucus might not have gotten far if it had not found a venturesome source of money, the Center for the Public Domain. The center — originally the Red Hat Center — was a foundation created by entrepreneur Robert Young in 2000 following a highly successful initial public offering of Red Hat stock. As the founder of Red Hat, a commercial vendor of GNU/Linux, Young was eager to repay his debt to the fledgling public-domain subculture. 
He also realized, with the foresight of an Internet entrepreneur, that strengthening the public domain would only enhance his business prospects over the long term. (It has; Young later founded a print-on-demand publishing house, Lulu.com, that benefits from the free circulation of electronic texts, while making money from printing hard copies.) +={Young, Robert;Red Hat;GNU/Linux:Red Hat, and;Center for the Public Domain+2;public domain:Center for Public Domain+2} + +The director of the center, Laurie Racine, a former geneticist and business professor, was skilled at making shrewd strategic grants and “character bets” in public-domain activism. Because the center was not hobbled by the bureaucracy or timidity that afflicts many large foundations, it was able to make swift decisions and bold bets on innovative projects. (I came to work closely with Racine on a number of projects, including the co-founding of Public Knowledge, in 2001.) +={Racine, Laurie+1} + +Lessig met with Racine in October 2000. On a napkin, he sketched his idea for expanding copyright for authors. He came away with funding for a meeting at the Berkman Center and, later, a $100,000 commitment to launch the IP conservancy; the Center for the Public Domain eventually put up $1 million to get the project going, well before other funders saw the promise of the idea. Racine wanted her new center to be associated with “a project that has broad vision, credibility, range and staying power.” She saw Lessig’s project as having all of those things.~{ E-mail from Lawrence Lessig to ipcommons group, October 11, 2000, which contained e-mail from Laurie Racine to Lessig, October 25, 2000. }~ The grant was based more on the concept than a specific plan, however.
At the time it was not entirely clear if the project would own and manage digital works, host Web services that made things freely available, or provide legal and software tools — or something else.~{ E-mail from Lawrence Lessig to ipcommons group, November 12, 2000. }~ There was, nonetheless, a great sense of mission and urgency to get under way. + +Interestingly, two similar initiatives were also in the early stages of development. The Knowledge Conservancy, led by David Bearman at Carnegie Mellon University in Pittsburgh, had a similar model of accepting donations of materials and making them available online. It focused more on sponsorship donations and memberships, while Lessig’s group was more oriented toward legal research and Web hosting of works. Another project, OpenCulture.org, planned to compensate artists for contributions to the public domain, but apparently it never took off.~{ http://web.archive.org/web/*/http://Openculture.org. }~ Lessig and his group were not averse to joining forces with others, but they were intent on vetting their own business model, such as it was, before joining anyone else’s venture. +={Bearman, David;OpenCulture.org;Knowledge Conservancy} + +One turning point came in January 2001 after Saltzman had met with several lawyers at Wilmer, Cutler & Pickering, a prominent law firm in Washington, D.C.~{ Contained in e-mail from Christina Ritchie to ipcommons group, December 15, 2000. }~ After conversations with attorneys David Johnson and Michael W. Carroll, it became clear that a nonprofit trust managing donated material could face considerable liability if it turned out that the donors did not actually own the works. 
To explore this issue, Carroll produced a much-praised legal memo that raised a red flag: “What if we were fools, and the person who gave us the rights [to a work] actually never had the rights and suddenly we get sued for infringement?” asked Carroll.~{ Michael Carroll, “Potential Copyright Liability and DMCA Safe Harbor Relief for Creative Commons,” appendix to “Briefing Book for Creative Commons Inaugural Meeting,” May 7, 2001. }~ One successful lawsuit could sink the whole enterprise. +={Carroll, Michael W.;Johnson, David R.;Wilmer, Cutler & Pickering;Saltzman, Eric+8} + +The project was caught in a conundrum. It wanted to legalize a whole set of social practices for sharing and reusing creative works — but establishing a content intermediary for that purpose appeared to be financially prohibitive under the law. It could be hugely expensive to clear titles and indemnify the organization and future users against copyright infringement risks. +={copyright law:sharing economy vs.+1} + +For a few months, various people in Lessig’s orbit suggested complicated schemes to try to finesse the legal problems. For example, one way that the conservancy could reduce its liability would be to simply point to the Web locations of public-domain materials, in the style of Napster’s centralized index of songs. This would also avoid the nuisance and expense of clearing titles on thousands of works. Another idea was to create a “three zone system” of content — Zone A for content that the conservancy owned and licensed; Zone B for content that was merely hosted at the conservancy site with no copyright representations; and Zone C, a simple search engine with links to public-domain content. Each of these zones, in turn, raised a flurry of complicated, speculative legal issues.~{ E-mail from Eric Saltzman to ipcommons group, January 19, 2001. 
}~ +={Napster;Copyright’s Commons:three zone system of} + +None of the proposed alternatives got much traction, especially when Saltzman took a closer look at the realities of tax deductions for donors. Saltzman came to see that tax breaks would have very little incentive value for most potential donors, and establishing the cash value of donations would be difficult in any case. Moreover, if donors were getting little in return for their donations, they would be wary of signing a form indemnifying the conservancy against legal liability. On top of all this, Saltzman, like others, had misgivings about “the idea of the federal treasury contributing public money [in the form of tax expenditures].” In short, the conservancy approach seemed plagued with many complicated and perhaps insoluble problems. + +As if to keep the pot boiling, newcomers kept adding new thoughts. Two leading thinkers about the public domain in science, Paul Uhlir and Jerome H. Reichman, urged that the group expand its mission to include scientific research and take an international perspective.~{ E-mail from Paul Uhlir and Jerry Reichman, January 30, 2001. }~ (Uhlir directs the international scientific and technical information programs at the National Academy of Sciences/National Research Council; Reichman is an intellectual property professor at Duke Law School.) Both were keenly aware of the dangers to scientific progress if copyright and patent protection continued to expand. +={Reichman, Jerome H.;Uhlir, Paul;copyright law:expansion of} + +In January 2001, the caucus reached one point of consensus — that the primary function of this commons should be “to facilitate free/low-cost public use of original works.” It also agreed upon a name. Asked to vote on a name from a list that included IP Commons, Dot-commons, Sui Generous, IP Conservancy, and Public Works, Saltzman piped up, “May I suggest another name?
CREATIVE COMMONS.” When the final poll results were counted, Creative Commons was the clear winner with five votes, with one vote apiece for the remaining names. A later poll pitted “The Constitution’s Commons” against “Creative Commons” (CC) in a final runoff. The vote tally is lost to history, but we do know which name prevailed.~{ E-mails from ipcommons listserv to ipcommons group, January 11, 12, 13, 16, 2001. }~ +={Creative Commons (CC):development of+10|function of} + +Viewpoints quickly diverged on how a commons ought to be structured and what metrics of success should be used. Should it seek to maximize the number of donations or the number of downloads? Should it develop quality holdings in a given field or provide the widest possible breadth of content? Should it focus on social interaction and creative reuses of works? Should the focus be on producers or consumers of intellectual property? Should the organization focus on individuals or institutions? And how would it be different from other rights clearance organizations and content archives? The group seemed mired in a great cloud of uncertainty. + +For the next nine months, the group intensified its debate about how to build the envisioned conservancy. After law student Dotan Oliar sketched out possible “business models,” Saltzman persuaded a friend at McKinsey & Company, the consulting firm, to provide a pro bono assessment.~{ Dotan Oliar, “Memo on Creative Commons — Towards Formulating a Business Plan,” March 19, 2001. }~ “The McKinsey folks were very skeptical and, I think, had a hard time fitting this into their [business] framework,” recalled one student at the meeting, Chris Babbitt. After the meeting, he was convinced that Creative Commons could not possibly host a content commons: “It would just be huge amounts of material, huge costs, and we didn’t have the money for that.” ~{ Interview with Chris Babbitt, September 14, 2006. 
}~ +={Babbitt, Chris+1;McKinsey & Company;Oliar, Dotan} + +Feeling the need to force some concrete decisions, Saltzman and Lessig convened twenty-eight people for an all-day meeting in Hauser Hall at Harvard Law School, on May 11, 2001, to hash out plans. “What we’re trying to do here is /{brand the public domain}/,” Lessig said. A briefing book prepared by Chris Babbitt posed a pivotal question to the group: Should Creative Commons be structured as a centralized Web site or as a distributed, open-source licensing protocol that would allow content to be spread across cyberspace? The centralized model could be “an eBay for opensource IP” or a more niche-based commons for out-of-print books, film, or poetry. A mock Web site was actually prepared to illustrate the scenario. The home page read: “The member sites listed on the CommonExchange have been certified by Creative Commons to offer high-quality, non-infringing content on an unrestricted basis. Please feel free to use and pass these works along to others. We invite you to donate works of your own to help maintain the digital Commons.”~{ The mock-up can be found at http://cyber.law.harvard.edu/creativecommons/site.htm. }~ +={public domain:branding of} + +The distributed commons model would resemble the Chicago Mercantile Exchange or the New York Stock Exchange — “a trusted matchmaker to facilitate the transaction of securing rights,” according to the briefing book. “Just as corporations or commodities producers must meet certain criteria before they are listed on the Exchange, we could condition ‘listing’ in the Commons on similar criteria, albeit reflecting open source rather than financial values.”~{ “Briefing Book for Creative Commons Inaugural Meeting,” May 7, 2001, p. 10. }~ The virtue of the distributed model was that it would shift costs, quality control, and digitization to users. Creative Commons would serve mostly as a credentialing service and facilitator.
On the other hand, giving up control would be fraught with peril — and what if Creative Commons’ intentions were ignored? + +Several participants remember Lessig, Nesson, and Zittrain pushing for the distributed model, which seemed a bolder and riskier option. “Larry was the lead advocate for a distributed commons, where it would be focused on a license mechanism that we then would release to the world, and we let the world do with it what it will,” one attendee recalled. “At the time, I think, XML-type capabilities were just coming around, and Larry was very confident that that was the direction to go.”~{ Interview with Chris Babbitt, September 14, 2006. }~ XML, or Extensible Markup Language, is a programming language that uses self-created “tags” that help Internet users aggregate and share digital content residing on different computer systems. Lessig envisioned XML tags embedded in any Creative Commons–licensed work, which could then be used to identify shareable content on the Internet. +={Nesson, Charles;Zittrain, Jonathan;XML (Extensible Markup Language);Creative Commons (CC) licenses+3} + +This perspective carried the day, and the “conservancy” model of the commons was formally abandoned. CC would serve as a licensing agent. The licenses would enable authors’ works to be made available online in an easy, low-cost way without the full restrictions of copyright law. A standardized set of licenses would overcome the ambiguities of the fair use doctrine without overriding it. Creators could voluntarily forfeit certain copyright rights in advance— and signal that choice — so that others could freely reuse, share, and distribute CC-licensed works. +={authorship:Creative Commons, and+2} + +Jonathan Zittrain remembers being skeptical at first: “So this whole thing is just about some tags? It’s about /{licensing}/? How boring.” Upon reflection, however, he saw the value of CC licensing as a way to create a new default. “As soon as you realize — ‘Well, wait a minute! 
It’s just about authors being able to express their desires!’”~{ Interview with Jonathan Zittrain, September 28, 2006. }~ + +More than a menu of individual choices, the licenses would constitute an embryonic cultural order — a “constitutional system” to direct how text, images, and music could circulate in the online world, based on authors’ individual choices. But the new focus on licenses raised its own set of novel quandaries. What options should an author be able to choose? What suite of licenses made sense? While licensing terms may be boring and legalistic, the architecture could have potentially profound implications for cultural freedom — which is why the legal minds involved in the licenses spent so much time arguing seemingly obscure points. +={Creative Commons (CC):function of} + +However these debates were resolved, everyone agreed that it was time to incorporate Creative Commons as a nonprofit group, assemble a board, recruit a chief executive officer, and of course raise more money. The stated goal: “to expand the shrinking public domain, to strengthen the social values of sharing, of openness and of advancing knowledge and individual creativity.”~{ Oren Bracha and Dotan Oliar, “Memo: May 7th Consensus Regarding the Creative Commons Project,” August 20, 2001, p. 1. }~ +={public domain:CC licenses, and+1;Creative Commons (CC) licenses:public domain, and+1} + +There was a certain audacity if not unreality to the whole venture. Law professors don’t go around inventing ambitious public projects to revamp the social applications of copyright law. 
They don’t generally muck around with software, contract law, and artists to build an imagined “sharing economy.” “There was always this lingering suspicion in the back of my mind,” recalled Babbitt in 2006, “that it [Creative Commons] would be kind of a rich man’s folly, and this would just be some little thing — a niche experiment — that really wouldn’t turn out to have merited the sort of sustained interest of this high-caliber group of people.”~{ Interview with Chris Babbitt, September 14, 2006. }~ +={Babbitt, Chris} + +2~ Crafting the Licenses +={Creative Commons (CC) licenses+14} + +If Creative Commons licenses were going to enable artists to determine future uses of their works — on less restrictive terms than copyright law — what did actual artists think of the whole idea? To get a crude baseline of opinion, Laura Bjorkland, a friend of Lessig’s and manager of a used-book store in Salem, Massachusetts, conducted an unscientific survey. She asked about a dozen writers, photographers, painters, filmmakers, and a sculptor if they would be interested in donating their works to a commons, or using material from one? Most of them replied, “I’ve never even /{thought}/ of this before. . . .”~{ Laura Bjorkland, “Regarding Creative Commons: Report from the Creative Community,” in “Briefing Book for Creative Commons Inaugural Meeting,” May 7, 2001, pp. 16–19. }~ +={Bjorkland, Laura;Creative Commons (CC):function of} + +A classical composer said he “loved the idea of a Nigerian high school chamber group playing one of my string quartets without paying royalties . . . but I would not want a film studio or pop song writer using one of my themes on a commercial project, even if my name’s attached, without royalties.” Some artists worried about others making money off derivatives of their work. Many complained that artists earn so little anyway, why should they start giving away their work? 
Others were reluctant to see their work altered or used for violence or pornography. Photographers and visual artists found it “a little scary” to let their signature style be used by anyone. + +In short, there was no stampede for starting a public-domain conservancy or a set of licenses. Some worried that the CC licenses would be a “case of innovation where there’s no current demand.” Another person pointed out, more hopefully, that it could be a case of “changing the market demand with a new model.”~{ Oren Bracha and Dotan Oliar, “Memo: May 7th Consensus Regarding the Creative Commons Project,” August 20, 2001, p. 3, note 9. }~ + +The Lessig caucus was clearly struggling with how best to engage with the networked environment. Napster had demonstrated that, in the dawning Internet age, creativity would increasingly be born, distributed, and viewed on the Web; print and mass media would be secondary venues. For a society still deeply rooted in print and mass media, this was a difficult concept to grasp. But Michael Carroll, the Washington lawyer who had earlier vetted the conservancy’s liability issues, shrewdly saw network dynamics as a potentially powerful tool for building new types of digital commons. In 2001, he had noticed how a bit of Internet folk art had become an overnight sensation. Mike Collins, an amateur cartoonist from Elmira, New York, had posted the cartoon below on Taterbrains, a Web site.~{ http://politicalhumor.about.com/od/funnypictures/ig/100-Funny-Pictures/Confusing-Florida-Ballot.htm. }~ The image suddenly rocketed throughout the cyberlandscape. Everyone was copying it and sharing it with friends. +={Carroll, Michael W.+4;Collins, Mike+4;Napster} + +{ vs_db_1.png }http://viralspiral.cc/ + +% group{ +% +% Official Florida Presidential Ballot +% Follow the arrow and Punch the appropriate dot.
+% +% Bush Buchanan Gore Nadar +% (c) 2000 Mike Collins, Taterbrains.com +% +% }group + +Carroll observed: + +_1 [Collins] distributed his design initially without a motive to profit from it. But the scale of distribution surpassed what he imagined, and in a subsequent interview he expressed some resentment over those who had made money from T-shirts and other paraphernalia using his design. But he appears to have taken no actions to enforce his copyright, the notice notwithstanding. Copyright lawyers would consider the unlicensed distribution of this work “leakage” — that is, a violation of law but not worth pursuing. + +_1 But if we could take steps to make it cheap, easy and desirable for the Mike Collinses of the world to stick a CC tag on something like this before sending it out, “leakage” becomes legal, changing the terms of the debate.~{ E-mail from Michael Carroll to Molly Van Houweling and Larry Lessig, October 15, 2001. }~ + +CC tags could make nonproprietary culture the default, reversing the presumption of copyright law. Everyone agreed with this general approach, but implementing it was rife with difficult questions. As Saltzman recalled: “What kind of relationship did we want to encourage between the creator/licensor and the user? Should it be totally automated? Should it invite some back-and-forth? Should there be a requirement that licensors provide contact information?”~{ Interview with Eric Saltzman, April 11, 2006. }~ The General Public License for software had shown the feasibility of a license for safeguarding a commons of shared code. Could it work in other creative sectors? It would be critical to strike the right balance. As law student Chris Babbitt put it, “Too little protection for the donor’s interests and no one will donate; too little room for the users to use the work, and the service is useless.”~{ “Briefing Book,” p. 12. 
}~ +={Babbitt, Chris;Saltzman, Eric;General Public License (GPL):legal enforceability of} + +If there were going to be several licenses, the next question was how many, and of what sort? There are many different types of creativity, after all. Should each one have its own set of special licenses? The Berkman conclave agreed that there should be a public-domain license enabling creators to grant a nonexclusive, royalty-free license on their works, without the viral conditions of the GPL. As for other licenses, five ideas were put on the table for consideration: a license authorizing free reuses of a work so long as proper attribution is given to the author; a license authorizing noncommercial uses; and a license authorizing free reuses but prohibiting derivative uses. Other suggestions included a license authorizing academic uses only and a “timed donations” license, which would allow an artist to revoke a work from the commons after a stipulated number of years.~{ Ibid. }~ Neither of these two licenses gained support from the group. + +There were also lots of open questions about how to structure the specific terms of the licenses. Should they be perpetual? Will the licensor be liable for “downstream” uses of a work that are deemed an infringement? Will licensors be required to identify themselves? Should licensors be able to add their own separate warranties and representations? Crafting the licenses meant going beyond the abstract rhetoric of the commons. These licenses had to be serious, operational legal instruments that courts would recognize as valid. + +Another concern was making the new CC licenses compatible with existing licenses seeking similar goals. MIT had produced the first such license for its OpenCourseWare initiative, which allows Internet users to use the university’s curricula and syllabi (see chapter 12). To ensure that CC- and MIT-licensed content would be compatible, the CC lawyers deliberately wrote a license that would meet MIT’s needs.
Another license, the GNU Free Documentation License (FDL), was being used on Wikipedia, among other online sites. But the FDL, originally intended for software documentation materials, was incompatible with the CC licenses. Stallman refused to alter the FDL, and Wikipedia was already under way and committed to the FDL. This quirk of history meant that Wikipedia content and CC-licensed content could not legally be combined. As we will see in chapter 9, this was the beginning of a rancorous schism in the free culture world, and the beginning of a heated philosophical/political debate over which licenses truly promote “freedom.” +={Free Documentation License;GNU Project:GNU FDL;Stallman, Richard:GNU Project, and;Wikipedia:GNU FDL, and} + +As this overview suggests, licensing complexities can quickly soar out of control and become overwhelming. Yet the very point of the Creative Commons licenses was to simplify the sharing and reuse of digital material. CC planners wanted to help ordinary people bypass the layers of mind-numbing legalese that make copyright law so impenetrable and inaccessible. The Creative Commons was all about empowering individuals and avoiding lawyers. A proliferation of licensing choices would only lead to license incompatibilities, a Balkanization of content on the Internet, and more lawyers. Sharing and interoperability go together, as Stallman’s early experiences with his Emacs Commune showed. +={Emacs;Stallman, Richard:Emacs, and} + +Somehow, therefore, the licenses had to thread three needles at once. They needed to align (1) the technical dynamics of the Internet with (2) the legal realities of copyright law and (3) the everyday needs of people.
The ingenious solution was to create licenses on three layers: a “lawyer-readable” license that could stand up in court, a “human-readable” license that could be understood by ordinary people, and a “machine-readable” license that could be recognized by search engines and other software on the Internet. Each “layer” expressed the same license terms in a different way — an unexpected twist on Lessig’s concern for “fidelity in translation.” The formal license was called the “Legal Code” (or “legal source code”); the machine-readable translation of the license was called “Digital Code”; and the plain-language summary of the license, with corresponding icons, was the “Commons Deed” (or the “happy deed”). + +2~ Branding the Public Domain in Code +={code:branding the public domain in+8;Creative Commons (CC):code, and+8;public domain:branding of+8} + +As the lawyers brooded and debated the licensing terms, another complicated debate was unfolding on the tech side of CC: how to brand the public domain in software code. If code is law, then it was imperative for Creative Commons to find some way to represent CC licenses in digital code. Abelson, Lessig, and others understood that the future of the Internet was likely to include all sorts of automated, computer-to-computer functions. One of the best ways to promote a new body of “free content” on the Web, therefore, would be to develop machine-readable code that could be inserted into any digital artifact using a Creative Commons license. That way, search engines could more easily identify CC-licensed works by their terms of use, and help assemble a functionally accessible oeuvre of digital content that was free to use. 
+={code:law, as;law:code as+1;Abelson, Hal:CC licensing, and;Internet:freedom of;Internet:future of;Lessig, Lawrence:CC licenses, and;World Wide Web:free content on} + +At this time, in 2001, the founder of the World Wide Web, Tim Berners-Lee, and others at the World Wide Web Consortium, based at MIT, were trying to conceptualize the protocols for a new “logical layer” of code on top of the World Wide Web. They called it the Semantic Web. The idea is to enable people to identify and retrieve information that is strewn across the Internet but not readily located through conventional computer searches. Through a software format known as RDF/XML,~[* RDF, or Resource Description Framework, is a way to make a statement about content in a digital artifact. XML, or Extensible Markup Language, is a way to write a specialized document format to send across the Web, in which certain content can be marked up, or emphasized, so that other computers can “read” it.]~ digital content could be tagged with machine-readable statements that would in effect say, “This database contains information about x and y.” Through Semantic Web protocols and metatags on content, it would be possible to conduct searches across many types of digital content — Web pages, databases, software programs, even digital sensors — that could yield highly specific and useful results. +={Berners-Lee, Tim;Semantic Web+6;World Wide Web:Semantic Web+6|protocols for+6;RDF/XML} + +Unfortunately, progress in developing the Semantic Web has been bogged down in years of technical disagreement and indifference among the larger Web community. Some critics argue that the project has stalled because it was being driven by a small corps of elite software theorists focused on databases, and not by a wider pool of decentralized Web practitioners.
In any case, the Creative Commons became one of the first test cases of trying to implement RDF/XML for the Semantic Web.~{ For background, see “The Semantic Web: An Introduction,” at http://infomesh.net/2001/swintro; Aaron Swartz and James Hendler, “The Semantic Web: A Network of Content for the Digital City,” at http://blogspace.com/rdf/SwartzHendler; and John Markoff, “Entrepreneurs See a Web Guided by Common Sense,” /{New York Times}/, November 12, 2006. }~ The project was led initially by Lisa Rein, a thirty-three-year-old data modeler who met Lessig at an O’Reilly open-source software conference. Lessig hired her as CC’s first technical director in late 2001 to embed the CC legal licenses in machine-readable formats. +={Rein, Lisa+2;Swartz, Aaron;Lessig, Lawrence:CC licenses, and} + +Writing the XML code was not so difficult, said Rein; the real challenge was “deciding what needed to be included and how you represent the licenses as simply as possible.”~{ Interview with Lisa Rein, December 20, 2006. }~ This required the lawyers and the techies to have intense dialogues about how the law should be faithfully translated into software code, and vice versa. Once again, there were complicated problems to sort through: Should there be a central database of CC-licensed content? How could machine-readable code be adapted if the legal licenses were later modified? + +Rein got an unexpected assist in the project from programming whiz Aaron Swartz, who had heard about Creative Commons and volunteered to help write the RDF/XML code. Swartz was an esteemed member of the RDF core working group at the World Wide Web Consortium (W3C), and so was intimately involved in Semantic Web deliberations. He was also a fifteen-year-old junior high school student living with his parents in Chicago.
“I remember these moments when I was sitting in the locker room, typing on my laptop, in these [W3C] debates, and having to close it because the bell rang and I had to get back to class,” Swartz recalled. At CC, he was given the title of “Volunteer Metadata Coordinator.” His job was “to design the RDF schema and what the XML documents would look like, and work that out with my friends at the W3C and get their approval on things.”~{ Interview with Aaron Swartz, October 10, 2006. }~ For his troubles, Swartz received an in-kind donation of a laptop computer and travel expenses, rather than a salary. “At the time, I felt bad,” said Swartz. “They were a nonprofit doing work I believe in. I didn’t feel I should be taking their money when I didn’t need it.” With later help from Ben Adida, the CC team managed to develop an RDF that could attach CC licenses to Web pages. But since the Semantic Web protocols were still in flux, and not widely used, the effort amounted to a speculative gamble on future and widespread adoption of those protocols. +={RDF/XML+3;Adida, Ben;Swartz, Aaron;World Wide Web Consortium (W3C);Internet:future of|protocols of+3} + +Although inspired by the Semantic Web and by Lessig’s analysis in /{Code}/, the RDF/XML coding was also provoked by the growing specter of digital rights management (DRM), the reviled systems used by film and music companies to lock up their content. The Creative Commons dreamed of developing an “anti-DRM” code to express the idea, “This content is and shall remain free.” Professor Hal Abelson remembered that “we even used the phrase, ‘DRM of the public domain.’”~{ Interview with Hal Abelson, April 14, 2007. }~ The coinage that Lessig later popularized is “digital rights expression” — metadata that indicate that a digital object can be shared and reused. There was a passing fear that CC’s digital rights expression code might infringe on DRM patents; one company known for its aggressive patent defense raised concerns. 
But once it was made clear that the CC’s RDF code amounted to a label, and did not execute online rights on a person’s computer, the problem disappeared. +={Abelson, Hal:digital rights management, and;digital rights management (DRM);copyright law:digital age, in;Lessig, Lawrence:Code and Other Laws of Cyberspace} + +The machine-readable CC licenses were one of the first major buildouts of RDF protocols. Swartz ruefully recalled the reaction of his friends at W3C: “I got the sense that they thought it was sort of a silly project, that they were thinking about bigger and longer-term things.” Adida, who later replaced Swartz as the CC representative at the W3C, played a major role in helping develop the metatags and protocols. +={Adida, Ben;Swartz, Aaron+1} + +The RDF/XML coding was part of a larger CC strategy to brand the public domain via software code. Since RDF code alone is like a nail without a hammer, Creative Commons decided to develop a specialized search engine so that Internet users could locate CC-licensed content. Without such a search engine, Lessig said in April 2002, “there will be no way to demonstrate that we’ve produced anything useful.”~{ E-mail from Lawrence Lessig to Hal Abelson, April 22, 2002. }~ Swartz, who was not involved in the project, said, “I was impressed that they did it, because it was probably the biggest programming job I’d seen them do at the time.” In the meantime, the CC began a series of overtures to Google and Yahoo in an attempt to get their search engines to search for CC-licensed content. After years of lukewarm interest, both Google and Yahoo added CC-search capabilities in 2005. Creative Commons also nurtured the hope that once enough content contained CC metadata, software developers would develop new applications to let people browse, use, and distribute CC-tagged content. 
+={Google;Yahoo;Lessig, Lawrence:CC licenses, and} + +2~ The Action Shifts to Palo Alto +={Creative Commons (CC) licenses:evolution of+31} + +By the fall of 2001, Creative Commons was still an idea without definition. The project gained new momentum in September 2001 when Lessig hired a former student, Molly Shaffer Van Houweling, to be the first director of the organization. Van Houweling, a sophisticated yet plainspoken law scholar with strong executive skills, had just finished clerking for Supreme Court justice David Souter. She set about incorporating the Creative Commons, organizing the board, building a Web site, and hammering out final versions of the licenses. +={Souter, David;Van Houweling, Molly Shaffer+5} + +Once a key foundation grant was secured — $1 million from the Center for the Public Domain — the Creative Commons was incorporated in Massachusetts (home to many key backers of the project) on December 21, 2001. The first board members included several legal scholars (Boyle, Carroll, Lessig), a computer scientist (Abelson), two filmmakers (Saltzman and Davis Guggenheim, a friend of Lessig’s), and a Web publisher (Eldred). Charged with breathing life into a fragile idea, Van Houweling settled into a small office on the third floor of Stanford Law School (before the project was reassigned to basement offices). +={Abelson, Hal;Boyle, James:CC board, on|CC formation, and;Carroll, Michael W.;Center for Public Domain;public domain:Center for Public Domain;Saltzman, Eric;Eldred, Eric:Creative Commons, and;Guggenheim, Davis} + +In January 2002, Glenn Otis Brown, a lawyer and former student of Lessig’s, was hired as assistant director. Brown had been a law student at Harvard Law School, where he had known Van Houweling and taken a constitutional law course from Lessig. An affable Texan who had flirted with a journalism career, Brown had just finished a year of clerking for a circuit court judge. 
He was due to start a job in New York City the following week when he got a call from Van Houweling. “She and Larry were starting something to do with copyright at Stanford,” recalled Brown. “I knew pretty much nothing else about it except it was a nonprofit and it was going to be a fulltime job. . . . The next thing I knew, I was moving to California.”~{ Interview with Glenn Otis Brown, June 9, 2006. }~ +={Brown, Glenn Otis:CC formation, and+1} + +Lessig, Van Houweling, and Brown took the menu of licenses proposed by two graduate students, Dotan Oliar and Oren Bracha, and sought to refine them and make them as legally bulletproof as possible.~{ Oren Bracha and Dotan Oliar, “Memo: Presentation of Two Possible Creative Commons Layer 1 Architectures,” October 1, 2001. }~ They were torn about the process to adopt. “We didn’t want to do a collective drafting process with the entire Internet community,” said Van Houweling. “That didn’t seem practical. And yet we were a little nervous, I think, about not knowing what our potential user base would want to use.” Lessig was unfazed. Release of the licenses “isn’t going to be like a movie premiere,” he told Van Houweling, but more of an evolutionary process. The idea was to get the licenses in circulation, monitor their progress, and make changes as necessary.~{ Interview with Molly Van Houweling, March 21, 2006. }~ +={Lessig, Lawrence:CC licenses, and+1;Bracha, Oren;Oliar, Dotan;Brown, Glenn Otis:CC licensing, and} + +Two of the most prestigious law firms in Silicon Valley, Cooley Godward Kronish and Wilson, Sonsini, offered pro bono legal assistance to the effort. Attorney John Brockland, an expert in opensource software licenses at Cooley Godward and a former student of Lessig’s, was the architect of the final licenses, assisted by Catherine Kirkman, a licensing attorney at Wilson, Sonsini. 
Brockland recalled, “One of the drafting challenges was to write something that could be broadly useful across a wide range of copyrighted works and would not be tied to particular nuances of the way the copyright statute works.”~{ Interview with John Brockland, January 5, 2007. }~ Most copyright licenses are drafted for specific clients and particular circumstances, not for the general public and all types of copyrighted works. +={Brockland, John;Cooley Godward Kronish;Kirkman, Catherine;Wilson, Sonsini, Goodrich & Rosati;copyright law:licenses for;open source software:legal implications of} + +Much of the discussion, said Van Houweling, “revolved around the values that we wanted to embed in the licenses, and what were the outer limits of those values?” Ultimately, she said, “we opted for a menu of licenses that was weighted toward the nonproprietary [content]. . . . We wanted to subsidize a certain set of choices that are otherwise underserved.”~{ Interview with Molly Van Houweling, March 21, 2006.}~ The point was to facilitate the rise of a sharing culture, after all, not to replicate the baroque dysfunctions of copyright law. +={copyright law:purpose of} + +Since the CC licenses were trying to articulate a new “middle ground” of voluntary choices for sharing, it had to grapple with all sorts of fine legal complexities. How exactly should they define a derivative work? What should be considered a noncommercial reuse of a work? Can you dedicate a work to the public domain? + +Some artists felt that they ought to be able to prohibit derivative uses of their works in pornography or hate speech. Hal Abelson adamantly disagreed. If the licenses had an “offensive uses” clause, as either a standard or optional term, it would open up a can of worms and put Creative Commons on the side of censors. That view readily prevailed. +={Abelson, Hal} + +A primary concern was anticipating how the licenses might be interpreted by the courts. 
Wendy Seltzer was worried that the CC licenses might become entangled with court cases involving the fair use doctrine. She wanted to make sure that the CC licenses were not seen as limiting or waiving a person’s fair use rights in any way. Her concern, shared by many others, resulted in an explicit disclaimer stating that intention. “I’m really glad that we did that,” recalled Glenn Brown, then the assistant director of CC, “because we ended up pointing to that over and over and over again — to make clear that this was something that went above and beyond fair use.”~{ Interview with Glenn Otis Brown, June 9, 2007. }~ +={Brown, Glenn Otis:fair use, and;Seltzer, Wendy;fair use doctrine:CC licenses, and;Creative Commons (CC) licenses:fair use, and} + +To ensure that the licenses would be enforceable, the CC lawyers built on the same legal base as the GPL; the licenses were crafted not as contracts, but as conditional permissions based on copyright law. A contract requires that the licensee have the opportunity to accept or reject the terms of an agreement, which would not be the case here. A conditional permission, by contrast, is the legal prerogative of a copyright holder. She is simply offering advance permission to use a CC-licensed work (to share, modify, distribute, etc.) so long as the specified terms are respected. +={General Public License (GPL):legal enforceability of;copyright law:conditional permission license|enforceability of|CC licenses, and+21;Creative Commons (CC) licenses:copyright law, and+21|version 1.0 of+21|enforceability of} + +Countless lawyerly refinements of a very technical nature were made to the licenses to ensure that they would be specific as needed, vague enough to be versatile, and rigorous enough to survive a court’s scrutiny.~{ The lawyers also wrestled with a host of imponderables that had no obvious answers, such as: What if people started spoofing the licenses by using them in inappropriate ways? 
Should the Creative Commons establish a central registry for CC-licensed works as a way to ensure the credibility of the project? (After long debate, the idea was ultimately rejected.) Would the Creative Commons be held liable for contributory negligence if someone used a CC license on a copyrighted song? (The CC took its chances.) Would the Creative Commons lose its trademark if it allowed anyone to use its trademarked logo? (Several lawyers warned that CC licensing of its trademark could not be properly policed.) Glenn Otis Brown worried that the board might be sued for facilitating the unauthorized practice of law. “I don’t know how long I spent calling up different insurance brokers trying to get a quote,” he recalled. “People had no idea what I was talking about. We ended up going all the way to Lloyd’s of London to ask them,” said Brown, laughing. “They wrote back and said, ‘You can’t insure that.’ ” }~ + +The first set of licenses, version 1.0, was completed in the spring of 2002 and included eleven choices. The six basic licenses, listed here in order of least restrictive to most restrictive, included: +={Creative Commons (CC) licenses:types of+12} + +!_ Attribution (BY). +Authorizes free reuses (download, distribution, modifications, commercial uses, etc.) so long as the author is credited for the original creation. + +!_ ShareAlike (SA). +Authorizes free reuses so long as credit is given and the new work is licensed under the same terms. + +!_ No Derivatives (ND). +Authorizes free reuses so long as the new work is unchanged and in whole. + +!_ NonCommercial (NC). +Authorizes free reuses so long as they are not commercial in nature. + +!_ NonCommercial ShareAlike (NC-SA). +Authorizes free reuses so long as the new work is passed along on the identical terms as the original work (so, for example, works that use a NonCommercial ShareAlike work will also have to be distributed as NonCommercial ShareAlike works). + +!_ NonCommercial No Derivatives (NC-ND). 
+Authorizes free reuses so long as credit is given, no changes are made, the work is kept intact, and it is not used commercially. This is the most restrictive CC license. + +Because each of these six basic choices can be combined with other CC licenses, copyright holders had five additional choices: + +!_ Attribution-ShareAlike (BY-SA). +Authorizes free reuses so long as the author is credited and the new work is licensed under the same terms. + +!_ Attribution-NonCommercial (BY-NC). +Authorizes free reuses so long as the author is credited and the new work is used for noncommercial purposes. + +!_ Attribution NonCommercial-ShareAlike (BY-NCSA). +Authorizes free reuses so long as the author is credited, the new work is used for noncommercial purposes, and the new work is passed along using this same license. + +!_ Attribution-No Derivatives (BY-ND). +Authorizes free reuses so long as the author is credited and the new work is unchanged and in whole. + +!_ Attribution No Derivatives-ShareAlike (BY-ND-SA). +Authorizes free reuses so long as the author is credited, the new work is unchanged and in whole, and the new work is passed along using this same license. + +It soon became clear that very few people were choosing any of the five licenses that did not require attribution of the author (the SA, ND, NC, NC-SA, and NC-ND licenses). So in May 2004 Creative Commons decided to “retire” those licenses, leaving the six most commonly used ones today (BY, BY-SA, BY-NC, BY-NC-SA, BY-ND, and BY-ND-SA). + +Still another choice was offered to copyright holders, a “public domain dedication,” which is not a license so much as “an overt act of relinquishment in perpetuity” of any rights in the work. The public domain dedication places no restrictions whatsoever on subsequent reuses of the work. 
+={public domain dedication} + +To the first-time user, the licenses may seem a little daunting.~{ A FAQ at the Creative Commons Web site answers the most frequent user questions about the licenses. It is available at http://wiki.creativecommons.org/. }~ The full implications of using one or another license are not immediately obvious. The tagline for the licenses, “Some Rights Reserved,” while catchy, was not really self-explanatory. This became the next big challenge to Creative Commons, as we see in chapter 6: how to educate creators about a solution when they may not have realized they even had a problem. + +By December 2002, the three levels of code — legal, digital, and human — had been coordinated and finalized as version 1.0. The organization was set to go public, which it did at a splashy coming-out party in San Francisco. The gala featured appearances by the likes of rapper DJ Spooky (an ardent advocate for remix culture) and a London multimedia jam group, People Like Us. Lessig proudly introduced the licenses as “delivering on our vision of promoting the innovative reuse of all types of intellectual works, unlocking the potential of sharing and transforming others’ work.”~{ http://creativecommons.org/press-releases/entry/3476. }~ +={DJ Spooky;People Like Us;code:levels of;Lessig, Lawrence:CC licenses, and+2} + +Perhaps the biggest surprise was a set of video testimonials from both ends of the copyright spectrum — John Perry Barlow of Electronic Frontier Foundation and Jack Valenti of the Motion Picture Association of America. 
With uncharacteristic solemnity, Barlow said: “I personally think there is something deeply oxymoronic about the term ‘intellectual property.’ But as long as we have set up a huge matrix of laws and social understandings that traffic in that assumption, we have to meet the conditions as we have found them and use what exists to preserve the human patrimony.” The silvermaned Valenti saluted the “Lessig compact” that is both “respectful of, and supports, copyright” while allowing people “to give up some of their copyrighted material, or all of it, and put it on the creative commons for others to view it or hear it.” “Larry, I hope that my supporting you in this doesn’t ruin your reputation,” Valenti joked.~{ See http://mirrors.creativecommons.org/cc-barlow-valenti.mov. }~ +={Barlow, John Perry;Electronic Frontier Foundation (EFF);Valenti, Jack+1} + +Many copyfighters were not thrilled to have an arch-adversary like Valenti praise their efforts at their moment of triumph. Yet that was a deliberate part of Lessig’s strategy: to assert a politically neutral middle ground from which to remake the social landscape of creativity. The question raised in some people’s mind was whether something so politically unassailable could have significant impact. Still others saw it as a welcome base upon which to build a new sharing economy. + +The CC launch party can be seen as a watershed moment in the struggle to protect the public domain. It announced a novel gambit to transcend the political impasse over copyright reform, a way to address copyright abuses without getting embroiled in a pitched and unwinnable confrontation. It legitimized all sorts of activities that had historically been seen as morally problematic, if not illegal. While building on the idea of the public domain developed over the preceding twenty years, Creative Commons inaugurated a new story about the commons, creativity, and the value of sharing. 
Watching the rocking party and savoring the hard work completed, Glenn Brown remembers a friend musing to him, “I wonder if we’ll see another legal hack like this in our careers.” +={Creative Commons (CC) licenses:launch of|public domain, and;public domain:CC licenses, and} + +1~ 5 NAVIGATING THE GREAT VALUE SHIFT + +/{Amateurs discover new tools for creating value: open networks and self-organized commons.}/ + +“It was never really clear to me what was going to happen after we launched the licenses,” recalled Glenn Otis Brown. “Would our work be done?” The intense push to craft the licenses and release them now over, Brown and his colleagues were only too happy to ease up in their work. (Van Houweling had left in 2002 to teach law; she is now at the University of California at Berkeley.) Despite his enthusiasm for the licenses, Brown had his private doubts about their future success. “To be honest, I was pretty scared,” he said. “I was worried they were going to go nowhere, and that I was going to be blamed for that.”~{ Interview with Glenn Otis Brown, August 10, 2006. }~ +={Brown, Glenn Otis:CC licensing, and+1;Van Houweling, Molly Shaffer;Creative Commons (CC) licenses:evolution of+10} + +In January 2003, a month after the CC licenses were announced, however, the project took on a new urgency. The Supreme Court handed down its /{Eldred}/ ruling, sending a clear signal that the courts were not much interested in reforming copyright law. Soon after this crushing disappointment, Lessig began to intensify his focus on the Creative Commons. “The pressure really increased,” said Brown, “but that’s also when things started to get a lot more fun. That’s when the staff started working on things /{all the time}/ and we got a stable, permanent staff, instead of contractors.” +={Eldred v. Reno/Eldred v. 
Ashcroft:Copyright’s Commons, and|Supreme Court, and;Lessig, Lawrence:CC licenses, and+9} + +What began as a modest licensing experiment began to take on the character of a permanent campaign. Working from the themes in /{The Future of Ideas}/, Lessig came to see the Creative Commons as more than a nonprofit custodian of some free public licenses; it was a champion for a bracing new vision of culture. This broader orientation meant reaching out to various creative sectors and the general public with messages that were both practical (“here’s how to use the licenses”) and idealistic (“you, too, can build a better world”). +={Lessig, Lawrence:The Future of Ideas;Creative Commons (CC):function of+5|social movement, as+5} + +The band of enterprising law scholars and techies who once saw their challenge as one of bolstering the public domain began to widen their gaze to the vast world of creativity and democratic culture. Social practice, not theory, became the animating force in their work. + +This meant reaching out to writers, musicians, filmmakers, photographers, librarians, academics, and other creators. All faced worrisome threats to their freedoms in the digital environment, as we saw in chapter 2. Lessig and the small Creative Commons staff made it their job to speak to these threats, promote the licenses, and set forth an alternative to the corporate media’s vision of culture. + +“Our single, overarching aim,” said Lessig in December 2002, “is to build the public domain, by building projects that expand the range of creative work available for others to build upon.”~{ Lawrence Lessig, Creative Commons press release, December 19, 2002; “CC in Review: Lawrence Lessig on How It All Began” [weekly e-mail series], October 12, 2005. 
}~ In an attempt to credential the licenses, the Creative Commons touted endorsements by a number of educational institutions (MIT, Rice University, Stanford Law School), public-spirited tech enterprises (iBiblio, the Internet Archive, O’Reilly & Associates), and venturesome musicians (DJ Spooky, Roger McGuinn of the Byrds). +={DJ Spooky;McGuinn, Roger;Lessig, Lawrence:public domain, and;public domain:CC licenses, and;Creative Commons (CC) licenses:public domain, and} + +As if by spontaneous replication, people from far-flung corners of the Internet began to use the licenses on their blogs, their MP3 music files, their photographs, their books. Week after week, the Creative Commons’s blog trumpeted the new recruits — the blog for book designers (Foreword), the database of metadata about music (MusicBrainz), the online storytelling Web site (Fray), the 2004 presidential campaign of Dennis Kucinich. +={Kucinich, Dennis} + +But the larger challenge for Creative Commons was finding ways to reach new constituencies who knew little about technology or copyright law. Why should they bother to use a CC license? This was a major public education challenge. Besides appearing at many conferences and cultivating press coverage, Glenn Brown spent a lot of time developing a Web site that could explain the licenses clearly. Great pains were taken to develop a precise, intuitive user interface to help people learn about the licenses and choose the right one for them. Copyright law was complicated enough; the CC licenses had to be seen as a simple alternative. +={Brown, Glenn Otis:CC licensing, and+1;copyright law:licenses, and+3} + +Advertisers have plenty of trouble communicating the virtues of mouthwash in a crowded public sphere. Could something as dry and forbidding as copyright law ever be made lucid and even hip? 
Although not a trained marketer, Glenn Brown had a knack for communicating things simply. Working with graphic designer Ryan Junell and Web designer Matt Haughey, Brown developed a site that combined a certain institutional authority with contemporary pizzazz. This style was on abundant display in a series of jaunty and entertaining Flash animations that explained the rationale for Creative Commons. +={Haughey, Matt;Junell, Ryan+1} + +Junell designed the now-familiar CC logo as a deliberate counterpoint to the copyright logo, ©. “I thought that Creative Commons should have something like the copyright logo since it deals with the same stuff,” said Junell. “It should be something really simple and pure.”~{ Interview with Ryan Junell, September 23, 2006. }~ Junell set his sights on making the CC logo a standard, ubiquitous symbol. He hoped that it would eventually be incorporated into the Unicode, an international registry for every character in any language used in software, from % to ∆ to ≠. +={Creative Commons (CC):logo of;Unicode} + +In promoting its licenses, Creative Commons fashioned itself as a neutral, respectable defender of individual choice. “Our tools are just that — tools,” said Haughey, who was then developing the CC Web site. “Our model intentionally depends on copyright holders to take responsibility for how they use those tools. Or how they don’t use them: If you’re unsure and want to keep your full copyright, fine. If you choose to allow others to re-use your work, great.”~{ Matthew Haughey, “Blogging in the Public Domain,” Creative Commons blog post, February 5, 2003, at http://creativecommons.org/weblog/entry/3601. }~ While many CC users were enthusiastically bashing copyright law, Lessig and the CC staff made it a point to defend the basic principles of copyright law — while extolling the value of collaborative creativity and sharing under CC licenses. 
+={Haughey, Matt} + +Despite praise by the heads of the Motion Picture Association of America and the Recording Industry Association of America, the licenses nonetheless did attract critics. Some in the music industry regarded the licenses as a Trojan horse that would dupe unsuspecting artists. David Israelite, president and CEO of the National Music Publishers’ Association, told /{Billboard}/, “My concern is that many who support Creative Commons also support a point of view that would take away people’s choices about what to do with their own property.”~{ Susan Butler, “Movement to Share Creative Works Raises Concerns in Music Circles,” /{Billboard}/, May 28, 2005.}~ /{Billboard}/ went on to cite the cautionary tale of a songwriter who was being kept alive by his AIDS medications, thanks to the royalties from a highly successful song. “No one should let artists give up their rights,” said Andy Fraser of the rock group Free. Other critics, such as John Dvorak of /{PC Magazine}/, called the CC licenses “humbug” and accused them of adding “some artificial paperwork and complexity to the mechanism [of copyright],” while weakening the rights that an author would otherwise enjoy.~{ John C. Dvorak, “Creative Commons Humbug: This Scheme Doesn’t Seem to Benefit the Public,” PC Magazine, July 28, 2005. }~ Still others had cultural scores to settle and criticized “anything advocated by clever, sleek young lawyers.”~{ Researchers at the Economic Observatory of the University of Openness, “Commercial Commons,” on the online journal /{Metamute}/, at http://www.metamute.org/?q=en/Commercial-Commons. }~ +={Creative Commons (CC) licenses:critics of;Israelite, David;Recording Industry Association of America (RIAA);Dvorak, John;Fraser, Andy} + +Putting aside such quibbles and prejudices, the CC licenses seemed a benign enough idea. Given its reliance on copyright law, how could any entertainment lawyer object? 
Yet the real significance of the licenses was only appreciated by those who realized that a Great Value Shift was kicking in. For them, the licenses were a useful legal tool and cultural flag for building a new sharing economy. +={Great Value Shift} + +2~ The Great Value Shift +={Great Value Shift+9;Internet:Great Value Shift, and+9} + +In retrospect, the CC licenses could not have been launched at a more propitious moment. Networked culture was exploding in 2003. Broadband was rapidly supplanting dial-up Internet access, enabling users to navigate the Web and share information at much faster speeds. Prices for personal computers were dropping even as computing speeds and memory capacity were soaring. Sophisticated new software applications were enabling users to collaborate in more powerful, user-friendly ways. The infrastructure for sharing was reaching a flashpoint. + +Put another way, the original promise of the Internet as a gift economy was coming into its own. Originally built as a platform for efficient sharing among academic researchers, the Internet by 2003 was being used by some 600 million people worldwide.~{ Nielsen/Net Ratings estimated 585 million Internet users in 2002; the International Telecommunications Union estimated 665 million. See http://www2.sims.berkeley.edu/research/projects/how-much-info-2003/internet.htm. }~ The open framework for sharing was no longer just a plaything of technophiles and academics; it was now insinuated into most significant corners of the economy and social life. As it scaled and grew new muscles and limbs, the Internet began to radically change the ways in which wealth is generated and allocated. +={Internet:gift economy of+1}
On the Internet, wealth is not just financial wealth, nor is it necessarily privately held. Wealth generated through open platforms is often /{socially created value}/ that is shared, evolving, and nonmonetized. It hovers in the air, so to speak, accessible to everyone. +={Internet:socially created value of+1;value:creation of} + +Creative Commons had the good fortune to introduce its licenses just as the Great Value Shift was picking up momentum. The types of distributed innovation first seen in free software were now popping up in every imaginable corner of cyberspace. The social content was not just about listservs and newsgroups, but instant messaging networks, Web logs, podcasts, wikis, social networking sites, collaborative archives, online gaming communities, and much else. +={free software:Great Value Shift, and} + +“What we are seeing now,” wrote Yochai Benkler in his book, /{The Wealth of Networks}/, “is the emergence of more effective collective action practices that are decentralized but do not rely on either the price system or a managerial structure for coordination.” Benkler’s preferred term is “commons-based peer production.” By that, he means systems that are collaborative and non-proprietary, and based on “sharing resources and outputs among widely distributed, loosely connected individuals who cooperate with each other.”~{ Yochai Benkler, /{The Wealth of Networks: How Social Production Transforms Markets and Freedom}/ (New Haven, CT: Yale University Press, 2006), p. 60. }~ +={Benkler, Yochai:The Wealth of Networks+1;commoners:sharing by;commons-based peer production} + +Informal social relationships, working in the unregimented, free space of open platforms, were beginning to change economic production and culture. “Behaviors that were once on the periphery— social motivations, cooperation, friendship, decency — move to the very core of economic life,” Benkler argued.~{ Benkler at the iCommons Summit, Dubrovnik, Croatia, June 15, 2007. 
}~ Money and markets do not necessarily control the circulation of creativity; increasingly, online communities — large numbers of people interacting with one another on open platforms — are the engines that create value. + +The CC licenses were launched at a moment when the new modes of value creation were just gaining a foothold. + +We do not yet have well-accepted theoretical models for understanding this new “socioeconomic space”; the online environments are still so new, and much is still in flux.~{ An excellent overview of these new spaces is Don Tapscott and Anthony D. Williams, /{Wikinomics: How Mass Collaboration Changes Everything}/ (New York: Portfolio, 2006). }~ But it has not escaped the notice of major corporations that online social dynamics can result in some radically more effective models for organizing employees and engaging with customers. A /{BusinessWeek}/ cover story touted “The Power of Us” in June 2005, profiling the ways in which companies like Procter & Gamble use mass collaboration for R&D; Hewlett-Packard had created a virtual stock market among its staff to gather collective estimates that have improved sales forecasts.~{ Robert D. Hof, “The Power of Us: Mass Collaboration on the Internet Is Shaking Up Business,” /{BusinessWeek}/, June 20, 2005, pp. 73–82. }~ The /{Economist}/ has written about the “fortune of the commons” that can result when there are open technical standards, and business professors such as Henry Chesbrough have examined new “open business models.”~{ “The Fortune of the Commons,” Economist, May 8, 2003; Henry Chesbrough, /{Open Business Models: How to Thrive in the New Innovation Landscape}/ (Cambridge, MA: Harvard Business School Press, 2006). }~ +={Chesbrough, Henry;Hewlett-Packard;Procter & Gamble} + +Before looking at the many creative sectors that have adopted the CC licenses — the focus of chapter 6 — it helps to understand the Great Value Shift that open networks have catalyzed. 
In one market after another, open networks have helped new competitors slash all sorts of business costs while enhancing their capacity to innovate and respond to changing consumer demand. Open networks have also given rise to new types of social platforms on the Web, often known as Web 2.0, which are making it economically attractive to serve niche markets. This is the so-called Long Tail. Yet even these sweeping changes in market structure are facing a qualitatively different kind of competition — from the commons sector. It turns out that informal online communities based on trust, reciprocity, and shared social norms can perform a great many tasks more efficiently than markets, and with some measure of social pleasure and fun. +={Long Tail;Web 2.0:Great Value Shift, and} + +2~ The Endangered Economics of Centralized Media +={Centralized Media+7;media:See also Centralized Media} + +The dominant systems of communications in the twentieth century — radio, broadcast and cable television, recorded music, theatrical film — required large amounts of centralized capital, corporate management, and professional control. These media have very different business models and practices, but they all rely upon centralized control of capital and distribution to large, fairly undifferentiated audiences. Each depends upon efficiencies derived from high-volume sales and a limited spectrum of commercial choices. + +Centralized Media also dictate certain economic and social identities for people. There are “sellers,” who are the prime source of expertise, innovation, and production, and there are “consumers,” who passively buy, or don’t buy, what is offered. Sellers mostly determine what choices are offered to buyers, and they tend to have greater market power and information than consumers. Interactions between sellers and consumers are mostly brief and transactional; there is little ongoing conversation or relationship between seller and buyer. 
+ +Much of the strength of the Centralized Media derives from its control of critical “choke points” of product development and distribution. By controlling the technical standards for a product, its retail distribution or its brand identity, a company can maximize its competitive advantages and limit competition. The high concentration of capital needed to communicate through a Centralized Media outlet is itself a useful way to limit competition. No surprise that only large, publicly traded corporations and rich individuals own and control Centralized Media — and that their messages tend to be overtly commercial or commercial-friendly. +={Centralized Media:choke points of|competition, and+4} + +While this paradigm is obviously quite attractive for those investors with a piece of the action, it also entails some very large costs that are not readily evident. Companies have to spend a lot on advertising to build a brand identity that can enhance sales. Their “blockbuster” business model entails large upfront costs in order to reap large financial returns. Centralized Media require expensive systems for finding, recruiting, and developing stars; an elaborate marketing apparatus to find and retain customers; and legal and technological means to identify and prosecute “piracy” of creative works. +={Centralized Media:piracy, and;piracy} + +In a more static environment, this model worked fairly well. But as the Internet revolution proceeded in the 2000s, distributed media started to undercut the economic logic of Centralized Media. Your personal computer, connected to other computers via inexpensive telecommunications and software, can do things more cheaply. 
Distributed online media not only avoid the costly overhead needed by Centralized Media, they can generate dynamic, interactive, and sociable types of communication: /{user-generated content!}/ While this amateur content is wildly variable in quality, it does have this virtue: it is more culturally diverse and authentic than the homogenous, overproduced programming of Centralized Media. And because distributed media are not economically driven to amass large, undifferentiated audiences, the content can be more idiosyncratic, passionate, and, in its own ways, creative. There is no “fifty-seven channels and nothing on” problem. The problem is how to locate what you want from among millions of choices. +={Centralized Media:Internet vs.;Internet:Centralized Media vs.} + +For all these reasons — but mostly because of the economics— conventional media are becoming more vulnerable to the most advanced Internet-based competitors (Amazon, eBay, Google, Yahoo) as well as to new types of nonmarket social production (e.g., Craigslist, Wikipedia, special-interest affinity groups). We may even be approaching a point at which the historic cost structures and risk management strategies of major media companies are no longer sustainable. Some analysts fret about the long-term viability of American newspapers, whose stock value fell by 42 percent, or $23 billion, between 2005 and 2008. Broadcast and cable television have similar fears. They worry, correctly, that Internet venues are siphoning away “eyeballs” by providing more timely and convenient alternatives. While the amateur videos of YouTube may not have the production quality of NBC, broadcast and cable television cannot ignore an upstart platform that in 2006 was attracting more than 100 million video downloads /{per day}/ and had a market valuation of $1.65 billion when bought by Google that year. No wonder Cable News Network co-hosted a presidential debate with YouTube in 2007; it needed to reassert its cultural relevance. 
+={Amazon;eBay;Google;Yahoo;Craigslist;Wikipedia;YouTube} + +Large media companies are struggling to support some huge financial, administrative, and marketing burdens simply to “tread water” and retain some measure of their customary market dominance. This helps explain why Centralized Media are so keenly focused on influencing Congress and the Federal Communications Commission. They want to lock in competitive advantages through regulation. (Consider the fierce battles over media ownership rules, spectrum allocation policies, anticopying technology mandates such as the “broadcast flag,” new copyright and trademark protections, must-carry rules for cable operators, and on and on.) Centralized Media’s great interest in securing legal and regulatory privileges for themselves suggests their relative weakness and decline. For them, it is easier to chase market advantages through political interventions than through innovation, superior performance, and price. +={Centralized Media:ownership rules for;media:ownership of+3} + +2~ The Economic Advantages of Open Media +={Centralized Media:Internet vs.+7;Internet:Centralized Media vs.+7;media:open+7} + +By contrast, a profusion of new ventures are finding that a company can thrive on the open networks of the Internet. Even a startup without brand recognition or regulatory preferences can compete on the merits — price, quality, responsiveness — against entrenched giants. They can leverage user-generated content and the vast reservoir of value previously known as the public domain. The success of thousands of new Internet businesses reflects an epochal shift in the terms of competition — a Great Shift in how value is created. +={Great Value Shift} + +The most significant shifts in the history of capitalism have come when new mechanisms lower the costs of managing risk and serving latent market demand. We are apparently in such a stage of economic transformation today. 
The genius of the Renaissance banks and the Dutch insurance and shipping companies, for example, was to reinvent the structure of markets through new financial and legal instruments that enabled commercial trust and transparency to work on a larger scale. The limited liability corporation was also a powerful innovation for diversifying risk, coordinating people, and deploying capital on a scale that was previously impossible.~{ I am indebted to my friend John Clippinger for this insight, as explained in his book /{A Crowd of One: The Future of Individual Identity}/ (New York: Public Affairs, 2007), chapter 7, “Transforming Trust: Social Commerce in Renaissance Florence,” pp. 97–114. }~ +={markets:restructuring+1} + +In like fashion, the Internet is now facilitating some deep shifts in the cost structures and scale of markets. Innovative online business models are significantly undercutting the (expensive) cost structures of traditional Centralized Media, and in the process sometimes creating entirely new sorts of markets (search engine advertising, discounted travel, specialty niches) and more open, competitive markets. +={Internet:socially created value of+3} + +One of the most intriguing developments is a set of “open business models” that shun closed, proprietary technical standards and content restrictions. Unlike the classic industrial business models of the twentieth century, the new open business models make money by aggressively insinuating themselves into open networks. They are able to identify new trends, mobilize talent, interact with customers, and develop customized products more rapidly than competitors. They are also building ingenious new business models “on top of ” social behaviors of online users. (See chapter 10.) +={open business models} + +MySpace, for example, hosts a social network of more than 100 million “friends” (a claim that, even if inflated by inactive user accounts, is indisputably huge). 
eBay consolidated the world’s garage sales and flea markets into a more efficient market by developing Web-based software that “manages” social trust and reputation and evolves with user interests. Amazon has become a premier online retail Web site by hosting a platform open to all sorts of online vendors and spurred by the recommendations and collective purchase records of buyers. Google devised its famous PageRank search algorithms to aggregate the Web-surfing “wisdom of the crowd,” making online searches vastly more useful. +={Amazon:eBay;Google;MySpace} + +The basic point is that open media platforms are significantly reducing business coordination and communication costs by leveraging people’s natural social behaviors in ways that conventional businesses simply cannot. Open Web platforms allow large and diverse groups to organize themselves and their projects more easily. Individuals have greater self-defined choice and the capacity to express their own market demand; they need not be constrained by the choices presented to them in the market. The Internet has opened up gushing channels of virtual word of mouth, which is a more trusted form of consumer information than advertising. Those companies with excellent products use favorable word of mouth to reduce their marketing and distribution costs. “Smart mobs” can elevate obscure bloggers and Web sites because they regard them as more trustworthy, expert, and authentic (or entertaining) than those of Centralized Media. Many conservatives now trust the Drudge Report and Free Republic more than CBS News, just as many liberals trust DailyKos and Huffington Post more than CBS News. Indeed, the very genre of “objective journalism” — an artifact of the economic necessity of appealing to broad, lowest-commondenominator audiences — is now in jeopardy. 
+={Internet:virtual word of mouth on;media:participatory;Centralized Media:competition, and+3;markets:restructuring+1} + +As people migrate to the Web, advertising revenues for Centralized Media are eroding further, setting off a scramble to devise new advertising vehicles to reach fugitive Internet users. It is a chase that cannot be avoided because that’s where the eyeballs are. Moreover, the value proposition of open networks is too attractive to ignore. But because that value proposition is so radically different from conventional media — a company must revamp its organizational structures, strategies, marketing, etc. —it raises some wrenching choices for Centralized Media: Should they “go native” and let their products loose on open networks? Or would that destroy their entrenched business models for television shows, theatrical films, music CDs, and other content? The vast infrastructure and business practices of Centralized Media cannot be summarily abandoned, but neither can they remain economically tenable over the long haul without significant changes. For now, Centralized Media are attempting an ungainly straddle of both worlds. +={Centralized Media:alternatives vs.} + +2~ Web 2.0: A New Breed of Participatory Media +={media:participatory+17;Centralized Media:alternatives vs.+17;Web 2.0:applications+17} + +At the time, Eric Eldred’s Web repository of public-domain books could be seen as a modest little experiment. In retrospect, it can be seen as a dawning cultural archetype. It betokened the power of the amateur.~{ Dan Hunter and F. Gregory Lastowka, “Amateur-to-Amateur,” /{William and Mary Law Review}/ 46, no. 951 (December 2004). }~ While Centralized Media continue to have greater resources, talent, and political clout, amateurs are finding their voices and new online venues. A significant cultural emancipation is under way. Creative expression need no longer cater to corporate gatekeepers and the imperatives of the mass market. 
A no-name amateur can produce useful and influential work without having to go through New York, Los Angeles, London, or Tokyo. The do-ityourself culture is flourishing and expanding. With little money or marketing, anyone can launch a viral spiral that, with enough luck and panache, can sweep across global culture. +={Eldritch Press} + +It is only now dawning on some media chieftains that the biggest threat to Centralized Media is not piracy or online competitors, but /{nonmarket alternatives}/: you, me, and the online friends that we can attract. Hollywood and record labels might rail against “pirates” and demand stronger copyright protection, but the real longterm threat to their business models is the migration of consumer attention to amateur creativity and social communication. Social production on open networks has become a powerful creative and economic force in its own right. Ordinary people can now find their own voices and develop folk cultures of their own that may or may not use the market. +={piracy;Centralized Media:piracy, and} + +After the tech bubble of 2000–2001 burst, the surviving techies and entrepreneurs developed a remarkable range of cheap, versatile software that took to heart the lessons of free software and open networks. Blogs, wikis, social networking software, peer-to-peer file-sharing and metadata tools began to migrate from the tech fringe to the mainstream. There have been many conceptual frames and buzzwords associated with this new order — “smart mobs” (Howard Rheingold), “the wisdom of crowds” (James Surowiecki), “wikinomics” (Don Tapscott and Anthony D. Williams) — but the catchphrase that has gained the most currency is “Web 2.0,” a term launched by Tim O’Reilly in a canonical 2003 essay.~{ Tim O’Reilly, “What Is Web 2.0: Design Patterns and Business Models for the Next Generation of Software,” O’Reilly Media Web site, September 30, 2005, at http://www.oreilly.com/pub/a/oreilly/tim/news/2005/09/30/what-isweb-20.html. 
}~ +={O’Reilly, Tim;Rheingold, Howard;Surowiecki, James;Tapscott, Don;Williams, Anthony D.;free software:economic effects of;open source software:economic implications of} + +O’Reilly, a prominent publisher of books on open-source software, coined Web 2.0 to describe the fluid social dynamics that occur on open Web platforms — wikis, blogs, social networking Web sites, and other open, collaborative platforms — where people have the freedom to share and reuse work. Web 2.0 amounts to a worldview that celebrates open participation as a way to create valuable collective resources. It regards open technical protocols and content as the basis for this process (whether managed as a commons or a business), and dismisses closed, proprietary regimes as both socially and economically questionable. In essence, Web 2.0 honors socially created value as the basis for value creation, which market players may or may not be able to exploit. +={Web 2.0:coining of term} + +Blogging is more of a social medium than is generally supposed, for example. It is not just the outburst of some ranter in his pajamas, as the stereotype has it, but a social medium that connects people in new ways. Most blogs have a blogroll — a list of admired blogs— which enables the readers of one blog to identify other bloggers engaged in similar conversations. Permalinks — stable Web addresses for blog content — enable people to make reliable Web citations of content, which means that people can coalesce around a shared body of work. And RSS feeds— “Really Simple Syndication” — allow people to “subscribe” to individual blogs and Web sites, enabling them to keep abreast of a sprawling set of communities. +={blogging+1;permalinks;RSS feeds} + +The rise of blog-tracking companies like Technorati and Alexa has also helped blogging become a durable social genre. 
These companies inventory and rank blogs, and help people discover blogs for virtually any subject of interest — cocktail mixing, high-energy physics, needlework design. By 2007, there were an estimated 100 million blogs in existence (although many were inactive or abandoned), making the blogosphere a powerful cultural force in its own right. There was also a flood of online “news aggregators” — Web sites that cherry-pick their own mix of pieces from the wire services, newspapers, Web sites, blogs, and other online sources. With huge audiences, news aggregators like the Drudge Report (1.6 million unique monthly visitors) and the Huffington Post (773,000 visitors) have begun to rival major daily newspapers in reach and influence. + +Another seminal social innovation has been Wikipedia, a strange and wondrous cultural eruption. Founded by Jimmy Wales and Larry Sanger in January 2001, the English-language Wikipedia began to gain serious momentum in the months after the CC licenses were released, and by early 2003 hosted 100,000 articles. (A “wiki” is a special type of Web site that allows anyone who accesses it to add or modify its contents.) After two years, Wikipedia had amassed a collection of 400,000 articles and inspired the launch of affiliated Wikipedias in more than 100 languages. In May 2008, +={Sanger, Larry;Wales, Jimmy;Wikipedia:social movement, as+2} + +Wikipedia featured 10.2 million articles in 255 languages; 2.3 million of the articles were in English. By harnessing the energies of tens of thousands of volunteers to write an infinitely expandable “encyclopedia,” Wikipedia has become the leading symbol for a radically new way of compiling and editing knowledge.~{ Wikipedia statistics from http://en.wikipedia.org/wiki/Wikipedia:About. }~ Remarkably, the Wikimedia Foundation, the umbrella organization that funds Wikipedia and many sister projects, had fewer than twenty paid employees in 2008 and a budget of less than $2 million. 
+ +Wikipedia has also spun off affiliated multilingual, free-content wikis on various subjects. Wikispecies is compiling an inventory of the world’s species, Wikiquote is collecting thousands of memorable quotations, the Wikimedia Commons is providing freely usable media files, and Wikibooks is assembling open-content textbooks. Wiki software has been adopted by dozens of different online communities, giving rise to scores of collaborative Web sites such as Conservapedia (for American political conservatives), Intellipedia (for U.S. intelligence agencies), Wookieepedia (for Star Wars fans), Wikitravel (for travelers), and OpenWetWare (for biological researchers). + +In the months following the launch of the CC licenses, peer-topeer (P2P) file sharing was also expanding rapidly. Long associated with illicit sharing of copyrighted music, P2P software in fact has many entirely legitimate uses in science, education, and diverse creative sectors. One of the key attractions of P2P software is its efficiency. It does not need to route information through centralized servers; information can be rapidly shared by routing digital files directly to participants, computer to computer, or by passing it through key nodes in an on-the-fly manner. Even after the courts shut down Napster in 2002, a variety of other P2P software applications — Grokster, Lime Wire, KaZaA, Gnutella, BitTorrent — continued to facilitate online sharing and collaboration. Some thirty-five companies, including Hollywood studios, are sufficiently impressed with the efficiencies of P2P that they have licensed BitTorrent technology to distribute their video content. 
+={Napster;software:P2P} + +Peer-to-peer file sharing has also unleashed radically new types of knowledge creation: volunteers who join the NASA Clickworkers project to count and classify craters on Mars, “citizen scientists” who help compile an interactive database of butterfly and bird sightings, or geneticists from around the world who submit data to the Human Genome Project and share access to the database. + +Although the tech world and some Internet users had known about various networking tools for years, the general public was largely in the dark until the presidential campaign of Vermont governor Howard Dean in 2002 and 2003. At the time, Dean was considered a long-shot antiwar candidate with little base and little money. Within a few short months, however, thanks to Dean’s outspoken style and his campaign’s skillful use of the Internet, he became the front-runner in a field of twelve candidates. Dean did not use the Internet as a simple publishing tool, but as a way to stimulate decentralized collaboration and thereby organize a diverse community of supporters. The campaign was not just about Dean, but about the participation of 640,000 volunteers who virtually organized themselves through various online tools. The campaign became a dynamic conversation between the candidate and voters — and generated a gusher of more than $50 million, most of it donations of a hundred dollars or less. So much was raised that Dean famously asked his supporters whether he should forgo federal matching funds, and instead raise more money from them. They agreed. The campaign ultimately imploded, of course, after his famous “Dean’s Scream” speech — itself a complex story — but what is notable is how the Dean campaign vividly demonstrated the speed and power of viral networks. 
+={Dean, Howard;Interenet:political campaigns on} + +By 2003 many ordinary people knew about the Napster controversy, the record industry’s scorched-earth litigation tactics against consumers, and the Supreme Court’s ruling in the /{Eldred}/ case. So people welcomed blogs, wikis, and other Web 2.0 applications as tools to emancipate themselves culturally. In the mass media era, people had few tools or sufficient money to speak to the general public or organize their own communities of interest. But now, using a lightweight infrastructure of software code and telecommunications, people could build stable online communities that reflected their own values and social practices. No permission or payment necessary. No expensive capital investments. +={Napster;Eldred v. Reno/Eldred v. Ashcroft;Internet:socially created value of+4;communities:online+4} + +In many instances, amazingly, virtual communities are performing tasks that existing markets are not performing as efficiently or with as much social trust and goodwill. Craigslist, the free want-ad service that has significantly undercut classified advertising in newspapers, is one of the more stellar examples. In South Korea, OhmyNews.org uses thirty-six thousand citizen-journalists to write up to two hundred online stories a day. The publication is considered the sixth-most influential media outlet in Korea, based on a national magazine poll. Countless specialty blogs are considered more expert and timely sources of information and analysis than mainstream newspapers and magazines. +={OhmyNews.org;Craigslist;blogging} + +Taken together, the new participatory media platforms constitute something new under the sun — a globally accessible space that is both personal and public, individual and social. 
The riot of unfiltered expression that has materialized on the Internet is often dismissed as stupid, unreliable, and silly; or praised as brilliant, stylish, and specialized; or simply accepted as idiosyncratic, irregular, and local. It is all of these things, of course, and that is precisely the point. +={Internet:mass participation in+1} + +If print culture honors the ethic of “edit, then publish,” the Internet inverts it: /{anything}/ can be made public . . . and then it is up to users to become their own editors. On the Internet, people do not “consume” content, they become active writers, editors, and critics in their own right. They use search engines, news aggregators, and favorite bloggers to identify what they want — or they create their own content, as desired. They are /{participants}/, not merely informed consumers who choose what some professional editor offers to them. +={blogging} + +The Web 2.0 environment was quite hospitable for the spread of the CC licenses. It enabled people to signal their willingness to share and their enthusiasm for cool niche fare as opposed to massaudience kitsch.Members of online communities could confidently share their work on wikis and collaborative Web sites, knowing that no one could appropriate their content and take it private. Socially, the licenses let people announce their social identity to others and build a countercultural ethos of sharing. The ethos became hipper and more attractive with every new antipiracy measure that Centralized Media instigated. +={Web 2.0:CC licenses, and;Creative Commons (CC) licenses:Web 2.0 environment, and} + +% Creative Commons (CC) licenses not separated from Creative Commons (CC) in index, try fix above this point + +2~ Open Networks and the Long Tail +={open networks+6} + +While technology and economics have been driving forces in shaping the new participatory platforms, much of their appeal has been frankly cultural. 
Amateur content on the Net may be raw and irregular, but it also tends to be more interesting and authentic than the highly produced, homogenized fare of commercial media. Some of it vastly outshines the lowest common denominator of mass media. Again, the cheap connectivity of the Internet has been key. It has made it possible for people with incredibly specialized interests to find one another and organize themselves into niche communities. For closeted homosexuals in repressive countries or isolated fans of the actor Wallace Beery, the Internet has enabled them to find one another and mutually feed their narrow interests. You name it, there are sites for it: the fans of obscure musicians, the collectors of beer cans, Iranian exiles, kite flyers. Freed of the economic imperative of attracting huge audiences with broad fare, niche-driven Internet content is able to connect with people’s personal passions and interests: a powerful foundation not just for social communities, but for durable markets. +={Internet:communication system, as+1} + +This, truly, is one of the more profound effects of networking technologies: the subversion of the “blockbuster” economics of the mass media. It is becoming harder and more expensive for film studios and broadcast networks to amass the huge, cross-demographic audiences that they once could. In the networked environment, it turns out that a diversified set of niche markets can be eminently profitable with lower-volume sales. While Centralized Media require a supply-side “push” of content, the Internet enables a demand-side “pull” of content by users. This radically reduces transaction costs and enhances the economic appeal of niche production. It is easier and cheaper for a company (or single creator) to “pull” niche audiences through word of mouth than it is to pay for expensive “push” advertising campaigns. 
Specialty interests and products that once were dismissed as too marginal or idiosyncratic to be profitable can now flourish in small but robust “pull markets.”~{ David Bollier, “When Push Comes to Pull: The New Economy and Culture of Networking Technology” (Washington, DC: Aspen Institute, 2006), at http://www.aspeninstitute.org/atf/cf/%7BDEB6F227-659B-4EC8-8F84-8 DF23CA704F5%7D/2005InfoTechText.pdf. }~ +={Centralized Media:Internet vs.;Internet:Centralized Media vs.} + +The term associated with this phenomenon is the “Long Tail” — the title of a much-cited article by Chris Anderson in the October 2004 issue of /{Wired}/ magazine, later expanded into a book. Anderson explained the “grand transition” now under way: +={Anderson, Chris+2;Long Tail+3} + +_1 For too long we’ve been suffering the tyranny of lowestcommon-denominator fare, subjected to brain-dead summer blockbusters and manufactured pop. Why? Economics. Many of our assumptions about popular taste are actually artifacts of poor supply-and-demand matching — a market response to inefficient distribution. . . . Hit-driven economics is a creation of an age without enough room to carry everything for everybody. Not enough shelf space for all the CDs, DVDs, and games produced. Not enough screens to show all the available movies. . . .~{ Chris Anderson, “The Long Tail,” /{Wired}/, October 2004, at http://www.wired .com/wired/archive/12.10/tail.html. }~ + +The “Long Tail” refers to the huge potential markets that can be created for low-volume niche books, CD, DVDs, and other products. More than half of Amazon’s book sales, for example, come from books that rank below its top 130,000 titles. The implication is that “the market for books that are not even sold in the average bookstore is larger than the market for those that are,” writes Anderson. 
“In other words, the potential book market may be twice as big as it appears to be, if only we can get over the economics of scarcity.” +={Amazon} + +Unconstrained by the size and tastes of a local customer base or by limited shelf space, online retailers such as Amazon, Netflix (DVDs), Rhapsody (music), and iTunes (music) are showing that the Long Tail can be a very attractive business model. These companies have developed new tools, such as collaborative filtering software and user recommendations, to drive demand for lesser-known titles at the far end of the Long Tail. This is just another instance of using new technologies that leverage people’s natural social dynamics, and in so doing inventing new types of markets. +={Amazon} + + +2~ Another Vehicle for Niche Communities: The Commons +={commons:niche communities as+13;communities:commons, and+13} + +If the Long Tail is a market vehicle for amassing niche communities, the commons is the social analogue. A commons does not revolve around money and market exchange, but around collective participation and shared values. It does not use property rights and contracts in order to generate value; it uses gift exchange and moral commitments to build a community of trust and common purpose. Such communities, it turns out, can generate significant “wealth” — as Richard Stallman demonstrated with free software. +={Stallman, Richard:free software, and;free software:economic effects of} + +Generically speaking, a commons is a governance regime for managing collective resources sustainably and equitably. The commons is generally associated with open fields, forests, and other natural resources that were collectively used by villagers for their subsistence needs. During the “enclosure movement” in medieval times and extending through the eighteenth century, British gentry and entrepreneurs began to privatize the commons and convert its resources into marketable commodities. 
Enclosures essentially dispossessed the commoners and installed a new market regime to manage resources that were previously shared. The commoners, unable to feed themselves or participate in markets, migrated to the industrial cities of England to become the wage slaves and beggars who populate Charles Dickens’s novels. +={commons:enclosure of+2|tragedy of+2;enclosure movement+2} + +Although markets tend to be more efficient than commons, they also tend to focus on that which can be sold and converted into cash. Markets presume that deserts and the public domain have no value because they have no marketable output. Markets also presume that a commons cannot be sustained because inevitably someone will overuse a shared resource — a practice known as “free riding” —and ruin it. This is the famous “tragedy of the commons” notion popularized by biologist Garret Hardin in a 1968 essay, which described how a few farmers will let their sheep overgraze a common pasture and so destroy it. +={Hardin, Garret;free riding;markets:efficiency of} + +The “tragedy of the commons” metaphor has ossified into a truism of neoclassical economics. It takes for granted that shared resources cannot be managed sustainably, and that private property regimes are much better stewards of resources. This prejudice was powerfully rebutted by political scientist Elinor Ostrom in her noted 1990 book /{Governing the Commons}/, which marshaled many empirical examples of natural resource commons that have been managed responsibly for decades or even hundreds of years. Ostrom’s scholarship has since given rise to a great deal of academic study of commons, particularly through the International Association for the Study of the Commons and the Workshop in Political Theory and Policy Analysis at Indiana University. 
It also inspired thinking about the commons by law scholars like Yochai Benkler, Lawrence Lessig, and James Boyle, who saw close parallels with the commons as they watched corporations use copyright law to enclose culture and information. +={Benkler, Yochai;Boyle, James;Ostrom, Elinor, Governing the Commons;Lessig, Lawrence:law in contemporary context, and;commons:types of+2} + +Cultural commons differ significantly from natural resource commons in this key respect: they are not finite, depletable resources like pastures or forests. Online commons tend to grow in value as more people participate, provided there is sufficient governance and common technical standards to enable sharing. Online commons, in short, are less susceptible to the dreaded “tragedy of the commons” and, indeed, tend to be highly generative of value. Their output does not get “used up” the way natural resources do. +={commons:online} + +The burden of Lessig’s 2001 book /{The Future of Ideas}/ was to argue that the Internet constitutes a great, underappreciated commons. It can serve as the infrastructure for tremendous wealth and innovation if its “layers” — the hardware, software, and content— remain sufficiently open and usable by all. The problem, he warned with great prescience, is that policymakers are generally blind to the value of the commons and markets are too eager to reap short-term individual gains. They fail to appreciate that too much private control at any “layer” of the Internet — through proprietary hardware or software, or excessive copyright or patent protection — can stifle personal freedom, market competition, and innovation. Lessig wanted to name the book /{Dot.commons}/, but his publisher rejected it as too obscure. 
+={Lessig, Lawrence:The Future of Ideas} + +One of the key advantages of treating key infrastructure (such as Internet transmission protocols and computer operating systems) as a commons is that people have the freedom to modify and improve them, with resulting benefits for all. Innovation and competition can flourish more readily. At the content layer, much of the appeal of the commons is the creative freedom, above and beyond what the market may enable. Precisely because it is a commons, and not a market, people’s freedoms are not constrained by marketability. A commons is a noncommercial, nongovernmental space that is free from corporate manipulations and government meddling. It offers a qualitatively different type of experience than the marketplace or government power. A commons tends to be more informal, a place where people know you by name, and where your contributions are known and welcomed. A commons based on relationships of trust and reciprocity can undertake actions that a business organization requiring extreme control and predictable performance cannot. +={commons:concept of+1} + +Precisely because a commons is open and not organized to maximize profit, its members are often willing to experiment and innovate; new ideas can emerge from the periphery. Value is created through a process that honors individual self-selection for tasks, passionate engagement, serendipitous discovery, experimental creativity, and peer-based recognition of achievement. The Open Prosthetics Project, for example, invites anyone to contribute to the design of a prosthetic limb and/or the specification of limbs that ought to be designed, even if they don’t know how to do it.~{ http://www.openprosthetics.org. }~ This has generated such unexpected innovations as limbs specifically adapted for rock climbers and an arm designed for fishing. 
Athletes who engage in “extreme sports” — skiing, biking, surfing — have been a rich source of ideas for new products, just as software hackers are among the first to come up with innovative programming ideas. +={commoners:self-selection of;commons:value proposition of+1;value:creation of+1} + +Part of the value proposition of the commons at the content layer is that it can host a more diverse range of expression — personal, social, and creative — than the market, in part because it does not have the burden of having to sustain costly overhead and sell a product. It has other goals — the personal interests and whims of the commoners — and it can often meet those needs inexpensively. Yet the commons does in fact generate many marketable innovations, thanks to its open accessibility, the social relationships it enables and the free sharing and circulation of work. + +Seeing the success of online commons, Centralized Media have tried to fight back by embracing elements of user participation. They invite audiences to vote in polls (/{American Idol}/), publish lists of “most e-mailed” articles (major newspapers), and direct radio listeners to their Web sites for more information (National Public Radio). /{Time}/ magazine’s choice for the “Person of the Year” in 2006 — “You,” the primary driver of Web sites like MySpace and YouTube — was a landmark moment in media history: with a pinched smile and backhanded assertion of its cultural authority, Centralized Media formally acknowledged its most powerful competitor, Decentralized Media! +={Centralized Media:competition, and+2;commons:online;Time (magazine)} + +Yet for all the celebration of “you” as the master of your own fate in cyberspace, the question that is skirted is whether “you” can indeed retain control of your stuff in a Centralized Media environment.
The point of conventional business models, after all, is to engineer a proprietary lock-in of customers through technological dependence, binding contract terms, frequent-buyer credits, brand loyalty, etc. That’s how companies have traditionally secured a more durable customer base and preempted competition. + +But the commons is about securing user freedoms, and not necessarily about prevailing in a market. Web 2.0 may or may not protect both concerns. Like the commons, Web 2.0 relies upon user-generated content, network effects, and bottom-up innovation. But Web 2.0 entrepreneurs, at the end of the day, need to make money. Their sites need to adopt business practices that protect revenue streams. Facebook is catering to advertisers, not users, when they sift through masses of users’ personal data in order to sell targeted advertising. MySpace at one point refused to let its users connect to rival Web sites and outside software “widgets.”~{ Rachel Rosmarin, “Why MySpace Blinked,” /{Forbes}/, April 24, 2007. }~ In this sense, Web 2.0 media may be “open,” but they are not necessarily “free,” as in freedom. Web 2.0 entrepreneurs are more likely to focus on protecting their market advantages than advancing user freedoms. The two issues may overlap substantially, but they are not identical. +={commons:concept of;Facebook;MySpace;Web 2.0:purposes of+1} + +Science-fiction writer William Gibson once wrote, “The future is already here; it’s just not well-distributed yet.” That sums up the Great Value Shift circa 2003. The efficiencies and affordances made possible by the Internet were there. They were enabling all sorts of pioneers to build new business models, new creative genres, and new online communities — but these innovations were unevenly distributed. More to the point, their potential was unevenly perceived, especially in many precincts of Washington officialdom and the corporate world. 
The challenge for amateurs venturing onto open platforms was to validate the new sorts of socially created value enabled by the Internet. +={Gibson, William;Great Value Shift;Internet:Great Value Shift, and} + +1~ 6 CREATORS TAKE CHARGE + +/{Rip, remix, burn, mashup — legally. The CC licenses facilitate new Internet genres and business models.}/ + +The first users of CC licenses understood that something different was going on; a different order was taking shape. More than just a legal tool, the CC licenses gave the tech vanguard a way to express their inchoate sense that a new and better world was possible, at least on the Internet. They yearned for a noncommercial sharing economy with a different moral calculus than mass media markets, and for markets that are more open, accountable, and respectful of customers. +={Creative Commons (CC) licenses:first users of+9|books, for+19} + +The early adopters were unusually informed about the politics of technology, skeptical of Big Media, and passionate about the artistic freedoms and social responsibility. They were a locally engaged but globally aware network of tech sophisticates, avant-garde artists, clued-in bloggers, small-/{d}/ democratic activists, and the rebellious of spirit: the perfect core group for branding the Creative Commons and instigating a movement. + +It only made sense that Cory Doctorow — copyfighter, science-fiction writer, tech analyst, co-editor of the popular Boing Boing blog — became the first book author to use a CC license. Doctorow — then a thirty-two-year-old native of Canada, the son of Trotskyite schoolteachers, the European representative for the Electronic Frontier Foundation from 2002 to 2006 — is a singular character on the tech/intellectual property/free culture circuit. He can hold forth with intelligence, wry wit, and bravado on digital rights management, Internet economics, or the goofy gadgets and pop culture artifacts that he regularly showcases on Boing Boing.
+ +In January 2003, a month after the CC licenses were released, Doctorow published his first novel, /{Down and Out in the Magic Kingdom}/, under an Attribution, NonCommercial, No Derivative Works license (BY-NC-ND). Simultaneously, his progressive-minded publisher, Tor Books of New York City, sold hard copies of the book. “Why am I doing this thing?” Doctorow asked rhetorically: +={Doctorow, Cory:Down and Out in the Magic Kingdom+5} + +_1 Well, it’s a long story, but to shorten it up: first-time novelists have a tough row to hoe. Our publishers don’t have a lot of promotional budget to throw at unknown factors like us. Mostly, we rise and fall based on word-of-mouth. I’m not bad at word-of-mouth. I have a blog, Boing Boing (http://boingboing.net), where I do a /{lot}/ of word-of-mouthing. I compulsively tell friends and strangers about things I like. And telling people about stuff is /{way, way}/ easier if I can just send it to ’em. Way easier.~{ Cory Doctorow, “A Note About This Book,” February 12, 2004, and “A Note About This Book,” January 9, 2003, in /{Down and Out in the Magic Kingdom}/, available at http://www.craphound.com/down. }~ + +A year later, Doctorow announced that his “grand experiment” was a success; in fact, he said, “my career is turning over like a goddamned locomotive engine.” More than thirty thousand people had downloaded the book within a day of its posting. He proceeded to release a collection of short stories and a second novel under a CC license.
He also rereleased /{Down and Out in the Magic Kingdom}/ under a less restrictive CC license — an Attribution, NonCommercial, ShareAlike license (BY-NC-SA), which allows readers to make their own translations, radio and film adaptations, sequels, and other remixes of the novel, so long as they are made available on the same terms.~{ Anna Weinberg,“Buying the Cow, Though the Milk Is Free: Why Some Publishers are Digitizing Themselves,” June 24, 2005, /{Book Standard}/, June 24, 2005, available at http://www.thebookstandard.com/bookstandard/news/publisher/ article_display.jsp?vnu_content_id=1000968186.}~ + +With some sheepish candor, Doctorow conceded: “I wanted to see if the sky would fall: you see writers are routinely schooled by their peers that maximal copyright is the only thing that stands between us and penury, and so ingrained was this lesson in me that even though I had the intellectual intuition that a ‘some rights reserved’ regime would serve me well, I still couldn’t shake the atavistic fear that I was about to do something very foolish indeed.” + +By June 2006, /{Down and Out in the Magic Kingdom}/ had been downloaded more than seven hundred thousand times. It had gone through six printings, many foreign translations, and two competing online audio adaptations made by fans. “Most people who download the book don’t end up buying it,” Doctorow conceded, “but they wouldn’t have bought it in any event, so I haven’t lost any sales. I’ve just won an audience. A tiny minority of downloaders treats the free e-book as a substitute for the printed book — those are the lost sales. But a much larger minority treats the e-book as an enticement to buy the printed book. They’re gained sales. As long as gained sales outnumber lost sales, I’m ahead of the game. 
After all, distributing nearly a million copies of my book has cost me nothing.”~{ Cory Doctorow, “Giving it Away,” Forbes.com, December 1, 2006, available at http://www.forbes.com/2006/11/30/cory-doctorow-copyright-tech-media _cz_cd_books06_1201doctorow.html. }~ In 2008, Doctorow’s marketing strategy of giving away online books to stimulate sales of physical books paid off in an even bigger way. His novel for teenagers, /{Little Brother}/, about a youthful hacker who takes on the U.S. government after it becomes a police state, spent weeks on the /{New York Times}/ bestseller list for children’s books. + +It is perhaps easier for a sci-fi futurist like Doctorow than a publishing business to take such a wild leap into the unknown. But that, too, is an important insight: artists are more likely to lead the way into the sharing economy than entrenched industries. “I’d rather stake my future on a literature that people care about enough to steal,” said Doctorow, “than devote my life to a form that has no home in the dominant medium of the century.” Book lovers and authors will pioneer the future; corporate publishing will grudgingly follow, or be left behind. + +Over the past few years, a small but growing number of pioneering authors have followed Doctorow’s lead and published books under Creative Commons licenses. While the hard evidence is scarce, many authors who use CC licenses believe that releasing free electronic versions of their books does not hurt, and probably helps, the sales of physical copies of their books. Lessig released his 2004 book, /{Free Culture}/, under an Attribution, NonCommercial license (BY-NC), and scores of authors and established publishers have since released books under CC licenses. 
Among the more notable titles: Yochai Benkler’s /{The Wealth of Networks}/ (Yale University Press, 2006), Kembrew McLeod’s /{Freedom of Expression}/ (Doubleday, 2005), Peter Barnes’s /{Capitalism 3.0}/ (Berrett-Koehler, 2006), and Dan Gillmor’s /{We the Media}/ (O’Reilly Media, 2004). +={Barnes, Peter:Capitalism 3.0;Benkler, Yochai:The Wealth of Networks;Gillmor, Dan:We the Media;Lessig, Lawrence:Free Culture;McLeod, Kembrew:Freedom of Expression} + +In 2006, Paulo Coelho, author of a bestselling book, /{The Alchemist}/, created a “pirate” blog site that invited readers to use BitTorrent and other file-sharing networks to download free copies of his books. After he put the Russian translation of /{The Alchemist}/ online, sales of hardcover copies in Russia went from around 1,000 a year to 100,000, and then to more than 1 million. Coelho attributes the success of foreign translations of his book to their free availability online.~{ Smaran, “Alchemist Author Pirates His Own Book,” TorrentFreak blog, January 24, 2008, at http://torrentfreak.com/alchemist-author-pirates-own-books080124. }~ Experiments such as these were likely influential in the launch of LegalTorrents, a site for the legal peer-to-peer distribution of CC-licensed text, audio, video games, and other content. +={blogging;Coelho, Paulo} + +The CC licenses have been useful, not just for helping individual authors promote their books, but in fueling open-access scholarly publishing. As we will see in chapter 11, the CC licenses help scientists put their “royalty-free literature” on the Internet — a move that enlarges their readership, enhances their reputations, and still enables them to retain copyrights in their works. + +Free culture publishing models are popping up in many unusual quarters these days. 
LibriVox, to take one instance, is a nonprofit digital library of public-domain audio books that are read and recorded by volunteers.~{ Mia Garlick, “LibriVox,” Creative Commons blog, December 5, 2006, at http://creativecommons.org/text/librivox. }~ Since it started in 2005, the group has recorded more than 150 books by classic authors from Dostoyevsky and Descartes to Jane Austen and Abraham Lincoln. All of them are free. Most are in English but many are in German, Spanish, Chinese, and other languages. +={Lessig, Lawrence:Free Culture+1} + +Founder Hugh McGuire said the inspiration for LibriVox was a distributed recording of Lessig’s book /{Free Culture}/ read by bloggers and podcasters, chapter by chapter. “After listening to that, it took me a while to figure out how to record things on my computer (which I finally did, thanks to free software Audacity). Brewster Kahle’s call for ‘Universal Access to all human knowledge’ was another inspiration, and the free hosting provided by archive.org and ibiblio.org meant that LibriVox was possible: there was no worry about bandwidth and storage. So the project was started with an investment of $0, which continues to be our global budget.” LibriVox’s mission, said McGuire, is the “acoustical liberation of books in the public domain.” +={Kahle, Brewster;LibriVox;McGuire, Hugh;blogging} + +Several publishing businesses now revolve around CC licenses. Wikitravel is a collaborative Web site that amasses content about cities and regions around the world; content is licensed under the CC Attribution, ShareAlike license (BY-SA).~{ “Wikitravel Press launches,” Creative Commons blog, August 3, 2007, at http://creativecommons.org/weblog/entry/7596. See also Mia Garlick, “Wikitravel,” Creative Commons blog, June 20, 2006, at http://creativecommons.org/text/wikitravel. }~ In 2007, its founder joined with a travel writer to start Wikitravel Press, which now publishes travel books in a number of languages.
Like the Wikitravel Web pages, the text in the books can be freely copied and reused. +={Wikitravel Press} + +Another new business using CC licenses is Lulu, a technology company started by Robert Young, the founder of the Linux vendor Red Hat and benefactor for the Center for the Public Domain. Lulu lets individuals publish and distribute their own books, which can be printed on demand or downloaded. Lulu handles all the details of the publishing process but lets people control their content and rights. Hundreds of people have licensed their works under the CC ShareAlike license and Public Domain Dedication, and under the GNU Project’s Free Documentation License.~{ Mia Garlick, “Lulu,” Creative Commons blog, May 17, 2006, at http://creativecommons.org/text/lulu. }~ +={Lulu;Red Hat;Young, Robert;Center for the Public Domain;GNU Project:GNU FDL;public domain:Center for Public Domain} + +As more of culture and commerce move to the Internet, the question facing the book industry now is whether the text of a book is more valuable as a physical object (a codex) or as a digital file (intangible bits that can circulate freely), or some combination of the two. Kevin Kelly, the former editor of /{Wired}/ magazine, once explained: “In a regime of superabundant free copies, copies lose value. They are no longer the basis of wealth. Now relationships, links, connection and sharing are. Value has shifted away from a copy toward the many ways to recall, annotate, personalize, edit, authenticate, display, mark, transfer and engage a work.”~{ Kevin Kelly, “Scan This Book!” /{New York Times Magazine}/, May 14, 2006, p. 43. }~ +={Kelly, Kevin+1} + +What this means in practice, Kelly has pointed out, is that books become more valuable as they become more broadly known and socially circulated — the very functionalities that the Internet facilitates.
If people can discover a book online and read portions of it, share it with friends, and add annotations and links to related materials, it makes a book more desirable than a hard-copy version that is an inert text on a shelf. As Kelly writes: “When books are digitized, reading becomes a community activity. Bookmarks can be shared with fellow readers. Marginalia can be broadcast. Bibliographies swapped. You might get an alert that your friend Carl has annotated a favorite book of yours. A moment later, his links are yours.”~{ Ibid., p. 45. }~ + +Needless to say, most book publishers and authors’ organizations are not yet prepared to embrace this newfangled value proposition. It seems way too iffy. A “sharing” business model would seemingly cannibalize their current revenues and copyright control with little guarantee of doing better in an open, online milieu. The bigger problem may be the cultural prejudice that an absolute right of control over any possible uses of a book is the best way to make money. +={open business models} + +In general, the publishing trade remains skeptical of the Internet, clueless about how to harness its marketing power, and strangers to CC licenses. And it could be years before mainstream publishing accepts some of the counterintuitive notions that special-interest Internet communities will drive publishing in the future. In a presentation that caused a stir in the book industry, futurist Mike Shatzkin said in May 2007 that this is already happening in general trade publishing: “We’re close to a tipping point, or maybe we’re past it . . . 
where Web-based branding will have more credibility than print, because print, needing more horizontal reach to be viable, won’t deliver the attention of the real experts and megaphones in each field.”~{ Mike Shatzkin, “The End of General Trade Publishing Houses: Death or Rebirth in a Niche-by-Niche World,” presented to the Book Expo America, New York, May 31, 2007, available at http://www.idealog.com/speeches/ endoftrade.htm. }~ +={Shatzkin, Mike} + +2~ DIY Videos and Film +={Internet:videos and films on+12;videos and film+12;World Wide Web:videos and film on+12} + +One of the biggest cultural explosions of the past decade has been amateur video on the Web. The volume of online video has been so great that there are actually many distinct genres of amateur video: short videos on YouTube, video mashups, “machinima” (a combination of video and online gaming images), amateur pornography, and hybrid forms that combine user videos with conventional broadcast and cable television shows. Just as the Great Value Shift has empowered musicians, so it is giving video- and filmmakers new powers to express themselves as they wish, and reach huge audiences via the Internet. This power represents a potentially major threat to the cultural dominance of the television and film industries, as reflected in various schemes by the networks and studios to establish their own online presences. The threat of do-it-yourself (DIY) video and film is big enough that Viacom alleged that YouTube’s copyright infringements of Viacom-owned video should entitle Viacom to $1 billion in damages. The entertainment industry and the Writers Guild of America endured a long, bitter strike in 2007–2008 precisely because the projected revenues from Internet video are so large. 
+ +={Great Value Shift;Internet:Great Value Shift, and;YouTube+2;Viacom;Writers Guild of America} + +It is too early to know which new video styles will be flash-in-the-pan novelties and which will ripen into popular, and perhaps lucrative, genres. But rarely has a culture seen so many diverse experiments in amateur and indie video expression. One site, Justin.tv, is a free platform for broadcasting and viewing live video. Some people make round-the-clock “life casts” of their daily activities; others have used it to broadcast live from Baghdad, showing war-related events. Yahoo and Reuters have entered into a partnership to host amateur photojournalism by people using their digital cameras and camera phones. Machinima video, the product of the underground gaming community, blends filmmaking with online games to produce computer-generated imagery. As John Seely Brown describes it, “Basically, you can take Second Life or Worlds of Warcraft and have a set of avatars run all over the world, that come together and create their own movie, and then you can ‘YouTube’ the movie.”~{ Cited in David Bollier, /{The Rise of Collective Intelligence: Decentralized Cocreation of Value as a New Paradigm in Commerce and Culture}/ (Washington, DC: Aspen Institute Communications and Society Program, 2007), p. 27. }~ +={Brown, John Seely} + +As amateur video and film proliferate, thanks to inexpensive technologies and Internet access, the CC licenses have obvious value in letting the creator retain a copyright in the video while inviting its duplication and reuse by millions of people online. To industry traditionalists locked into binary options, the free circulation of a work precludes any moneymaking opportunities. But of course, that is precisely what is now being negotiated: how to devise ingenious new schemes to make money from freely circulating video. One option is to own the platform, as YouTube does.
But there are also competitors such as Revver and blip.tv, which have established their own approaches based on advertising and commercial licensing of works. There are also schemes that use Internet exposure to drive paying customers into theaters and advertisers to buy commercial licenses. For some amateurs, DIY video is simply a way to get noticed and hired by a conventional media company. +={Creative Commons (CC) licenses:video and film, for+8} + +That’s what the Los Angeles–based comedy collective The Lonely Island did to promote themselves to national attention. They posted their comedy shorts and songs to their Web site using Creative Commons licenses. Soon other artists began making remixes of their songs. The remixes in effect served as free marketing, which caught the attention of the Fox Broadcasting Company, which in turn hired them to create a comedy pilot TV episode. In the end, Fox did not pick up the show, but as /{Wired News}/ recounted, “Instead of letting the show wither on a shelf somewhere, the group posted the full video both cut and uncut. The edgy, quirky short — Awesometown — spread like wildfire online and eventually landed all three performers an audition spot for /{Saturday Night Live}/.”~{ Matt Haughey, “From LA’s Awesometown to New York City’s SNL,” /{Wired News}/, October 1, 2005. }~ + +Perhaps the most successful example of leveraging free Internet exposure to reap commercial benefits is the sci-fi parody /{Star Wreck}/. Finnish producer Samuli Torssonen took seven years to shoot a full-length movie using a Sony DVCAM, computer-generated graphics, and a makeshift studio. Some three hundred people were involved in the project, including some professional actors and many amateurs. When /{Star Wreck}/ was deliberately posted to the Internet in 2005, tagged with a CC-BY-NC-ND license (Attribution, NonCommercial, No Derivatives), it was eventually downloaded 5 million times and became the most-watched Finnish film in history.
Fans in Russia, China, and Japan soon copied the film, which stimulated broader viewer demand and led to commercial deals to distribute the film. /{Star Wreck}/ became so popular that Universal Pictures, the American studio, signed a deal in 2006 to distribute DVD versions of the film. Torssonen says that the film has earned a 20-to-1 return on investment. “I wouldn’t call free distribution stupid, as some people say, but a success,” he told an audience in 2007.~{ Samuli Torssonen presentation at iCommons Summit 2007, Dubrovnik, Croatia, June 15, 2007. See also www.starwreck.com. }~ +={Star Wreck Studios+1;Torssonen, Samuli} + +The lesson for Stephen Lee, CEO of Star Wreck Studios, is that “you don’t need millions to make a quality movie. You need an active, passionate community.” Lee says the plan for a peer-produced model of “wrecking a movie” is to develop an Internet collaboration, make the film popular through viral marketing, and then license it commercially. Star Wreck Studios is now developing a new movie, /{Iron Sky}/, about a Nazi base on the far side of the moon. +={Lee, Stephen} + +One of the more daring experiments in film production is being pioneered by the Blender Institute, a studio for open-content animation and game projects located in the Amsterdam docklands. Started in August 2007, the Institute employs fourteen full-time people who are obsessed with improving its three-dimensional open-source software, the so-called Blender 3D suite. The software is widely used by a large international user community for modeling, animation, rendering, editing, and other tasks associated with 3D computer-generated animation. +={Blender Institute+2} + +Ton Roosendaal, who directs the Blender Institute, is trying to demonstrate that a small studio can develop a virtuous cycle of economically sustainable creativity using open-source software, Creative Commons licenses, and talented programmers and artists from around the world.
“We give programmers the freedom to do their best, and what they want to do is improve the technology,” he said. “The market is too hyper-rational and nailed down and filled with limits,” he argues, referring to his peers at major animation studios. “Open source is free of most of these constraints.”~{ Ton Roosendaal remarks at conference, “Economies of the Commons,” De Balie Centre for Culture and Politics, Amsterdam, April 10–12, 2008. }~ +={Roosendaal, Ton} + +In April 2008, the Blender Institute released a ten-minute animated short, /{Big Buck Bunny}/, which features a kind-hearted, fat white bunny who endures the abuse of three stone-throwing rodents until they smash a beautiful butterfly with a rock — at which point the bunny rallies to teach the bullies a lesson.~{ The film can be downloaded at http://www.bigbuckbunny.org/index.php/ download. }~ The film uses cutting-edge computer-generated animation techniques that rival anything produced by Pixar, the Hollywood studio responsible for /{Toy Story}/, /{Cars}/, and /{Ratatouille}/. /{Big Buck Bunny}/ is licensed under a CC Attribution license, which means the digital content can be used by anyone for any purpose so long as credit is given to the Blender Institute. +={Big Buck Bunny (animated short)+1} + +/{Big Buck Bunny}/ was initially distributed to upfront investors as a DVD set that includes extras such as interviews, outtakes, deleted scenes, and the entire database used in making the film. Then, to pique wider interest in sales of the DVD set, priced at thirty-four euros, a trailer was released on the Internet. This resulted in extensive international press coverage and blog exposure. Early signs are promising that Blender will be able to continue to make highquality animation on a fairly modest budget without worries about illegal downloads or a digital rights management system. 
The Blender production model also has the virtue of enabling access to top creative talent and cutting-edge animation technologies as well as efficient distribution to paying audiences on a global scale. + +While CC-licensed films are not common, neither are they rare. Davis Guggenheim, the filmmaker who directed /{An Inconvenient Truth}/, made a short film, /{Teach}/, to encourage talented people to become teachers. The film was released in 2006 under a CC BY-NC-ND license because Guggenheim wanted the film widely available to the public yet also wanted to preserve the integrity of the stories told, hence the NoDerivatives provision. A Spanish short film, /{Lo que tú Quieras Oír}/, became YouTube’s fifth most-viewed video — more than 38 million views. The film’s viral diffusion may have been helped by the CC BY-NC-SA (Attribution, NonCommercial, ShareAlike) license, which allows viewers not only to share the film, but to remix for noncommercial purposes so long as they use the same license. +={Guggenheim, Davis;YouTube} + +In Brazil, director Bruno Vianna released his first full-length film, /{Cafuné}/, under a CC BY-NC-SA license (Attribution, NonCommercial, ShareAlike) and put it on file-sharing networks at the same time that it was exhibited in a handful of theaters.~{ Mia Garlick, CC blog, at http://creativecommons.org/weblog/entry/6048; see also “Cafuné breaking the limits for open business models,” iCommons blog, at http://www.icommons.org/static/2006/11/22/cafune-breaking-the-limits-for-open-business-models. }~ Each release had different endings; downloaders were invited to remix the ending as they wished. The film was financed by the government’s culture ministry as part of a competition for low-budget films, but only about fifty Brazilian films are released to commercial theaters each year. Vianna saw the Internet release as a great way to build an audience for his debut film . . . which is exactly what happened.
For some weeks, it made it into the list of twenty most-watched films in the country. +={Vianna, Bruno} + +2~ Letting the Music Flow +={music:CC licenses for+18|remixes+18;remix works+18;Creative Commons (CC) licenses:music, for+18} + +Media reform activist Harold Feld offers a succinct overview of why creativity in music — and therefore the business of selling recorded music — has suffered over the past two decades: +={Feld, Harold+2} + +_1 The 1990s saw a number of factors that allowed the major labels to push out independents and dominate the market with their own outrageously priced and poorly produced products: consolidation in the music industry, the whole “studio system” of pumping a few big stars to the exclusion of others, the consolidation in music outlets from mom-andpop record stores to chains like Tower Records and retail giants like Wal-Mart that exclude indies and push the recordings promoted by major labels, and the consolidation of radio — which further killed indie exposure and allowed the labels to artificially pump their selected “hits” through payola. All this created a cozy cartel that enjoyed monopoly profits. +={music:music industry+1} + +_1 As a result, the major labels, the mainstream retailers, and the radio broadcasters grew increasingly out of touch with what listeners actually wanted. But as long as the music cartel controlled what the vast majority of people got to hear, it didn’t matter . . . The music cartel remained the de facto only game in town.~{ Harold Feld, “CD Sales Dead? Not for Indies!” blog post on Public Knowledge Web site, March 27, 2007, at http://www.publicknowledge.org/node/ 890. }~ + +Changing the music industry is obviously a major challenge that is not going to be solved overnight. Still, there is a growing effort led by indie musicians, small record labels, Internet music entrepreneurs, and advocacy groups such as the Future of Music Coalition to address these problems. 
Creative Commons is clearly sympathetic, but has largely focused on a more modest agenda — enabling a new universe of shareable music to arise. Its chief tools for this mission, beyond the CC licenses, are new software platforms for legal music remixes, online commons that legally share music, and new business models that respect the interests of both fans and artists. Ultimately, it is hoped that a global oeuvre of shareable music will emerge. Once this body of music matures, attracting more artists and fans in a self-sustaining viral spiral, the record industry may be forced to give up its dreams of perfect control of how music may circulate and adopt fan-friendly business practices. +={Future of Music Coalition} + +This, at least, is the theory, as Lessig explains it. He calls it the “BMI strategy,” a reference to the strategy that broadcasters and musicians used to fight ASCAP’s monopoly control over radio music in the early 1940s. ASCAP, the American Society of Composers, Authors and Publishers, is a nonprofit organization that collects royalties for musical performances. At the time, ASCAP required artists to have five hits before it would serve as a collection agency for them, a rule that privileged the playing of pop music on the radio at the expense of rhythm and blues, jazz, hillbilly, and ethnic music. Then, over the course of eight years, ASCAP raised its rates by 450 percent between 1931 and 1939 — at which point, ASCAP then proposed /{doubling}/ its rates for 1940. In protest, many radio stations refused to play ASCAP-licensed music. They formed a new performance-rights body, BMI, or Broadcast Music, Inc., which sought to break the ASCAP monopoly by offering free arrangements of public-domain music to radio stations. They also charged lower rates than ASCAP for licensing music and offered better contracts for artists.~{ Donald Clarke, /{The Rise and Fall of Popular Music}/, chapter 11. 
}~ +={ASCAP+1;BMI (Broadcast Music, Inc.)+3;music:ASCAP+1;Lessig, Lawrence:CC licenses, and+2|music, and+2} + +“The Internet is today’s broadcasters,” said Lessig in a 2006 speech. “They are facing the same struggle.”~{ Lessig explained his BMI strategy at a speech, “On Free, and the Differences Between Culture and Code,” at the 23d Chaos Communications Conference (23C3) in Berlin, Germany, December 30, 2006; video can be watched at http://video.google.com/videoplay?docid=7661663613180520595&q= 23c3. }~ Just as ASCAP used its monopoly power to control what music could be heard and at what prices, he said, so today’s media corporations want to leverage their control over content to gain control of the business models and technologies of digital environments. When Google bought YouTube, one-third of the purchase price of $1.65 billion was allegedly a financial reserve to deal with any copyright litigation, said Lessig. This is how the incumbent media world is trying to stifle the emergence of free culture. +={Google;YouTube} + +The same questions that once confronted broadcasters are now facing Internet innovators, Lessig argues: “How do we free the future from the dead hand of the past? What do we do to make it so they can’t control how technology evolves?” With copyright terms lasting so long, it is not really feasible to try to use public-domain materials to compete with a commercial cartel. Lessig’s answer is a BMI-inspired solution that uses the CC licenses to create a new body of “free” works that, over time, can begin to compete with popular works. The legendary record producer Jerry Wexler recalled how ASCAP marginalized R & B, country, folk, and ethnic music, but “once the lid was lifted — which happened when BMI entered the picture — the vacuum was filled by all these archetypal musics.
BMI turned out to be the mechanism that released all those primal American forms of music that fused and became rock-and-roll.”~{ From BMI, Inc., Web site, at http://www.bmi.com/genres/entry/533380. }~ Lessig clearly has similar ambitions for Creative Commons. +={Wexler, Jerry} + +For now, the subculture of CC-licensed music remains something of a fringe movement. It is easy to patronize it as small, amateurish, and quirky. Yet its very existence stands as a challenge to the music industry by showing the feasibility of a more artist- and fan-friendly way of distributing music. Is it visionary to believe that free culture artists will force the major labels to change — just as BMI forced ASCAP to lower prices — and make them more competitive and inclusive? +={ASCAP;music:ASCAP+1;music:music industry} + +Creative Commons’s primary task is practical — to help musicians reach audiences directly and reap more of the financial rewards of their music. So far, a wide range of indie bands, hip-hop artists, and bohemian experimentalists of all stripes have used the licenses. One of the most popular is the Attribution, NonCommercial license, which lets artists share their works while getting credit and retaining commercial rights. A number of marquee songwriters and performers — David Byrne, Gilberto Gil, the Beastie Boys, Chuck D — have also used CC licenses as a gesture of solidarity with free culture artists and as an enlightened marketing strategy. Inviting people to remix your songs is a great way to engage your fan base and sell more records. And tagging your music with a CC license, at least for now, wraps an artist in a mantle of tech sophistication and artistic integrity. +={Beastie Boys;Byrne, David;Chuck D;Gil, Gilberto} + +Guitarist Jake Shapiro was one of the first musicians to show the marketing potential of unleashing free music on the Internet. In 1995, Shapiro put MP3 files of music by his band, Two Ton Shoe, on the group’s Web site.
Within a few years, Two Ton Shoe was one of the most-downloaded bands on the Internet, developing fan bases in Italy, Brazil, Russia, and South Korea. One day Shapiro received a phone call out of the blue from a South Korean concert promoter. He wanted to know if the band would fly over to Seoul to perform four concerts. It turned out that fans in South Korea, where fast broadband connections are the norm, had discovered Two Ton Shoe through file sharing. A local CD retailer kept getting requests for the band’s music, which led him to contact a concert promoter. In August 2005, Shapiro and his buddies arrived in Seoul as conquering rock stars, selling out all four of their concerts. “The kids who showed up knew all the words to the songs,” Shapiro recalled. A year later, the band signed a deal to distribute a double CD to East Asia.~{ Shapiro described his experiences at the “Identity Mashup Conference,” June 19–21, 2006, hosted by the Berkman Center for Internet and Society at Harvard Law School, at http://blogs.law.harvard.edu/mediaberkman/2006/ 06/28/id-mashup-2006-day-two-the-commons-open-apis-meshups-andmashups. His band’s Web site is at http://www.twotonshoe.com. }~ +={Shapiro, Jake;Two Ton Shoe} + +While such stories of viral marketing success are not common, neither are they rare. Lots of bands now promote themselves, and find admiring (paying) fans, by posting their music, for free, on Web sites and file-sharing sites. Perhaps the most scrutinized example was Radiohead’s decision to release its album /{In Rainbows}/ for free online, while inviting fans to pay whatever they wanted. (The band did not release any numbers, but considered the move a success. They later released the album through conventional distribution channels as well.)~{ Jon Pareles, “Pay What You Want for This Article,” /{New York Times}/, December 9, 2007. 
}~ +={Radiohead} + +Just as previous generations of fans came together around FM radio or live performance venues, the Internet is the new gathering place for discovering interesting, fresh, and authentic talent. The lesson that the record industry hasn’t quite learned is that music is not just a commodity but a /{social experience}/ — and social experiences lose their appeal if overly controlled and commercialized. If the music marketplace does not provide a place for fans to congregate and share in a somewhat open, unregimented way — if the commodity ethic overwhelms everything else — the music dies. Or more accurately, it migrates underground, outside the marketplace, to sustain itself. This is why so much of the best new music is happening on the fringes of the stagnant commercial mainstream. +={music:social experience, as+4} + +It is also why the Creative Commons licenses have acquired such cachet. They have come to be associated with musicians who honor the integrity of music making. They symbolize the collective nature of creativity and the importance of communing freely with one’s fans. Nimrod Lev, a prominent Israeli musician and supporter of the CC licenses, received considerable press coverage in his country for a speech that lamented the “cunning arrangement” (in Israeli slang, /{combina}/) by which the music industry has betrayed people’s love of music, making it “only a matter of business and commerce.” Said Lev: +={music:music industry+1;Lev, Nimrod+2} + +_1 The music industry treats its consumer as a consumer of sex, not of love, the love of music. Just like everything else: a vacuum without values or meaning. But it is still love that everyone wants and seeks. . . . The music vendors knew then [a generation ago] what they have forgotten today, namely that we must have cultural heroes: artists that are not cloned in a manner out to get our money. 
There was an added value with a meaning: someone who spoke to our hearts in difficult moments, and with that someone, we would walk hand in hand for a while. We had loyalty and love, and it all meant something.~{ Nimrod Lev, “The Combina Industry,” November 16, 2004, at http://law .haifa.ac.il/techlaw/new/try/eng/nimrod.htm. }~ + +At the risk of sounding naïve, Lev said he wanted to stand up for the importance of “authenticity and empathy and my own truth” in making music. It is a complaint that echoes throughout the artistic community globally. A few years ago, Patti Smith, the punk rocker renowned for her artistic integrity, decried the “loss of our cultural voice” as the radio industry consolidated and as music television became a dominant force. She grieved for the scarcity of places for her to “feel connected” to a larger musical community of artists and fans.~{ Patti Smith at a panel at the National Conference for Media Reform, St. Louis, sponsored by Free Press, May 14, 2005. }~ +={Smith, Patti} + +The classic example of music as social experience — music as a vehicle for a community of shared values — is the Grateful Dead. The band famously invited its fans to record all of its concerts and even provided them with an authorized “tapers’ section” in which to place their microphones and equipment. Fans were also allowed to circulate their homemade tapes so long as the music was shared, and not sold. This had the effect of building a large and committed fan base, which avidly archived, edited, and traded Grateful Dead cassettes. One reason that the Dead’s “customer base” has been so lucrative and durable over several decades is that the fans were not treated as mere customers or potential pirates, but as a community of shared values. 
The music belonged to the fans as much as to the band, even though Deadheads were only too happy to pay to attend concerts and buy the officially released CDs and t-shirts.~{ A fascinating collision of the Grateful Dead’s sharing ethic and the copyright business model occurred in 2005, when the Internet Archive placed a huge cache of fan recordings online, available for free download. When Grateful Dead Merchandising objected, Deadheads accused the band’s representatives of betraying the band’s long-established sharing ethic. Paradoxically, the band’s merchandisers may also have jeopardized the band’s commercial appeal by prohibiting the downloads. As music critic Jon Pareles put it, “The Dead had created an anarchy of trust, going not by statute but by instinct and turning fans into co-conspirators, spreading their music and buying tickets, T-shirts and official CDs to show their loyalty. The new approach . . . removes what could crassly be called brand value from the Dead’s legacy by reducing them to one more band with products to sell. Will the logic of copyright law be more profitable, in the end, than the logic of sharing? That’s the Dead’s latest improvisational experiment.” Jon Pareles, “The Dead’s Gamble: Free Music for Sale,” /{New York Times}/, December 3, 2005. }~ +={Grateful Dead+1} + +While the Grateful Dead may be an outlier case, it exemplifies the sharing ethic that the Internet is facilitating: the formation of communities of amateurs that flourish by sharing and celebrating music. Artists can make some money through CD sales, but much more through performances, merchandising, endorsements, and sales to films, television, and advertisers. If established singers and bands are reluctant to make a transition to this new business model, hungry newcomers are not. + +The Mountain Goats, an indie rock group, authorized the Internet Archive to host their live shows on the Web because they realized the videos seed market demand for their music. 
The group’s front man, John Darnielle, said, “I am totally in favor of tape trading, and file sharing never did anything wrong by me. People got into The Mountain Goats after downloading my stuff.”~{ Creative Commons blog, “Musicians Large and Small on Internet Downloading,” by Matt Haughey, July 26, 2004. }~ In 2001, two newcomers working out of a basement produced a cover version of Tears for Fears’ “Mad World,” which two years later went to the top of the British pop charts.~{ http://news.bbc.co.uk/1/hi/entertainment/3352667.stm. }~ In a world where amateur creativity can easily migrate to the commercial mainstream, tagging works with a NonCommercial CC license is a valuable option. By requiring uses that fall outside the scope of the license to pay as usual, it can help artists get visibility while retaining their potential to earn money. A larger restructuring of the music industry, alas, will take longer to achieve. +={Darnielle, John;Mountain Goats} + +2~ Music as Remix +={Creative Commons (CC) licenses:music, for+18;music:remixes+18;remix works+18} + +If any segment of the music world really understands the social dynamics of musical creativity, it is hip-hop artists. As Joanna Demers documents in her book about “transformative appropriation” in music, /{Steal This Music}/, hip-hop was born as a remix genre in the 1970s and 1980s.~{ Joanna Demers, /{Steal This Music: How Intellectual Property Law Affects Musical Creativity}/ (Athens: University of Georgia Press, 2006). }~ In defiance of copyright law, which considers unauthorized borrowing as presumptively illegal, hip-hop artists used turntable scratching and digital sampling to transform existing songs into something new, which in time grew into a lucrative market segment. Hip-hop illustrates how the commons and the market need to freely interact, without undue restrictions, in order for both to flourish.
It works because sampling is not a simple matter of “theft” but a mode of creativity, a way of carrying on a cultural conversation. Sampling is a way of paying tribute to musical heroes, mocking rivals, alluding to an historical moment, or simply experimenting with an arresting sound. When the rap group Run-DMC used Aerosmith’s “Walk This Way” as the basis for a remix, it was not only a salute to the group’s musical influence and a new turn of the creative wheel, it revived Aerosmith’s sagging career (or, in economist’s terms, it “created new value”). +={Demers, Joanna:Steal This Music+1;music:hip-hop+1} + +The problem, of course, is that most remix culture (and the value it creates) is illegal. By the late 1980s, in fact, the freedom of the commons that gave birth to hip-hop was coming under siege. Musicians and record labels were routinely invoking copyright law to demand permission and payments for the tiniest samples of music. Only wealthy artists could afford to clear the rights of familiar songs, and basement amateurs (who had given rise to the genre in the first place) were being marginalized. When George Clinton’s group Funkadelic succeeded in its lawsuit against the rap group N.W.A. for using a nearly inaudible sample of a three-note, two-second clip from “Get Off Your Ass and Jam” — the infamous /{Bridgeport v. Dimension Films}/ decision, in 2004 — it became clear that the commons of hip-hop music was being enclosed.~{ This story is told by Demers in Steal This Music. The court ruling is /{Bridgeport v. Dimension Films}/, 383 F. 3d 390 (6th Circ. 2004). }~ Critics like Siva Vaidhyanathan and Kembrew McLeod believe that the legal crusade against sampling has significantly harmed the creative vitality of hip-hop. Something is clearly amiss when one of the most critically acclaimed albums of 2005 — /{The Grey Album}/, a remix collection by DJ Danger Mouse — cannot be legally released.
/{The Grey Album}/ artfully combined music from the Beatles’s /{White Album}/ with lyrics from Jay-Z’s /{Black Album}/, resulting in “the most popular album in rock history that virtually no one paid for,” according to /{Entertainment Weekly}/.~{ DJ Danger Mouse’s remix received considerable press attention. A good overview is by Chuck Klosterman, “The DJ Auteur,” /{New York Times Magazine}/, June 18, 2006, pp. 40–45. }~ +={Bridgeport v. Dimension Films;Clinton, George;Funkadelic;McLeod, Kembrew;Vaidhyanathan, Siva;DJ Danger Mouse} + +The impetus for a solution to the sampling problem started with Negativland, an irreverent “sound collage” band known as much for its zany culture jamming as for its anticopyright manifestos. (One of its CDs includes a polemical booklet about fair use along with a whoopee cushion with a © symbol printed on it.) Negativland gained notoriety in the 1990s for its protracted legal battle with the band U2 and Island Records over Negativland’s release of a parody song called “U2.” Island Records claimed it was an infringement of copyright and trademark law, among other things. Negativland claimed that no one should be able to own the letter U and the numeral 2, and cited the fair use doctrine as protecting its song and title. The case was eventually settled.~{ See Negativland’s book, /{Fair Use: The Story of the Letter U and the Numeral 2}/ (Concord, CA: Seeland, 1995). }~ +={Negativland+1} + +As an experienced sampler of music, Negativland and collagist People Like Us (aka Vicki Bennett) asked Creative Commons if it would develop and offer a music sampling license. Don Joyce of Negativland explained: +={Joyce, Don} + +_1 This would be legally acknowledging the now obvious state of modern audio/visual creativity in which quoting, sampling, direct referencing, copying and collaging have become a major part of modern inspiration. 
[A sampling option would] stop legally suppressing it and start culturally encouraging it — because it’s here to stay. That’s our idea for encouraging a more democratic media for all of us, from corporations to the individual.~{ Glenn Otis Brown, “Mmm . . . Free Samples (Innovation la),” Creative Commons blog, March 11, 2003, at http://creativecommons.org/weblog/entry/ 3631. }~ + +With legal help from Cooley Godward Kronish and Wilson, Sonsini, Goodrich & Rosati, Creative Commons did just that. During its consultations with the remix community, Creative Commons learned that Gilberto Gil, the renowned /{tropicalismo}/ musician and at the time the Brazilian minister of culture, had been thinking along similar lines, and so it received valuable suggestions and support from him. +={Cooley Godward Kronish;Wilson, Sonsini, Goodrich & Rosati;Gil, Gilberto} + +In 2005, Creative Commons issued the Sampling license as a way to let people take pieces of a work for any purpose except advertising.~{ Creative Commons Web site, at http://creativecommons.org/about/sampling. See also Ethan Smith, “Can Copyright Be Saved?” /{Wall Street Journal}/, October 20, 2003. }~ It also prohibited copying and distribution of the entire work.~[* A “Sampling Plus” license was also issued to allow noncommercial copying and distribution of an entire work, which means it could be distributed via file-sharing networks. Finally, a “NonCommercial Sampling Plus” license was devised to let people sample and transform pieces of a work, and copy and distribute the entire work, so long as it was for noncommercial purposes.]~ For example, an artist could take a snippet of music, a clip of film, or a piece of a photograph, and use the sample in a new creation. Since its release, the Sampling license has been criticized on philosophical grounds by some commoners who say it does not truly enhance people’s freedom because it prohibits copying and distribution of the entire work. 
This concern reached serious enough proportions that in 2007 Creative Commons “retired” the license; I’ll revisit this controversy in chapter 9. + +The CC Sampling license only whetted the imagination of people who wanted to find new ways to sample, share, and transform music. Neeru Paharia, then the assistant director of the Creative Commons, came up with the idea of developing ccMixter, a software platform for remixing music on the Web.~{ See http://wiki.creativecommons.org/ccMixter. Interview with Mike Linksvayer, February 7, 2007, and Neeru Paharia, April 13, 2007. }~ Paharia realized one day that “this whole remixing and sharing ecology is about getting feedback on who’s using your work and how it’s evolving. That’s almost half the pleasure.”~{ Interview with Neeru Paharia, April 13, 2007. }~ So the organization developed a Web site that would allow people to upload music that could be sampled and remixed. The site has about five thousand registered users, which is not terribly large, but it is an enthusiastic and active community of remix artists that acts as a great proof of concept while promoting the CC licenses. There are other, much larger remix sites on the Internet, such as Sony’s ACIDplanet, but such sites are faux commons. They retain ownership in the sounds and remixes that users make, and no derivative or commercial versions are allowed. +={Paharia, Neeru} + +One feature of viral spirals is their propensity to call forth a jumble of new projects and unexpected partners. The CC licenses have done just that for music. ccMixter has joined with Opsound to offer a joint “sound pool” of clips licensed under an Attribution ShareAlike license. 
It also supports Freesound, a repository of more than twenty thousand CC-licensed samples ranging from waterfalls to crickets to music.~{ Neeru Paharia, “Opsound’s Sal Randolph,” Creative Commons blog, October 1, 2005, at http://creativecommons.org/audio/opsound; Mike Linksvayer, “Freesound,” Creative Commons blog, October 1, 2005, at http://creative commons.org/audio/freesound; Matt Haughey, “Free Online Music Booms as SoundClick Offers Creative Commons Licenses,” Creative Commons blog, August 11, 2004. }~ + +Runoff Records, Inc., a record label, discovered a remix artist who teaches physics and calculus and goes by the name of Minus Kelvin. Runoff heard a podcast of Kelvin’s CC-licensed music, and signed him up, along with another ccMixter contributor, to do music for three seasons of the television show /{America’s Next Top Model}/.~{ Neeru Paharia, “Minus Kelvin Discovered on ccMixter,” Creative Commons blog, May 17, 2005, at http://creativecommons.org/weblog/archive/2005/5. }~ A few months later, two ccMixter fans based in Poland and Holland started an online record label, DiSfish, that gives 5 percent of all sale proceeds to CC, another 5 percent to charity, with the remainder split between the label and the artist. All music on the label is licensed under CC.~{ Cezary Ostrowski from Poland and Marco Raaphorst from Holland met online at ccMixter and decided to go into business together. They started an online label called DiSfish. }~ + +The CC licenses are not just the province of daring remix artists and other experimentalists. Disappointed by its CD sales through traditional channels, the Philharmonia Baroque Orchestra released its performance of Handel’s 1736 opera, /{Atalanta}/, exclusively through the online record label Magnatune, using a CC license. 
Conductor Nicholas McGegan said the Internet “has potentially given the industry a tremendous shot in the arm,” letting orchestras reach “new audiences, including ones that are unlikely to hear you in person.”~{ Mia Garlick, “Classical Music Goes Digital (& CC),” May 3, 2006, at http://creativecommons.org/weblog/entry/5883. }~ A company that specializes in Catalan music collaborated with the Catalonian government to release two CDs full of CC-licensed music.~{ The Enderrock Group, a company that specializes in Catalan music and publishes three popular music magazines, released the two CDs, /{Música Lliure and Música Lliure II}/, free within the page of its magazines. See Margot Kaminski, “Enderrock,” Creative Commons Web site, January 17, 2007, at http://creativecommons.org/audio/enderrock. }~ A group of Gamelan musicians from central Java who perform in North Carolina decided to release their recordings under a CC license.~{ The group, Gamelan Nyai Saraswait, was blogged about by Matt Haughey on February 1, 2003, at http://creativecommons.org/weblog/entry/3599. }~ +={McGegan, Nicholas} + +Big-name artists have gotten into the licenses as well. DJ Vadim created a splash when he released all the original solo, individual instrumental, and a cappella studio tracks of his album /{The Sound Catcher}/ under an Attribution, NonCommercial license, so that remixers could have at it.~{ Victor Stone, “DJ Vadim Releases Album Tracks Under CC,” August 20, 2007, at http://creativecommons.org/weblog/entry/7619. }~ In 2004, /{Wired}/ magazine released a CD with sixteen tracks by the likes of David Byrne, Gilberto Gil, and the Beastie Boys. “By contributing a track to /{The Wired CD}/, these musicians acknowledge that for an art form to thrive, it needs to be open, fluid and alive,” wrote /{Wired}/. “These artists — and soon, perhaps, many more like them — would rather have people share their work than steal it.”~{ Thomas Goetz, “Sample the Future,” /{Wired}/, November 2004, pp.
181–83. }~ +={Byrne, David;Gil, Gilberto+1;DJ Vadim;Beastie Boys} + +Soon thereafter, Byrne and Gil went so far as to host a gala benefit concert for Creative Commons in New York City. In a fitting fusion of styles, Gil sang a Brazilian arrangement of Cole Porter’s cowboy song, “Don’t Fence Me In.” The crowd of 1,500 was high on the transcultural symbolism, said Glenn Brown: “Musical superstars from North and South, jamming together, building earlier works into new creations, in real time. Lawyers on the sidelines and in the audience, where they belong. The big Creative Commons logo smiling overhead.”~{ Glenn Otis Brown, “WIRED Concert and CD: A Study in Collaboration,” September 24, 2004, available at http://creativecommons.org/weblog/entry/ 4415. }~ The description captures the CC enterprise to a fault: the fusion of some clap-your-hands populism and hardheaded legal tools, inflected with an idealistic call to action to build a better world. +={Brown, Glenn Otis;Porter, Cole} + +By 2008 the power of open networks had persuaded the major record labels to abandon digital rights management of music CDs, and more major artists were beginning to venture forth with their own direct distribution plans, bypassing the standard record label deals. Prince, Madonna, and others found it more lucrative to run their own business affairs and deal with concert venues and merchandisers. In a major experiment that suggests a new business model for major music acts, Nine Inch Nails released its album /{Ghosts I-IV}/ under a Creative Commons NonCommercial ShareAlike license, and posted audio files of the album on its official Web site, inviting free downloads. It did not do advertising or promotion. Despite the free distribution — or because of it — the group made money by selling 2,500 copies of an “Ultra-Deluxe Limited Edition” of the album for $300; the edition sold out in less than three days. There were also nonlimited sales of a “deluxe edition” for $75 and a $10 CD. 
The scheme showed how free access to the music can be used to drive sales for something that remains scarce, such as a “special edition” CD or a live performance. One week after the album’s release, the Nine Inch Nails’ Web site reported that the group had made over $1.6 million from over 750,000 purchase and download transactions. Considering that an artist generally makes only $1.60 on the sale of a $15.99 CD, Nine Inch Nails made a great deal more money from a “free” album distribution than it otherwise would have made through a standard record deal.~{ See, e.g., Wikipedia entry, “Ghosts I-IV,” at http://en.wikipedia.org/wiki/ Ghosts_I-IV. }~ +={Nine Inch Nails} + +It is too early to know if Lessig’s “BMI strategy” will in fact catalyze a structural transformation in the entertainment industries. But Lessig apparently feels that it is the only feasible strategy. As he said in a 2006 speech, intensified hacking to break systems of proprietary control will not work; new campaigns to win progressive legislation won’t succeed within the next twenty years; and litigation is “a long-term losing strategy,” as the /{Eldred}/ case demonstrated. For Lessig and much of the free culture community, the long-term project of building one’s own open, commons-friendly infrastructure is the only enduring solution. +={BMI (Broadcast Music, Inc.);Eldred v. Reno/Eldred v. Ashcroft:effects of;Lessig, Lawrence:Eldred v. Reno, and|music, and+1} + +In the music industry, the early signs seem to support this approach. When digital guru Don Tapscott surveyed the events of 2006, he concluded that “the losers built digital music stores and the winners built vibrant communities based on music. The losers built walled gardens while the winners built public squares. 
The losers were busy guarding their intellectual property while the winners were busy getting everyone’s attention.” In a penetrating analysis in 2007, music industry blogger Gerd Leonhard wrote: “In music, it’s always been about interaction, about sharing, about engaging — not Sell-Sell-Sell right from the start. Stop the sharing and you kill the music business — it’s that simple. When the fan/user/listener stops engaging with the music, it’s all over.”~{ Gerd Leonhard, “Open Letter to the Independent Music Industry: Music 2.0 and the Future of Music,” July 1, 2007, at http://www.gerdleonhard.net/ 2007/07/gerd-leonhards.html. }~ +={Leonhard, Gerd;Tapscott, Don} + +Serious change is in the air when the producer/consumer dichotomy is no longer the only paradigm, and a vast network of ordinary people and talented creators are becoming active participants in making their own culture. They are sharing and co-creating. Markets are no longer so separate from social communities; indeed, the two are blurring into each other. Although we may live in a complicated interregnum between Centralized Media and distributed media, the future is likely to favor those creators and businesses who build on open platforms. As Dan Hunter and F. Gregory Lastowka write: “It is clear that two parallel spheres of information production exist today. One is a traditional, copyright-based and profit-driven model that is struggling with technological change. The second is a newly enabled, decentralized amateur production sphere, in which individual authors or small groups freely release their work.”~{ Dan Hunter and F. Gregory Lastowka, “Amateur-to-Amateur,” /{William and Mary Law Review}/ 46, no. 951 (December 2004), pp. 1029–30. }~ +={Hunter, Dan+1;Lastowka, F. 
Gregory} + +Hunter and Lastowka liken copyright law today to the Roman Empire in decline: “It is meaningless to ask whether the unitary might of imperial Rome was preferable to the distributed, messy agglomeration of tribes and states that eventually emerged after Rome fell. It was not better, just different.” That is certainly a debatable conclusion, depending upon one’s cultural tastes and sense of history. But the Rome metaphor does capture the fragmentation and democratization of creativity that is now under way. And that, in fact, is something of the point of the CC licenses: to make access and use of culture more open and egalitarian. For all his commitment to law and the CC licenses, Lessig ultimately throws his lot in with social practice: “Remember, it’s the /{activity}/ that the licenses make possible that matters, not the licenses themselves. The point is to change the existing discourse by growing a new discourse.”~{ Interview with Lawrence Lessig, September 14, 2006. }~ +={copyright law:decline of;Creative Commons (CC) licenses:social practice, and;Lessig, Lawrence:CC licenses, and} + +1~ 7 THE MACHINE AND THE MOVEMENT +={Creative Commons (CC):social movement, as+10} + +/{An infrastructure of code gives rise to a movement for free culture.}/ + +When the CC licenses were first launched, many regarded them as a boring legal license that may or may not really matter. The real surprise was how the CC licenses became a focal object for organizing a movement. As more users began to adopt the licenses in 2003 and 2004, they ceased being just a set of legal permissions and became a cool social brand. The CC licenses and logo became symbols of resistance against the highly controlled, heavily marketed, Big Brother worldview that Hollywood and the record industry seem to embody. The CC licenses offered a way to talk about one’s legal and creative rights in the Internet age, and to cite to a positive alternative — the sharing economy. 
With no paid advertising to speak of, the CC logo came to symbolize an ethic and identity, one that stood for artistic integrity, democratic transparency, and innovation. + +Glenn Otis Brown recalls how people spontaneously took up the license to express their anger at the media establishment and their yearning for a more wholesome alternative: “If you’re frustrated with the way the world works now, frustrated with the way the media is becoming more democratized but all these laws aren’t really facilitating that,” said Brown, “you can just cast a little virtual vote for a different sort of copyright system by putting the ‘Some Rights Reserved’ tag on your Web page. But also, practically, you can help create pools of content that people can work with and make it so much easier to participate.” Without really planning it, the Creative Commons became much more than a system of free licenses for sharing. It became a symbol for a movement. Communities of social practice began to organize themselves around the CC project. +={Brown, Glenn Otis:CC licensing, and+3;Creative Commons (CC) licenses:social practice, and} + +“Inside of the organization, we always talked about how we really had /{two}/ organizations,” said Brown. “One was Creative Commons, the /{movement}/; and one was Creative Commons, the /{machine}/.”~{ Interview with Glenn Otis Brown, June 9, 2006. }~ The machine was about meeting utilitarian needs through licenses and software; the movement was about motivating people and transforming culture. Just as the GPL had given rise to the free software community and a hacker political philosophy (which in turn inspired the Creative Commons’s organizers), so the CC licenses were spontaneously igniting different pockets of the culture: Web designers, bloggers, musicians, book authors, videographers, filmmakers, and amateurs of all stripes. The viral spiral was proceeding apace. 
+={Brown, Glenn Otis:CC as movement and “machine,”, and+3;Creative Commons (CC):growth of+3|“machine”, as+3;General Public License (GPL):free software, and+1;hackers:political philosophy} + +The tension between the machine and the movement has been an animating force in the evolution of the Creative Commons. “You want to have something that’s actually useful to people,” said Brown, “but you also have to get people excited about it, and build up your constituency.”~{ Ibid. }~ Some CC initiatives have had strong symbolic resonances but little practical value, while other initiatives were quite useful but not very sexy. For example, embedding CC metadata into software applications and Web services is complicated and technical — but highly effective in extending the practices of free culture. On the other hand, the Creative Commons’s release of specialty licenses for music sampling, developing nations, and a CC version of the General Public License for software (as discussed below) were discretionary moves of some utility that were probably more important as gestures of solidarity to allies. + +This has been a recurrent motif for the organization — pragmatic, improvisational outreach to distinct constituencies as part of a larger attempt to build a movement. There has always been a corresponding pull, however, “not to put ‘the machine’ at risk by incorporating the new licenses into every last one of our software tools,” said Brown. The integrity of “the machine” ultimately needs to be respected. + +Even as the machine was getting built, Lessig was taking steps to stoke up a movement. In 2004, Lessig published his third book in five years, /{Free Culture}/.
The book described, as the subtitle put it, “how big media uses technology and the law to lock down culture and control creativity.” Lessig’s earlier books, /{Code}/ and /{The Future of Ideas}/, had critiqued the alarming trends in copyright law, explained the importance of the commons, and set forth a philosophical rationale for what became the CC licenses. Now /{Free Culture}/ provided a wide-ranging survey of how incumbent industries with old business models — for recorded music, film, broadcasting, cable television — were (and are) curbing traditional creative freedoms and technological innovations. Drawing explicitly on the ideas of freedom developed by Richard Stallman in the 1980s, and upon legal history, politics, and colorful stories, Lessig argued that industry protectionism poses a profound harm to creators, business, and democratic culture — and that action needed to be taken. +={Lessig, Lawrence:Free Culture+2;Lessig, Lawrence:Code and Other Laws of Cyberspace|The Future of Ideas|CC licenses, and|law in contemporary context, and;Stallman, Richard:influence of|freedom, and} + +Although /{Free Culture}/ repeats many of the fundamental arguments made in his earlier books, Lessig’s arguments this time did not sound like a law professor’s or academic’s, but more like an activist trying to rally a social movement. “This movement must begin in the streets,” he writes. “It must recruit a significant number of parents, teachers, librarians, creators, authors, musicians, filmmakers, scientists — all to tell their story in their own words, and to tell their neighbors why this battle is so important. . . . We will not reclaim a free culture by individual action alone. It will take important reforms of laws. We have a long way to go before the politicians will listen to these ideas and implement these reforms. But that also means that we have time to build awareness around the changes that we need.”~{ Lawrence Lessig, /{Free Culture}/ (New York: Penguin, 2004), pp. 
275, 287. }~ The preeminent challenge for this would-be movement, Lessig wrote, is “rebuilding freedoms previously presumed” and “rebuilding free culture.” + +Lessig had reason to think that his analysis and exhortations would find receptive ears. He was now a leading voice on copyright and Internet issues, and well known through his earlier books, public speaking, and /{Eldred}/ advocacy. The launch of the Creative Commons was thrusting him into the spotlight again. Adoption of the CC licenses was steadily growing in 2003 and 2004 based on the most comprehensive sources at the time, search engines. Yahoo was reporting in September 2004 that there were 4.7 million links to CC licenses on the Web. This number shot up to 14 million only six months later, and by August 2005 it had grown to 53 million.~{ CC license statistics, on CC wiki page, at http://wiki.creativecommons.org/License_statistics. }~ These numbers offer only a crude estimate of actual license usage, but they nonetheless indicated a consistent trend. Usage was also being propelled by new types of Web 2.0 sites featuring user-generated content. For example, Flickr, the photo-sharing site, had 4.1 million photos tagged with CC licenses at the end of 2004, a number that has soared to an estimated 75 million by 2008. +={Lessig, Lawrence:CC licenses, and;Yahoo;Web 2.0:CC licenses, and;Creative Commons (CC) licenses:Web 2.0 environment, and} + +The decisive choice, four years earlier, to build a suite of licenses that could propagate themselves via open networks was bearing fruit. + +2~ Building the CC Machine +={Creative Commons (CC):“machine”, as+22} + +It was a pleasant surprise for the organization to learn that a great deal of individual usage of the CC licenses was fairly spontaneous. Persuading large companies and respected institutions to use the CC licenses was a more difficult proposition.
Lessig therefore spent a fair amount of time trying to get prominent institutions to adopt the licenses and give them some validation. Among the early converts were MIT, Rice University, Stanford Law School, and Sun Microsystems, supplemented by some relatively new organizations such as Brewster Kahle’s Internet Archive and the Public Library of Science, a publisher of open-access journals. + +Personal diplomacy can accomplish only so much, however, and in any case the Internet itself needed to be leveraged to disseminate the licenses and educate the public. One challenge, for example, was to introduce the CC licenses — which are not, after all, a self-evident need for most people — in a clear, compelling way. Most authors and artists have little idea what licenses they may want to choose, and their implications for how they might be able to sell or share works in the future. People needed a quick and easy way to make intelligent choices. It fell to Lisa Rein, the first technical director at CC, in late 2001, to develop a license-generating interface for the Web site. The quandary she faced was how to maximize user choice in selecting licenses while minimizing complexity. +={Rein, Lisa} + +The Web interface for the licenses has steadily improved over the years, but in a sense, those improvements have been offset by a growing complexity and number of CC licenses. Some critics have complained that the whole CC scheme can be a bit daunting. Yes, the licenses can ensure certain freedoms without your having to hire an attorney, which is clearly an improvement over relying on the fair use doctrine. But that does not mean that anyone can immediately understand the implications of using a NonCommercial or ShareAlike license for a given work. 
Any lurker on a CC listserv soon encounters head-scratching questions like “Can I use a BY-NC photo from Flickr on my blog if the blog is hosted by a company whose terms of service require me to grant them a worldwide, nonexclusive license to use any work hosted by their service, including for commercial use?” +={Creative Commons (CC) licenses:complexity of|fair use, and;fair use doctrine:CC licenses, and} + +By far the more important vehicle for promoting usage of the CC licenses has been software code. Lessig and the CC team realized that if the licenses could become an embedded element of leading search engines, Web publishing tools, and Web 2.0 platforms, it could powerfully promote license use. Integrating the code into existing Web sites and software can pose some serious technical challenges, however. Figuring out how to integrate the CC licenses with popular software applications, Web services, and digital file formats has fallen chiefly to Nathan Yergler, the chief technology officer of Creative Commons. Over the years, he and other CC developers have come up with a variety of applications to help make software infrastructures more friendly. One program that was developed, ccHost, is a content management system that has licensing and remix tracking built into its core. JsWidget is a simple javascript widget that developers can easily integrate into their sites to enable users to choose a license without leaving the site. Creative Commons has made it a standard practice to coordinate its work with technology volunteers, startup companies, and nonprofits with a stake in digitally enabling open licensing. It does this work through a CC development wiki, the cc-devel mailing list, Internet Relay Chat, World Wide Web Consortium working groups, and participation in Google’s annual “Summer of Code” program for student programmers. 
+={code:CC licenses, and+7;Creative Commons (CC) licenses:software code, and+7|Web 2.0 environment, and;Web 2.0:CC licenses, and;Google;software:CC licenses, and+7} + +Lessig and top CC staff have worked hard at convincing executives at major software enterprises to incorporate the CC licenses into a software application or Web site. One early triumph came when the makers of Movable Type, a blogging platform, agreed to make it easy for users to tack a CC license onto their blogs. Two months later, the O’Reilly empire of software blogs adopted the CC licenses. Then programmer Dave Winer embedded the licenses in his new Web log software in 2003. Blogs may not be core infrastructure for the Internet, but they are plentiful and popular, and have given Creative Commons enormous visibility and a high adoption curve. +={Lessig, Lawrence:CC licenses, and+6;O’Reilly, Tim;blogging;Winer, Dave} + +It had always been Lessig’s ambition that the major search engines would be reengineered to help people find CC-tagged content. To help prove that it could be done, Creative Commons built its own jerry-rigged search engine that retrieved content tagged with CC metadata. Lessig and Brown, meanwhile, made numerous diplomatic overtures to Google and Yahoo executives and software engineers. After two years of off-and-on conversations, both search engine companies agreed in 2005 to incorporate changes into their advanced searches so that users could locate CC-licensed content. (The Google advanced search does not use the Creative Commons name, but simply asks users if they want content that is “free to use or share,” among other options.) The search engine exposure was a serious breakthrough for Creative Commons’s visibility and legitimacy. +={Brown, Glenn Otis+2;Google;Yahoo} + +After a few years, the CC licenses were integrated into a number of other software platforms. 
It became possible to search for CC-licensed images (Flickr), video programs (blip.tv), music (Owl), and old Web content (Internet Archive, SpinXpress). With these search tools, Internet users had a practical way to locate blues tunes that could be remixed, photos of the Eiffel Tower that could be modified and sold, and articles about flower arrangements that could be legally republished. Advertisers, publishers, and other companies could search for images, songs, and text that could be licensed for commercial use. + +Lessig and Brown worked hard to get other major Web and software companies to make it easy for users to tag content with CC licenses. The ultimate goal was to make it easy for users to automate their preferences. Joi Ito, a Japanese venture capitalist and democratic reformer who became the chair of the Creative Commons’s board of directors in 2006, put it this way: “Every input device that you have, whether it’s a camera phone, a digital camera or PowerPoint software, should allow you to automatically set it to the CC license that you want. And the minute you take that picture, you’ve already expressed how you would want that picture to be used.” +={Ito, Joichi} + +Creative Commons also urged open-source software communities to incorporate CC-made software into their applications so that users can more easily tag content with the licenses or find licensed works. Firefox, for example, has integrated a Creative Commons search function into the drop-down menu of its browser search interface. It also has a plug-in module called MozCC that scans for any CC metadata as you browse Web pages, and then reports on the browser status bar how content is licensed. CC licenses have been integrated into other software as well, such as Songbird, a free software media player, and Inkscape, a free vector-graphics program similar to Adobe Illustrator.
+={open source software:CC licenses, and+2} + +Application by application, Web site by Web site, the Creative Commons board and staff have tried to insinuate the licenses into as many software applications and Web services as they could, in a kind of behind-the-scenes enactment of Lessig’s book /{Code}/. If code is law, then let’s write it ourselves! The diffusion of the licenses has tended to occur through personal connections of Lessig, CC board members, and friendly tech entrepreneurs and programmers. Joi Ito used his contacts at Sony to persuade it to develop a video remix Web site in Japan that uses CC licenses as the default choice. For Sony, the licenses help the company avoid any whiff of legal impropriety because users must stipulate whether their video remixes may be shared or not. +={code:as law;law:code as;Lessig, Lawrence:Code and Other Laws of Cyberspace;Ito, Joichi} + +In 2006, Microsoft went so far as to come out with a plug-in module for its Word program, enabling writers to tag their text documents with CC licenses. At the time, many CC fans grumbled at the hypocrisy of Microsoft, the five-hundred-pound gorilla of proprietary software, embracing the Creative Commons, even in such a modest way. But for Lessig and CC board members, any business that chooses to advance the reach of free culture — in this case, by accessing the 400 million users of Microsoft Office — is welcomed. While this ecumenical tolerance has made the Creative Commons a big-tent movement with an eclectic assortment of players, it has also provoked bitter complaints in free software and Wikipedia circles that the Creative Commons promotes a fuzzy, incoherent vision of “freedom” in the digital world (an issue to which I return in chapter 9). +={Microsoft:CC licenses, and} + +One vexing problem that CC developers confronted was how to digitally tag stand-alone files as CC-licensed work if they are not on the Web. 
How could one tag an MP3 file, for example, to show that the music is under a CC license? One problem with just inserting a CC tag onto the MP3 file is that anyone could fraudulently mark the file as CC-licensed. To prevent scams, Neeru Paharia, then CC assistant director, and other developers came up with a solution that requires any stand-alone digital files that are embedded with CC licenses to include a URL (Uniform Resource Locator) that links to a Web page verifying the assertions made on the file. +={Paharia, Neeru} + +The practice of embedding CC license information on digital files has been called /{digital rights expression}/ — a kind of benign analogue to digital rights management. The purpose is to embed information about the copyright status of a work /{in}/ the digital file. Unlike DRM, the goal is not to try to build an infrastructure for enforcing those rights or controlling how people may use a work. “Instead of using technology to ensure that the consumer can’t do anything with it,” said Mike Linksvayer, CC vice president and former chief technology officer, “we’re trying to use technology to ensure that people can find a CC-licensed work. If they’re looking, for instance, for music that can be remixed, then this information will help a search engine locate that information.”~{ Interview with Mike Linksvayer, February 7, 2007. }~ +={Linksvayer, Mike;digital rights expression;digital rights management (DRM)} + +Perhaps the neatest self-promotional trick that the Creative Commons has devised is to rely upon companies whose very business plans revolve around CC licenses. We will examine “open business” enterprises in chapter 10, but for now it is worth noting that a number of innovative companies use the licenses as a core element of their business strategy.
These enterprises include Flickr (photo sharing), Magnatune (an online record label), Jamendo (a Luxembourg-based music site), and Revver (a video-sharing site that shares advertising revenues with creators). + +Infrastructure grows old and occasionally needs to be updated and improved. The CC licenses have been no exception. As users have incorporated them into one medium after another, the unwitting omissions and infelicitous legal language of some parts of the licenses needed revisiting. After many months of discussions with many parts of the CC world, the Creative Commons issued a new set of 2.0 licenses in May 2004.~{ Glenn Otis Brown, “Announcing (and explaining) our new 2.0 licenses,” CC blog, May 25, 2004, at http://creativecommons.org/weblog/entry/4216. }~ They did not differ substantially from the original ones, and in fact the changes would probably bore most nonlawyers. For example, version 2.0 included a provision that allows a licensor to require licensees to provide a link back to the licensor’s work. The 2.0 licenses also clarify many complicated license options affecting music rights, and make clear that licensors make no warranties of title, merchantability, or fitness for use. Perhaps the biggest change in version 2.0 was the elimination of the choice of Attribution licenses. Since nearly 98 percent of all licensors chose Attribution, the Creative Commons decided to drop licenses without the Attribution requirement, thereby reducing the number of CC licenses from eleven to six. +={Creative Commons (CC) licenses:version 2.0 of} + +Another set of major revisions to the licenses was taken up for discussion in 2006, and agreed upon in February 2007.~{ Mia Garlick, “Version 3.0 Launched,” CC blog, http://creativecommons.org/weblog/entry/7249. }~ Once again, the layperson would care little for the debates leading to the changes, but considerable, sometimes heated discussion went into the revisions.
In general, the 3.0 tweaks sought to make the licenses clearer, more useful, and more enforceable. The issue of “moral rights” under copyright law — an issue in many European countries — is explicitly addressed, as are the complications of the CC licenses and collecting societies. New legal language was introduced to ensure that people who remix works under other licenses, such as the GNU Free Documentation License (FDL), would be able to also use CC-licensed materials in the same work — an important provision for preventing free culture from devolving into “autistic islands” of legally incompatible material. Besides helping align the CC world with Wikipedia (which uses the GNU FDL license), the 3.0 revisions also made harmonizing legal changes to take account of MIT and the Debian software development community. +={GNU Project:GNU FDL;copyright law:moral rights, and;Creative Commons (CC) licenses:version 3.0 of} + +By getting the CC licenses integrated into so many types of software and Web services, and even leveraging market players to embrace the sharing ethic, Creative Commons has managed to kill at least three birds with one stone. It has enlarged the universe of shareable Internet content. It has educated people to consider how copyright law affects them personally. And it has given visibility to its larger vision of free culture. +={copyright law:CC licenses, and;Creative Commons (CC) licenses:copyright law, and;Internet:communication system, as+1;Creative Commons (CC):growth of+2} + +In one sense, the CC “machine” composed of the licenses, the CC-developed software, and the CC-friendly protocol was the engine for change. In another sense, the influence that Creative Commons has acquired derives from the social communities that gradually began to use its infrastructure. The social practice infused power into the “machine” even as the machine expanded the social practice.
A virtuous cycle took hold, as the CC community used its self-devised legal and technological infrastructure to advance their shared cultural agenda. +={Creative Commons (CC):influence of} + +Driving this cycle was an ever-growing staff and new managers working out of offices in downtown San Francisco. Although Lessig has been the chief executive officer and chairman of the board of Creative Commons for most of its existence, most day-to-day operating responsibilities fell to executive director Glenn Otis Brown until his departure in 2005, and then to general counsel Mia Garlick, who left in 2007. (Both took jobs at Google.) Key executives at Creative Commons in 2008 included Mike Linksvayer, vice president; Eric Steuer, creative director; Diane Peters, general counsel; Nathan Yergler, chief technology officer; and Jennifer Yip, operations manager. The annual budget, which was $750,000 in 2003, had grown to $3.6 million in 2008 (a sum that included the Science Commons project). Much of this funding came from foundations such as the John D. and Catherine T. MacArthur Foundation, the William and Flora Hewlett Foundation, the Rockefeller Foundation, and Omidyar Network. +={Linksvayer, Mike;Brown, Glenn Otis:executive director, as;Peters, Diane;Steuer, Eric;Yergler, Nathan;Yip, Jennifer;John D. and Catherine T. MacArthur Foundation;Garlick, Mia;Lessig, Lawrence:CC licenses, and+2;Creative Commons (CC):funding of;Omidyar Network;Rockefeller Foundation;William and Flora Hewlett Foundation} + +Once the CC machine had secured its footing, Lessig and the CC staff paid close attention to the movement — the social communities that find utility and meaning through Creative Commons— and to developing new software and projects that these early adopters would welcome. In 2006, the organization hit upon the idea of hosting a series of “salons” in major cities. The gatherings have become a big success, and are now replicated in cities throughout the world. 
Artists talk about how they use CC licenses; entrepreneurs explain how their business models work; remix artists perform their work. The events, free and open to the public, combine testimonials about free culture, personal networking, entrepreneurial idea-mongering, live performances, and partying. The CC crowd seems to enjoy partying; they do it well. Every December, there are gala anniversary parties in groovy San Francisco hot spots. There have been virtual parties in the immersive online world, Second Life. Because CC users tend to include some of the most adventurous artistic talent and eclectic innovators around — people who know where the truly cool night spots are — CC parties tend to be lively, good times. The parties in Rio and Dubrovnik, at the iCommons Summits, were memorable international happenings, for example — occasions, as one self-styled Lothario boasted to me, “where a guy could dance with a woman from every continent of the world in a single evening.” +={Creative Commons (CC):social movement, as+2|CC Salons} + +Add to the mix tech-oriented college students, another key sector of free culture activism, and there is even more youthful energy. Hundreds of college students participate in a nationwide student organization, FreeCulture.org, later renamed Students for Free Culture. The group got its start in 2004 when some students at Swarthmore College began investigating the reliability of Diebold electronic voting machines; the company invoked copyright law in an attempt to keep the problems secret, leading to a public confrontation that Diebold lost. Nelson Pavlosky and Luke Smith, who were also inspired by Lessig’s advocacy, co-founded the group, which has since spawned over thirty quasi-autonomous chapters on campuses across the United States and a few foreign nations. The organization tries to be a grassroots force on Internet, digital technology, and copyright issues. 
It has mounted protests against CDs with digital rights management, for example, and hosted film remixing contests and exhibits of CC-licensed art at NYU and Harvard. Students for Free Culture also organized a “no-profit record company/recording collective,” the Antenna Alliance, which gave bands free recording space and distributed their CC-licensed music to college radio stations. +={Pavlosky, Nelson;Smith, Luke;Students for Free Culture} + +We have looked at the machine and many parts of the movement, but not at one of the most significant forces fueling Creative Commons — the dozens of national projects to adapt the licenses to legal systems around the world. The long-term reverberations of this movement — which includes activists in Brazil, Croatia, South Africa, Egypt, Peru, Scotland, and dozens of other countries — are only beginning to be felt. + +1~ 8 FREE CULTURE GOES GLOBAL +={Creative Commons International+79} + +/{The commoners mount a transnational mobilization to build their own digital commons.}/ + +It is a measure of Lessig’s ambition for Creative Commons that only five months after the release of the licenses, in April 2003, he instigated a move to take the idea global. Glenn Brown remembers objecting, “I don’t know how we’re going to get this done! Larry was like, ‘We have no other choice. We /{have}/ to do this. This needs to be an international organization.’”~{ Interview with Glenn Otis Brown, June 9, 2006. }~ +={Boyle, James:CC International, and+1;Lessig, Lawrence:CC International, and+1} + +Professor James Boyle, a board member, was aghast. “That’s the stupidest thing I’ve ever heard,” he said upon hearing the idea. “I was practically foaming at the mouth,” he recalled, noting that it was “just insane” to try to adapt the licenses to the mind-boggling complexities of copyright laws in scores of nations.~{ Interview with James Boyle, August 15, 2006.
}~ But Lessig, determined to make the Creative Commons an international project, proceeded to hire Christiane Asschenfeldt (now Christiane Henckel von Donnersmarck), a Berlin-based copyright lawyer whom he had met the previous summer at an iLaw (Internet Law) conference in Cambridge, Massachusetts. He charged her with helping project leaders in different countries adapt the licenses (or, in computerese, “port” them) to their respective national legal codes. +={Asschenfeldt, Christiane+1;copyright law:international} + +Asschenfeldt set about inventing a system for gathering teams of volunteers, usually associated with a law school or technology institute, to become CC affiliates. Once an affiliate institution and project lead are chosen, the project lead produces a first draft of the licenses, which then undergoes public discussion, rewriting, and a final review by the new international arm of Creative Commons, CC International.~{ The procedures for porting a CC license to another jurisdiction are outlined in a document, “Welcome to Creative Commons International,” undated, at http://wiki.creativecommons.org/Worldwide_Overview. }~ (Confusingly, this project was originally called “iCommons,” a name that in 2006 was reassigned to a new CC spinoff group that convenes the international free culture movement.) +={free culture:international} + +In a pre-Internet context, the whole idea of creating a new international license architecture and network of legal experts might seem ridiculously unrealistic. But by 2003 there were enough examples of “distributed intelligence” popping up that it no longer seemed so crazy to think that a passionate corps of dispersed volunteers could collaborate as catalysts for change. In any case, following the /{Eldred}/ defeat, Lessig and Brown came to believe, as discussed earlier, that the Creative Commons needed to be both a machine and a movement.
+={Brown, Glenn Otis:CC as movement and “machine,”, and;Creative Commons (CC):“machine”, as|social movement, as;Eldred v. Reno/Eldred v. Ashcroft:effects of;Lessig, Lawrence:Eldred v. Reno, and;Lessig, Lawrence:CC International, and+1} + +Going international with the licenses offered an appealing way to grow both simultaneously without forcing unpleasant trade-offs between the two, at least initially. Drafting the licenses for a country, for example, helps convene top lawyers committed to the idea of legal sharing and collaboration while also mobilizing diverse constituencies who are the potential leaders of a movement. + +According to Jonathan Zittrain, an early collaborator on the project and a board member, Creative Commons at the international level is more of a “persuasive, communicative enterprise than a legal licensing one.”~{ Interview with Jonathan Zittrain, September 28, 2006. }~ It is a vehicle for starting a process for engaging public-spirited lawyers, law scholars, and all manner of creators. The licenses do have specific legal meanings in their respective legal jurisdictions, of course, or are believed to have legal application. (Only three courts, in the Netherlands and Spain, have ever ruled on the legal status of the CC licenses. In two instances the courts enforced the licenses; in the other case, in which the defendant lost, the validity of the licenses was not at issue.)~{ The most famous court case involving the CC licenses is /{A. Curry v. Audax/Weekend}/, in which Adam Curry sued the publishers of a Dutch tabloid magazine and two senior editors for using four photos of his family on his Flickr account that had been licensed under a BY-NC-SA license. See http://creativecommons.org/weblog/entry/5944 and http://creativecommons.org/weblog/entry/5823. A District Court of Amsterdam upheld Curry’s usage of the CC licenses in a March 9, 2006, decision; see http://mir rors.creativecommons.org/judgements/Curry-Audax-English.pdf. 
There have been two Spanish cases involving CC licenses. In both cases, a collecting society, the Sociedad General de Autores y Editores (SGAE), sued cafés for playing “free music” licensed under CC licenses; SGAE claimed that it was owed royalties for the public performance of music because artists cannot legally apply a CC license to their work (or even release it online) without the consent of their collecting society. In both instances, the cases turned on evidentiary issues, not on the enforceability of CC licenses. See http:// creativecommons.org/weblog/entry/5830 and http://creativecommons.org/ weblog/entry/7228. }~ Apart from their legal meaning, the licenses’ most important function may be as a social signaling device. They let people announce, “I participate in and celebrate the sharing economy.” The internationalization of the CC licenses has also been a way of “localizing” the free culture movement. +={Zittrain, Jonathan} + +The first nation to port the CC licenses was Japan. This was partly an outgrowth of a five-month sabbatical that Lessig had spent in Tokyo, from late 2002 through early 2003. There were already stirrings of dissatisfaction with copyright law in Japan. Koichiro Hayashi, a professor who had once worked for the telecom giant NTT, had once proposed a so-called d-mark system to allow copyright owners to forfeit the statutory term of copyright protection and voluntarily declare a shorter term for their works. In the spring of 2003, a team of Japanese lawyers associated with a technology research institute, the Global Communications Center (GLOCOM), working with CC International in Berlin, set about porting the licenses to Japanese law. 
+={Creative Commons International:Japan+2;Global Communications Center (GLOCOM);Hayashi, Koichiro} + +Yuko Noguchi, a former Lessig student and lawyer who later became the legal project lead, explained that the CC licenses are a culturally attractive way for Japanese to address the structural problems of copyright law. Japan is a country that prizes harmony and dislikes confrontation. The licenses offer a way to promote legal sharing without forcing bitter public policy conflicts with major content industries.~{ Interview with Yuko Noguchi, September 12, 2007. }~ (Partly for such reasons, CC Japan shifted its affiliation to the University of Tokyo in 2006.) In a culture that enjoys the sharing of comics, animation, haiku, and other works, the CC Japan licenses, launched in January 2004, have been used by a diverse range of artists and companies. +={Noguchi, Yuko} + +During his sojourn in Japan, Lessig had a fateful meeting with Joichi Ito, who in many ways embodies the tech sophistication, democratic zeal, and cosmopolitan style of the international Creative Commons movement. Widely known as Joi (pronounced “Joey”), Ito, forty-two, was born in Japan and educated in the United States. Disaffected with formal education in the U.S., where he studied computer science and physics, he dropped out and began his highly unusual career in Japan as an activist, entrepreneur, and venture capitalist. He has worked as a nightclub disc jockey, and brought industrial music and the rave scene to Japan, but he has also become a talented venture capitalist and early stage investor in such companies as Six Apart, Technorati, Flickr, SocialText, Dopplr, and Rupture. Lessig and Ito became close friends; Ito later joined the Creative Commons board. He was appointed chairman of the board in 2007 and then, in 2008, he became chief executive officer when Lessig left to start a congressional reform project. Duke law professor James Boyle, a board member, replaced Ito as chairman. 
+={Ito, Joichi+1;Boyle, James:as chairman} + +Once it went public, the very idea of Creative Commons attracted many other people like Ito to its ranks: educated, tech-savvy, culturally fluent, activist-minded. In fact, following the American launch of Creative Commons, volunteers from many countries began to approach the organization, asking if they could port the licenses to their own legal systems. Finland became the second nation to adopt the licenses, in May 2004, followed a month later by Germany. In Europe, the early adopters included Denmark, Hungary, Scotland, Slovenia, Sweden, and Malta. In South America, CC licenses were introduced in Argentina, Chile, and Peru. In Asia, Malaysia and China ported the licenses, as did Australia. Israel was the first Middle Eastern country to port the licenses. + +As each jurisdiction introduces its licenses, it typically hosts a gala public event to celebrate and publicize free culture. News media and government officials are invited. There are panel discussions about copyright law and digital culture; performances by musicians who use the licenses; and endorsements by prominent universities, cultural institutions, and authors. Lessig has made it a practice to fly in and deliver an inspirational speech. Few international launches of CC licenses have been more spectacular or consequential than the one staged by Brazil in March 2004. +={Lessig, Lawrence:public speaker, as|CC International, and} + +2~ Brazil, the First Free Culture Nation +={Brazil:free culture in+23;Creative Commons International:Brazil+23} + +Luiz Inácio Lula da Silva had just been elected president of Brazil, and he was eager to stake out a new set of development policies to allow his nation to plot its own economic and cultural future. His government, reflecting his electoral mandate, resented the coercive effects of international copyright law and patent law. 
To tackle some of these issues on the copyright front, President Lula appointed Gilberto Gil, the renowned singer-songwriter, as his minister of culture. +={Lula da Silva, Luiz Inácio;Gil, Gilberto+11} + +Gil became a revered cultural figure when he helped launch a new musical style, /{tropicalismo}/, in the late 1960s, giving Brazil a fresh, international cachet. The music blended national styles of music with pop culture and was inflected with political and moral themes. As one commentator put it, /{tropicalismo}/ was “a very ’60s attempt to capture the chaotic, swirling feel of Brazil’s perennially uneven modernization, its jumble of wealth and poverty, of rural and urban, of local and global. . . . They cut and pasted styles with an abandon that, amid today’s sample-happy music scene, sounds up-to-the-minute.”~{ Wikipedia entry, “Tropicalismo,” at http://en.wikipedia.org/wiki/Tropicalismo. }~ The military dictatorship then running the government considered /{tropicalismo}/ sufficiently threatening that it imprisoned Gil for several months before forcing him into exile, in London. Gil continued writing and recording music, however, and eventually returned to Brazil.~{ For a history of Gil, see his personal Web site at http://www.gilbertogil.com.br/index.php?language=en; the Wikipedia entry on him at http://en.wikipedia.org/wiki/Gilberto_Gil; and Larry Rohter, “Gilberto Gil Hears the Future, Some Rights Reserved,” /{New York Times}/, March 11, 2007. }~ + +This history matters, because when Gil was appointed culture minister, he brought with him a rare political sophistication and public veneration. His moral stature and joyous humanity allowed him to transcend politics as conventionally practiced. “Gil wears shoulder-length dreadlocks and is apt to show up at his ministerial offices dressed in the simple white linens that identify him as a follower of the Afro-Brazilian religion /{candomblé}/,” wrote American journalist Julian Dibbell in 2004.
“Slouching in and out of the elegant Barcelona chairs that furnish his office, taking the occasional sip from a cup of pinkish herbal tea, he looks — and talks — less like an elder statesman than the posthippie, multiculturalist, Taoist intellectual he is.”~{ Julian Dibbell, “We Pledge Allegiance to the Penguin,” /{Wired}/, November 2004, at http://www.wired.com/wired/archive/12.11/linux_pr.html. }~ +={Dibbell, Julian+1} + +As luck had it, Dibbell — author of the article on cyber-rape that had enticed Lessig to investigate digital culture in the first place (see chapter 3) — was living in Rio at the time. He was friendly with Hermano Vianna, a prominent intellectual who knew Gil and was deeply into the music scene and digital technology. Between Dibbell and Vianna, a flurry of introductions was made, and within months Larry Lessig, John Perry Barlow, and Harvard law professor William Fisher were sitting with Gil, Vianna, and Dibbell in Gil’s Rio de Janeiro penthouse across from the beach.~{ Ibid. }~ Lessig’s mission was to pitch the Creative Commons licenses to Gil, and in particular, get Gil’s thoughts about a new CC Sampling license that would let musicians authorize sampling of their songs. +={Fisher, William (Terry);Vianna, Hermano+1;Lessig, Lawrence:CC International, and+3;Barlow, John Perry:CC International, and} + +“Gil knew that sampling was a central driving power for contemporary creativity well before digital instruments came along,” recalled Vianna. "/{Tropicalismo}/ was all about sampling different ideas and different cultures. /{Tropicalismo}/ was about juxtapositions, not fusions, and in this sense was heir to a long tradition of Brazilian modern thought and art that began with the cultural anthropology of the early modernists, in the 1920s and 1930s, and can be traced back to all debates about Brazilian identity in the 20th century."~{ E-mail from Hermano Vianna, January 8, 2007. }~ + +Lessig did not need to argue his case. 
Gil immediately understood what Creative Commons was trying to accomplish culturally and politically. He was enthusiastic about CC licenses, the proposed Sampling license, and the prospect of using his ministry to advance a vision of free culture. + +By further coincidence, Ronaldo Lemos da Silva, then a Brazilian law student who has been described as a “Lessig of the Southern Hemisphere,” had just completed his studies at Harvard Law School. He was well acquainted with Creative Commons and was considering his future when friends at the Fundação Getulio Vargas (FGV), a Rio de Janeiro university, urged him to join them in founding a new law school. The school would host a new Center for Technology and Society to study law and technology from the perspective of developing nations like Brazil. Lemos accepted, and the center soon became the host for CC Brazil and myriad free culture projects. +={Lemos da Silva, Ronaldo} + +This alignment of intellectual firepower, artistic authority, and political clout was extraordinary — and a major coup for Creative Commons. The culture minister of the world’s fifth-largest country and tenth-largest economy — whose own forty-year career was based on a remix sensibility — became a spirited champion of the CC licenses and free culture. Unlike most culture ministers, who treat culture chiefly as an aesthetic amenity, Gil took the economic and technological bases of creativity seriously. He wanted to show how creativity can be a tool for political and cultural emancipation, and how government can foster that goal. It turned out that Brazil, with its mix of African, Portuguese, and indigenous cultures and its colorful mix of vernacular traditions, was a perfect laboratory for such experimentation. 
+ +One of the first collaborations between Creative Commons and the Brazilian government involved the release of a special CC-GPL license in December 2003.~{ Creative Commons press release, “Brazilian Government First to Adopt New ‘CC-GPL,’ ” December 2, 2003. }~ This license adapted the General Public License for software by translating it into Portuguese and putting it into the CC’s customary “three layers” — a plain-language version, a lawyers’ version compatible with the national copyright law, and a machine-readable metadata expression of the license. The CC-GPL license, released in conjunction with the Free Software Foundation, was an important international event because it gave the imprimatur of a major world government to free software and the social ethic of sharing and reuse. Brazil has since become a champion of GNU/Linux and free software in government agencies and the judiciary. It regards free software and open standards as part of a larger fight for a “development agenda” at the World Intellectual Property Organization and the World Trade Organization. In a related vein, Brazil has famously challenged patent and trade policies that made HIV/AIDS drugs prohibitively expensive for thousands of sick Brazilians. +={free software:international licensing, and+1;GNU/Linux:Brazil, in;World Trade Organization;World Intellectual Property Organization;open networks:international} + +When the full set of CC Brazil licenses was finally launched — at the Fifth International Free Software Forum, in Porto Alegre on June 4, 2004 — it was a major national event. Brazilian celebrities, government officials, and an enthusiastic crowd of nearly two thousand people showed up. Gil, flying in from a cabinet meeting in Brasília, arrived late.
When he walked into the auditorium, the panel discussion under way immediately stopped, and there was a spontaneous standing ovation.~{ A ten-minute video of the CC Brazil opening can be seen at http://support.creativecommons.org/videos#brasil. }~ “It was like a boxer entering the arena for a heavyweight match,” recalled Glenn Otis Brown. “He had security guards on both sides of him as he walked up the middle aisle. There were flashbulbs, and admirers trailing him, and this wave of people in the audience cresting as he walked by.”~{ Interview with Glenn Otis Brown, August 10, 2006. }~ +={Brown, Glenn Otis:CC International, and+1} + +Gil originally planned to release three of his songs under the new CC Sampling license — dubbed the “Recombo” license — but his record label, Warner Bros., balked. He eventually released one song, “Oslodum,” that he had recorded for an indie label. “One way to think about it,” said Brown, “is that now, anybody in the world can jam with Gilberto Gil.”~{ Film about CC Brazil launch, at http://support.creativecommons.org/videos#brasil. }~ + +As culture minister, Gil released all materials from his agency under a CC license, and persuaded the Ministry of Education as well as Radiobrás, the government media agency, to do the same. He also initiated the Cultural Points (Pontos de Cultura) program, which has given small grants to scores of community centers in poor neighborhoods so that residents can learn how to produce their own music and video works. Since industry concentration and payola make it virtually impossible for newcomers to get radio play and commercially distribute their CDs, according to many observers, the project has been valuable in allowing a fresh wave of grassroots music to “go public” and reach new audiences. + +For developing countries, the real challenge is finding ways to tap the latent creativity of the “informal” economy operating on the periphery of formal market systems.
Brazil is rich with such creative communities, as exemplified by the flourishing /{tecnobrega}/ music scene in the northeast and north regions of Brazil. Ronaldo Lemos says that /{tecnobrega}/ — “a romantic sound with a techno-beat and electronica sound”~{ Interview with Ronaldo Lemos da Silva, September 15, 2006. }~ —arose on the fringes of the mainstream music marketplace through “sound system parties” attended by thousands of people every weekend. Local artists produce and sell about four hundred new CDs every year, but both the production and distribution take place outside the traditional music industry. The CDs can’t be found in retail stores but are sold entirely by street vendors for only $1.50. The CDs serve as advertising for the weekend parties. The music is “born free” in the sense that the /{tecnobrega}/ scene doesn’t consider copyrights as part of its business model and does not enforce copyrights on their CDs; it invites and authorizes people to share and reuse the content.~{ The /{tecnobrega}/ scene is described by Ronaldo Lemos in “From Legal Commons to Social Commons: Developing Countries and the Cultural Industry in the 21st Century,” http://icommons.org/banco/from-legal-commons-tosocial-commons-brazil-and-the-cultural-industry-1. }~ (The /{tecnobrega}/ business model is discussed at greater length in chapter 10.) +={Lemos da Silva, Ronaldo+6} + +Lemos believes the CC licenses are an important tool for helping grassroots creativity in Brazil to “go legitimate.” He explains, “Creative Commons provides a simple, non-bureaucratic structure for intellectual property that might help to integrate the massive marginal culture that is arising in the peripheries, with the ‘official,’ ‘formal’ structures of the Brazilian economy.”~{ Ibid. }~ Freed of the blockbuster imperatives of the current music market, the CC licenses allow creativity in the informal “social commons” to flow — yet not be appropriated by commercial vendors. 
People can experiment, generate new works, and learn what resonates with music fans. All of this is a predicate for building new types of open markets, says Lemos. /{Tecnobrega}/ is just one of many open-business models that use the free circulation of music to make money. + +Since its launch in June 2004, Lemos and the CC Brazil office have instigated a number of projects to demonstrate how sharing and collaboration can spur economic and cultural development. They have promoted free software and open business models for music and film and started collaborations with allies in other developing nations. Nigerian filmmakers inspired the People’s Cinema in Brazil, a project to help people use audio-video technology to produce their own films and develop audiences for them. The /{culture-livre}/ (free culture) project, a joint effort of Creative Commons in Brazil and South Africa, is using the ccMixter software to encourage young musicians to mix traditional African instruments with contemporary sensibilities, and launch their careers.~{ http://www.ccmixter.co.za. }~ + +In Brazil, there are open-publishing projects for scientific journals;~{ http://www.scielo.br. }~ a Web site that brings together a repository of short films;~{ http://www.portacurtas.com.br. }~ and Overmundo, a popular site for cultural commentary by Internet users.~{ http://www.overmundo.com.br }~ TramaVirtual, an open-platform record label that lets musicians upload their music and fans download it for free, now features more than thirty-five thousand artists.~{ http://tramavirtual.uol.com.br. }~ (By contrast, the largest commercial label in Brazil, Sony-BMG, released only twelve CDs of Brazilian music in 2006, according to Lemos.) + +“Cultural production is becoming increasingly disconnected from traditional media forms,” said Lemos, because mass media institutions “are failing to provide the adequate incentives for culture to be produced and circulated. . . .
Cultural production is migrating to civil society and/or the peripheries, which more or less already operate in a ‘social commons’ environment, and do not depend on intellectual property within their business models.”~{ Ronaldo Lemos, “From Legal Commons to Social Commons: Developing Countries and the Cultural Industry in the 21st Century,” http://icommons .org/banco/from-legal-commons-to-social-commons-brazil-and-the-culturalindustry-1. }~ + +As more people have adopted legal modes of copying and sharing under CC licenses, it is changing the social and political climate for copyright reform. Now that CC Brazil can cite all sorts of successful free culture ventures, it can more persuasively advocate for a Brazilian version of the fair use doctrine and press for greater photocopying privileges in educational settings (which are legally quite restrictive). +={free culture:international+2} + +Although the CC licenses are now familiar to many Brazilians, they have encountered some resistance, mostly from lawyers. “Among all other audiences — musicians, artists, writers — they were extremely well received,” said Lemos. When he presented the CC licenses to an audience of three hundred lawyers, however, he recalls that a famous law professor publicly scoffed: “You’re saying this because you’re young, foolish, and communist.” Three years later, Lemos discovered that the professor was using his intellectual property textbook in her class. + +As a unique global ambassador of creative sharing, Gilberto Gil did a lot to take the CC licenses to other nations and international forums such as the World Intellectual Property Organization. The day before his 2004 benefit concert for the Creative Commons in New York City with David Byrne, Gil delivered a powerful speech explaining the political implications of free culture: +={Byrne, David;Gil, Gilberto+3;World Intellectual Property Organization} + +_1 A global movement has risen up in affirmation of digital culture. 
This movement bears the banners of free software and digital inclusion, as well as the banner of the endless expansion of the circulation of information and creation, and it is the perfect model for a Latin-American developmental cultural policy (other developments are possible) of the most anti-xenophobic, anti-authoritarian, anti-bureaucratizing, anti-centralizing, and for the very reason, profoundly democratic and transformative sort.~{ Gil remarks at New York University, September 19, 2004, at http://www .nyu.edu/fas/NewsEvents/Events/Minister_Gil_speech.pdf. }~ + +The Brazilian government was making digital culture “one of its strategic public policies,” Gil said, because “the most important political battle that is being fought today in the technological, economic, social and cultural fields has to do with free software and with the method digital freedom has put in place for the production of shared knowledge. This battle may even signify a change in subjectivity, with critical consequences for the very concept of civilization we shall be using in the near future.”~{ Ibid. }~ + +To advance this new paradigm, Gil, who left his post as culture minister in 2008, called for the rise of “new creative /{mestizo}/ [hybrid] industries” that break with the entrenched habits of the past. Such businesses “have to be flexible and dynamic; they have to be negotiated and re-negotiated, so that they may contemplate the richness, the complexity, the dynamism and the speed of reality itself and of society itself, without becoming impositions.”~{ Ibid. }~ + +2~ National Variations of a Global Idea + +When it comes to free culture, Brazil is clearly a special case. But citizens in more than seventy nations have stepped forward to build a CC presence in their societies. Each has shown its own distinctive interests. 
+ +Tomislav Medak, a philosopher by training and a copyfighter by circumstance, runs the Multimedia Institute in Zagreb, Croatia, a cultural center that consists mostly of a performance space, a lounge, and a café. The organization survives on donations from the likes of George Soros’s Open Society Institute, but it thrives because it is the gathering place for an avant-garde corps of electronic musicmakers, publishers, performers, and hackers. Mainstream Croats would probably describe the community as a bunch of “cyberSerbian-gay-Communists,” said Medak, which he concedes is not inaccurate.~{ Interview with Tomislav Medak, CC Croatia, June 25, 2006. }~ But the institute is not just a coalition of minority interests; it is also a broad-spectrum champion of cultural freedom. It sees free software, civil liberties, and artists’ rights as core elements of a democratic society that it would like to build. +={Creative Commons International:Croatia+1;Croatia:CC licenses in+1;Medak, Tomislav+2;Open Society Institute;Soros, George} + +The Multimedia Institute was understandably excited when it learned about Creative Commons and Lessig’s vision of free culture. With help from some lawyer friends, the institute in January 2004 ported the CC licenses to Croatian law, primarily as a way to empower artists and counteract the dominance of corporate media and expansive copyright laws. “We are a country where the IP framework is very young, and most of the policies are protection-driven. Most policies are dictated by official institutions that just translate international documents into local legislation,” Medak said.~{ Ibid. }~ This commercial/copyright regime tends to stifle the interests of emerging artists, amateurs, consumers and local culture. +={Lessig, Lawrence:CC International, and}
The privatization process and the colonizing of cultural spaces have been blatant over the last couple of years, especially in Zagreb. So the Creative Commons has fit into a larger effort to try to recapture some of those public needs that were available, at least ideologically, in socialist societies. Now they are for real.”~{ Ibid. }~ Medak has since gone on to become a leader of iCommons and the host of the international iCommons Summit in 2007, which brought several hundred commoners from fifty nations to Dubrovnik. + +In Scotland, government and other public-sector institutions have been huge fans of the CC licenses. In fact, museums, archives, and educational repositories have been the primary advocates of the CC Scotland licenses, says Andrés Guadamuz, a law professor at the Research Centre for Studies in Intellectual Property and Technology Law at the University of Edinburgh. “People who want to try to share information in the public sector are turning to Creative Commons because they realize that here is a license that is already made.”~{ Interview with Andrés Guadamuz of CC Scotland, December 19, 2006. }~ +={Guadamuz, Andrés;Scotland:CC licenses in;Creative Commons International:Scotland} + +The BBC was a pioneer in making its archived television and radio programs available to the public for free. In 2003, inspired by the CC licenses, the BBC drafted its own “Creative Archive” license as a way to open up its vast collection of taxpayer-financed television and radio programs.~{ See http://news.bbc.co.uk/2/hi/help/4527506.stm, and interview with Paula Le Dieu, joint director of the BBC Creative Archive project, May 28, 2004, at http://digital-lifestyles.info/2004/05/28/exclusive-providing-the-fuel-fora-creative-nation-an-interview-with-paula-le-dieu-joint-director-on-the-bbccreative-archive. }~ The license was later adopted by Channel 4, the Open University, the British Film Institute, and the Museum, Libraries and Archives Council. 
Although the Creative Archive license has similar goals to the CC licenses, it contains several significant differences: it restricts use of video programs to United Kingdom citizens only, and it prohibits use of materials for political or charitable campaigns and for any derogatory purposes. +={BBC} + +The CC licenses have proven useful, also, to the British Museum and National Archives. In 2004, these and other British educational institutions were pondering how they should make their publicly funded digital resources available for reuse. A special government panel, the Common Information Environment, recommended usage of the CC licenses because they were already international in scope. The panel liked that the licenses allow Web links in licensed materials, which could help users avoid the complications of formal registration. The panel also cited the virtues of “human readable deeds” and machine-readable metadata.~{ Intrallect Ltd and AHRC Research Centre for Studies in Intellectual Property and Technology Law, University of Edinburgh, “The Common Information Environment and Creative Commons,” October 10, 2005, at http://www.intrallect.com/index.php/intrallect/content/download/632/2631/file/CIE_CC_Final_Report.pdf. }~ + +As it happened, a team of Scottish legal scholars led by a private attorney, Jonathan Mitchell, successfully ported the licenses and released them a few months later, in December 2005. The Scottish effort had been initiated a year earlier when Mitchell and his colleagues objected that the U.K. CC licenses then being drafted were too rooted in English law and not sufficiently attuned to Scottish law. Since the introduction of the CC Scotland licenses, public-sector institutions have enthusiastically embraced them. Museums use the licenses on MP3 files that contain audio tours, for example, as well as on Web pages, exhibition materials, and photographs of artworks.
Interestingly, in England and Wales, individual artists and creative communities seem to be more active than public-sector institutions in using the licenses. +={Scotland:CC licenses in;Creative Commons International:Scotland;Mitchell, Jonathan} + +The use of CC licenses for government information and publicly funded materials is inspiring similar efforts in other countries. Governments are coming to realize that they are one of the primary stewards of intellectual property, and that the wide dissemination of their work — statistics, research, reports, legislation, judicial decisions — can stimulate economic innovation, scientific progress, education, and cultural development. Unfortunately, as Anne Fitzgerald, Brian Fitzgerald, and Jessica Coates of Australia have pointed out, “putting all such material into the public domain runs the risk that material which is essentially a public and national asset will be appropriated by the private sector, without any benefit to either the government or the taxpayers.”~{ iCommons annual report, 2007, http://www.icommons.org/annual07. }~ For example, the private sector may incorporate the public-domain material into a value-added proprietary model and find other means to take the information private. The classic instance of this is West Publishing’s dominance in the republishing of U.S. federal court decisions. Open-content licenses offer a solution by ensuring that taxpayer-financed works will be available to and benefit the general public. +={Coates, Jessica;Fitzgerald, Anne;Fitzgerald, Brian;West Publishing}
The European Commission announced in 2007 that it plans to build a major open-access digital repository for publicly funded research.~{ Michael Geist, “Push for Open Access to Research,” BBC News, February 28, 2007, at http://news.bbc.co.uk/go/pr/fr/~/2/hi/technology/6404429. }~ In Mexico, the Sistema Internet de la Presidencia, or Presidency Internet System (SIP), decided in 2006 to adopt CC licenses for all content generated by the Mexican presidency on the Internet — chiefly the president’s various Web sites, Internet radio station, and documents.~{ Creative Commons blog, Alex Roberts, March 8, 2006, at http://creativecommons.org/text/sip. }~ In Italy, CC Italy is exploring legislation to open up national and local government archives. It also wants new contract terms for those who develop publicly funded information so that it will automatically be available in the future.~{ Interview with Juan Carlos de Martin, CC Italy, July 17, 2007. }~ +={Creative Commons International:Italy|Mexico;Italy:CC licenses in;Mexico:CC licenses in} + +2~ Laboratories of Free Culture +={Creative Commons International:laboratories for free culture, as+7} + +In 2005, about two years after the launch of CC International, twenty-one jurisdictions around the world had adopted the licenses. (A legal jurisdiction is not necessarily the same as a nation because places like Scotland, Puerto Rico, and Catalonia — which have their own CC licenses — are not separate nations.) Under a new director of CC International, copyright attorney Catharina Maracke, who took over the license-porting project in 2006, the pace of license adoption has continued. By August 2008, forty-seven jurisdictions had ported the CC licenses, and a few dozen more had their projects under way. The CC affiliates have now reached a sufficient critical mass that they represent a new sort of international constituency for the sharing economy.
The CC network of legal scholars, public institutions, artistic sectors, and Internet users is not just a motivated global community of talent, but a new sort of transnational cultural movement: a digital republic of commoners. +={Maracke, Catharina} + +To be sure, some nations have more institutional backing than others, and some have more enthusiastic and active projects than others. CC Poland reported in 2006 that its biggest challenge was “a complete lack of financial and organizational support, in particular from our partner organization.”~{ iCommons ’06 conference booklet, p. 77. }~ (This was remedied in 2008 when CC Poland entered into a partnership with an interdisciplinary center at the University of Warsaw and with a law firm.) CC affiliates in smaller developing countries with fewer resources — especially in Africa — often have to beg and scrape to pull together resources to supplement the work of volunteers. + +Not surprisingly, the American CC licenses — a version of which was spun off as a generic license, as opposed to jurisdiction-specific licenses — are the most used. In a pioneering study of license usage in January 2007, Giorgos Cheliotis of Singapore Management University and his co-authors conservatively estimated that there were 60 million distinct items of CC content on the Internet — a sum that rose to 90 million by the end of 2007. Over 80 percent of these items use a license that is not jurisdiction-specific; the remaining 20 percent are spread among the thirty-three nations included in the study.~{ Giorgos Cheliotis, Warren Chik, Ankit Guglani, and Giri Kumar Tayi, “Taking Stock of the Creative Commons Experiment: Monitoring the Use of Creative Commons Licenses and Evaluating Its Implications for the Future of Creative Commons and for Copyright Law,” paper presented at 35th Research Conference on Communication, Information and Internet Policy (TPRC), September 28–30, 2007. Paper dated August 15, 2007.
}~ The highest volume of license usage per capita can be found in European nations — particularly Spain, Germany, Belgium, France, Italy, and Croatia — which were among the earliest adopters of the licenses. In absolute terms, the heaviest usage can be seen in Spain, Germany, France, South Korea, Italy, and Japan.~{ Cheliotis, “Taking Stock,” pp. 20–22. }~ Overall, however, CC usage outside of the United States is still fairly new, and usage and growth rates vary immensely from country to country. +={Cheliotis, Giorgos+1} + +As a fledgling network, the international CC community is a rudimentary platform for change. Its members are still groping toward a shared understanding of their work and devising new systems of communication and collaboration. But a great deal of cross-border collaboration is occurring. A variety of free culture advocates have constituted themselves as the Asia Commons and met in Bangkok to collaborate on issues of free software, citizen access to government information, and industry antipiracy propaganda. CC Italy has invited leaders of neighboring countries — France, Switzerland, Austria, Croatia, and Slovenia — to share their experiences and work together. A CC Latin America project started /{Scripta}/, a new Spanish-language journal based in Ecuador, to discuss free software and free culture issues affecting the continent. +={Creative Commons International:cross-border collaboration+1} + +CC leaders in Finland, France, and Australia have published books about their licensing projects.~{ The French book is Danièle Bourcier and Mélanie Dulong de Rosnay, eds., /{International Commons at the Digital Age}/ (Paris: Romillat, 2004), at http://fr.creativecommons.org/icommons_book.htm. The Finnish book is Herkko Hietanen et al., /{Community Created Content: Law, Business and Policy}/ (Turre Publishing, 2007), at http://www.turre.com/images/stories/books/webkirja_koko_optimoitu2.pdf.
The Australian book is Brian Fitzgerald, /{Open Content Licensing: Cultivating the Creative Commons}/ (Sydney: Sydney University Press, 2007). }~ CC Brazil and CC South Africa have collaborated on a project about copyright and developing nations. CC Canada is working with partners to develop an online, globally searchable database of Canadian works in the Canadian public domain. CC Salons have been held in Amsterdam, Toronto, Berlin, Beijing, London, Warsaw, Seoul, Taipei, and Johannesburg. + +In the Netherlands, CC project lead Paul Keller engineered a breakthrough that may overcome the persistent objections of European collecting societies to CC-licensed content. Collecting societies in Europe generally insist that any musician that they represent transfer all of their copyrights to the collective. This means that professional musicians cannot distribute their works under a CC license. Artists who are already using CC licenses cannot join the collecting societies in order to receive royalties for commercial uses of their works. In this manner, collecting societies in many European nations have effectively prevented many musicians from using the CC licenses. +={Keller, Paul;collecting societies+1:see also ASCAP} + +In 2007, however, CC Netherlands negotiated a one-year pilot program with two Dutch collecting societies, Buma and Stemra, to let artists use CC NonCommercial licenses for parts of their repertoire.~{ Creative Commons Netherlands press release, “Buma/Stemra and Creative Commons Netherlands Launch a Pilot,” August 23, 2007; e-mail by Paul Keller, CC Netherlands, to CC International listserv, August 23, 2007. }~ As a result, artists will have greater choice in the release of their works and the ability to easily manage their rights via a Web site. Other European CC affiliates hope that this Dutch experiment will break the long stalemate on this issue and persuade their collecting societies to be more flexible. 
+ +2~ The Developing Nations License +={Creative Commons International:Developing Nations license+9} + +One of the boldest experiments in the CC world was the creation of the Developing Nations license, launched in September 2004. A year earlier, Lessig had approached James Love, the director of Knowledge Ecology International (previously the Consumer Project on Technology), to ask him to craft a CC license that might help developing countries. Love proposed that the CC offer a “rider” at the end of its existing licenses so that people using the licenses could exempt developing nations from, say, the NonCommercial or NoDerivatives license restrictions. So, for example, if a textbook author wanted to let developing nations copy her book for either commercial or noncommercial purposes, she could add a rider authorizing this practice. +={Love, James+3;Lessig, Lawrence:CC International, and} + +Love was trying to do for books and journal articles what is already possible for drugs — the legalization of a commercial market for generic equivalents. Love had seen how generic drugs could reach people only because for-profit companies were able to produce and sell the drugs; nonprofit or philanthropic distribution is just not powerful enough. But the market for generic drugs is possible only because of laws that authorize companies to make legal knockoffs of proprietary drugs once the patent terms expire. Love hoped to do the same via a Developing Nations license for copyrighted works: “It would create an opportunity for the publishing equivalent of generic drug manufacturers who make ‘generic’ books. In developing countries, you have whole libraries full of photocopied books. You would not have libraries there if people didn’t engage in these practices.”~{ Interview with James P. Love, June 13, 2006. }~ + +In the end, Creative Commons offered the Developing Nations license as a separate license, not a rider. 
It had simple terms: “You must attribute the work in the manner specified by the author or licensor (but not in any way that suggests that they endorse you or your use of the work)” — and the license was valid only in non–high income nations, as determined by United Nations’ statistics. Although the release of the license got considerable press coverage, actual usage of the license was extremely small. The most prominent use was totally unexpected — for architectural designs. Architecture for Humanity, a California nonprofit, used the license for its designs of low-cost housing and health centers. The organization wanted to give away its architectural plans to poor countries while not letting its competitors in the U.S. use them for free.~{ Creative Commons blog, Kathryn Frankel, “Commoners: Architecture for Humanity,” June 30, 2006, at http://creativecommons.org/education/architecture. }~ +={United Nations} + +The expected uses of the Developing Nations license never materialized. In 2006, Love said, “The license is there, but people who might be willing to use it are not really aware of it.” He worried that the license “hasn’t really been explained in a way that would be obvious to them,” and ventured that there may be “a need for a re-marketing campaign.” By this time, however, the license had attracted the ire of Richard Stallman for its limitations on “freedom.”~{ See Lessig on Creative Commons blog, December 7, 2005, at http://creativecommons.org/weblog/archive/2005/12/page/3. }~ It prohibited copying of a work in certain circumstances (in high-income countries) even for noncommercial purposes, and so authorized only a partial grant of freedom, not a universal one. “Well, the whole point was /{not}/ to be universal,” said Love. “The license is for people that are unwilling to share with high-income countries, but are willing to share with developing countries.
So it actually expands the commons, but only in developing countries.”~{ Interview with James Love, June 13, 2006. }~ +={Lessig, Lawrence:CC International, and+1;Stallman, Richard:freedom, and+2} + +The controversy that grew up around the Developing Nations license illuminates the different approaches to movement building that Lessig and Stallman represent. Lessig’s advocacy for free culture has been an exploratory journey in pragmatic idealism; Stallman’s advocacy for free software has been more of a crusade of true believers in a core philosophy. For Stallman, the principles of “freedom” are unitary and clear, and so the path forward is fairly self-evident and unassailable. For Lessig, the principles of freedom are more situational and evolving and subject to the consensus of key creative communities. The flexibility has enabled a broad-spectrum movement to emerge, but it does not have the ideological coherence of, say, the free software movement. +={Stallman, Richard:free software, and;free software:international licensing, and;Lessig, Lawrence:freedom, and} + +Several factors converged to make it attractive for Creative Commons to revoke the Developing Nations license. Some people in the open-access publishing movement disliked the license because it did not comply with its stated standards of openness. In addition, Richard Stallman’s increasingly strident objections to Creative Commons licenses were starting to alarm some segments of the “free world.” What if Internet content became Balkanized through a series of incompatible licenses, and the movement were riven with sectarian strife? Stallman objected not only to the Developing Nations license, but to attempts by Creative Commons to get Wikipedia to make its content, licensed under the GNU Free Documentation license, compatible with the CC licenses. By 2007 this dispute had been simmering for four years (see pages 212–217).
+={Wikipedia:CC licenses, and;Stallman, Richard:influence of} + +Finally, many CC staff members regarded the Developing Nations and Sampling licenses as misbegotten experiments. Fewer than 0.01 percent of uses of CC licenses at the time involved the Developing Nations license, and the Sampling license was used by a relatively small community of remix artists and musicians. If eliminating two little-used niche licenses could neutralize objections from the open access and free software movements and achieve a greater philosophical and political solidarity in the “free world,” many CC partisans regarded a rescission of the licenses as a modest sacrifice, if not a net gain. +={remix works+1;music:remixes;Creative Commons (CC) licenses:music, for} + +In June 2007, Creative Commons announced that it was officially retiring the two licenses.~{ Creative Commons “retired licenses page,” at http://creativecommons.org/retiredlicenses. }~ In a formal statement, Lessig explained, “These licenses do not meet the minimum standards of the Open Access movement. Because this movement is so important to the spread of science and knowledge, we no longer believe it correct to promote a standalone version of this license.”~{ Lawrence Lessig, “Retiring standalone DevNations and One Sampling License,” message to CC International listserv, June 4, 2007. }~ The Creative Commons also revoked the Sampling license because it “only permits the remix of the licensed work, not the freedom to share it.” (Two other sampling licenses that permit noncommercial sharing — SamplingPlus and NonCommercial SamplingPlus — were retained.) +={Lessig, Lawrence:CC International, and}
It signaled that the Creative Commons was capitulating to objections by free software advocates and the concerns of open access publishing activists. + +2~ The iCommons Network +={iCommons+13;Creative Commons International:iCommons network+13} + +As an international network of CC affiliates grew, it naturally spawned new pockets of activism. Lessig explained: “Once a country gets launched, it becomes a cell of activism. Sometimes it is very traditional — Creative Commons Korea is made up of a bunch of federal judges — and sometimes it is very radical — Creative Commons Croatia is made up of a bunch of real activists who want to change copyright. Creative Commons Poland, too, is a bunch of really smart law graduates. But then there is the artist community, on the other side, many of whom want to blow up copyright; they just think it is ridiculous. +={Croatia:CC licenses in;Korea:CC licenses in;Creative Commons International:Croatia|Korea|Poland;Poland:CC licenses in;Lessig, Lawrence:CC International, and+1|iCommons, and+1} + +“So the opportunity and problem we faced at that point,” said Lessig, “was, ‘Well, what are we going to do with these activists?’ Because Creative Commons wanted to facilitate activism, of course, but it wasn’t as if we could bring activism into our core because it would make it more suspect.”~{ Interview with Lawrence Lessig, March 20, 2006. }~ + +The first steps toward organizing this protocommunity of activists came in March 2005, when eighty people from the various international licensing projects convened in Boston to talk about their shared challenges.~{ http://icommons.org/isummit05. }~ It quickly became clear that everyone wanted a forum in which to learn from one another, coordinate their work, and see themselves as something larger . . . perhaps a new sort of movement.
+={Creative Commons (CC):social movement, as+1} + +Here again was the tension between “the movement” and “the machine.” As neutral stewards of the licenses, the CC affiliates could not become full-throated advocates of a new international free culture movement. Their mission was preserving the integrity and utility of the licenses for all users, not advocacy. To avoid this problem, the Creative Commons, with an infusion of seed money and CC leaders, in 2006 started a new nonprofit organization, iCommons. +={Creative Commons (CC):“machine”, as} + +iCommons, a registered charity in the United Kingdom, is led by Heather Ford, a South African who met Lessig at Stanford and went back to her country to evangelize the Creative Commons licenses. Working out of Johannesburg, Ford is the activist counterpart to her Berlin licensing colleagues. She is a gregarious, spirited organizer who keeps tabs on activist gambits in dozens of nations and pulls together annual iCommons “summits.” +={Ford, Heather} + +The iCommons conferences are something of a staging area for a new type of global citizenship in the digital “free world.” The first conference, in Rio de Janeiro in June 2006, attracted more than three hundred commoners from fifty nations.~{ http://icommons.org/isummit06. }~ The second one, in Dubrovnik, Croatia, drew a somewhat larger and still more diverse crowd, and a third was held in Sapporo, Japan, in 2008. The free and open-source software community and the Creative Commons network are two of the largest, most influential blocs participating in iCommons, although Wikipedians represent a growing sector. But there are many other factions. There are musicians from the indie music, netlabels, and the remix scene. Filmmakers trying to reform fair use legal norms and video artists who are into mashups. Bloggers and citizen-journalists and social-networking fans. Gamers and participants in immersive environments like Second Life and World of Warcraft. 
Open business entrepreneurs who regard free software and CC licenses as key elements of their competitive, profit-making strategies. +={Wikipedia:iCommons, and} + +From Japan, there were anime artists who are into remixes. From South Africa, print-on-demand research publishers. A bare-chested Brazilian guitarist traded thoughts about copyright law with a Zagreb performer. An Amsterdam hacker with a punk t-shirt shared a smoke with an American academic. From India, there was Lawrence Liang, founder of the Alternative Law Forum, a leading intellectual about copyright law and economic and social inequality. From Syria, there was Anas Tawileh, who is working to produce the Arab Commons, a directory of Arabic works released under any of the CC licenses. He hopes it will counteract “the weak representation of the Arabic language on the Internet, the shallow nature of Arabic content currently available and the consumption rather than the production of knowledge.” From the United States, there was Michael Smolens, an entrepreneur who started dotSUB, a captioning system to make any film available in any language. +={Liang, Lawrence;Tawileh, Anas;Smolens, Michael} + +The convergence of so many players in the nascent sharing economy, assembled in the flesh, was a bracing glimpse into a new kind of cosmopolitan, democratic sensibility. The program organizers stated their aspirations this way: “How do we help one another to build a commons that nurtures local communities while respecting the needs of others? How can we move towards the growth of a ‘Global Commons Community’?”~{ iCommons Summit ’06 program. }~ + +Although most international commoners seem to be culturally progressive and politically engaged, they cannot be situated along a left-right ideological spectrum. This is because commoners tend to be more pragmatic and improvisational than ideological.
They are focused on building specific projects to facilitate sharing and creativity, based on open-source principles. Their enthusiasm is for cool software, effective legal interventions, and activist innovations, not sectarian debate. + +It is not as if politics has been banished. For example, some critics have questioned the “elite” origins and governance structure of iCommons, which was hatched by CC board members and leaders. David Berry, a free culture advocate who teaches at the University of Sussex, complained on a listserv that iCommons was “creating a corporate machine rather than a democratic one.”~{ David Berry, “The iCommons Lab Report,” sent to UK FreeCulture listserv, November 9, 2006. }~ He cited ambiguity in the powers of the organization, the murky process by which the iCommons code of conduct was adopted, and the board’s selection of community council members. Still other critics have grumbled at the Creative Commons’s collaboration with Microsoft in developing a licensing feature within the Word application. +={Berry, David;Microsoft:CC licenses, and} + +When pressed at the 2006 iCommons Summit to develop more formal organizational structure, Lessig begged off for the time being, saying that “trust and faith in each other” was a better approach than rigid rules and system. “We need a recognition that we have a common purpose. Don’t tell me that I need to tell you what that is, because we’ll never agree, but we do have a common purpose.”~{ Becky Hogge, “What Moves a Movement,” OpenDemocracy.org, June 27, 2006, at www.opendemocracy.net/media-commons/movement_3686.jsp. }~ This provoked Tom Chance, a free software and free culture advocate, to complain that “Lessig’s call to base the organization on ‘trust and faith in each other’ is too idealistic and undemocratic.” +={Chance, Tom;Lessig, Lawrence:iCommons, and} + +The encounter nicely captures the quandaries of leadership and governance in the networked environment. 
How can the effectiveness and clarity of leadership be combined with networked participation and the legitimacy that it provides? How should an organization draw philosophical boundaries to define itself while remaining open to new ideas? How should participation in online collectives be structured to generate collective wisdom and legitimacy and avoid collective stupidity and bureaucratic paralysis? In this case, iCommons diversified its governance in late 2007. It invited the Free Software Foundation Europe, Computer Professionals for Social Responsibility, and Instituto Overmundo, a Brazilian nonprofit dedicated to cultural diversity, to join Creative Commons as full-fledged partners in managing the organization. Despite its broadened leadership, iCommons remains more of a convener of annual forums and discussion host than the democratically sanctioned voice of an international movement. + +This is not surprising. The international commons community is still a fledgling enterprise trying to forge an identity and agenda. The resources for many CC affiliates are quite modest and the bonds of cooperation remain rudimentary. That said, the international explosion of free culture projects, above and beyond the CC licenses themselves, is nothing short of remarkable. It represents a “vast, transnational mobilization in favor of digital freedom,” as Gilberto Gil put it. In the early stages of the viral spiral, no one could have imagined that a corps of passionate, self-selected volunteers cooperating through the Internet could accomplish so much. And it continues, unabated. +={Gil, Gilberto;free culture:international} + +1~ 9 THE MANY FACES OF THE COMMONS + +/{As the “free world” grows and diversifies, so does debate over how to build the commons.}/ + +As the Creative Commons insinuated itself into one creative sector after another, and throughout dozens of nations, the variety of licenses proliferated. 
By one count in 2006, there were once eighteen distinct CC licenses, not counting version changes.~[* The eighteen licenses once offered include the core six licenses; a nonattribution version of five of those six licenses (now retired); three sampling licenses (one of which has been retired); the Developing Nations license (now retired); and a public domain dedication (which is otherwise not possible under copyright statutes). There was also a “Music Sharing license,” which was just another name for the Attribution-NonCommercial-No Derivatives license, and a “Founders’ Copyright,” which is not a license but a contract between an author and Creative Commons to place a particular work in the public domain after fourteen years (or twenty-eight years, if the author opts for a fourteen-year extension)]~ In the meantime, other parties were offering their own licenses. While the Creative Commons licenses had become the most-used licenses on the Internet, many people were choosing to use Free Software Foundation licenses for text (the GNU Free Documentation License, or FDL), the European Art Libre license, and special licenses that various institutions have devised for the arts, music, and educational works. +={Creative Commons (CC) licenses:types of+6} + +In theory, a proliferation of licenses is not a bad thing. By the lights of free-market economics and complexity theory, in fact, the best way to identify the most useful licenses is to introduce a variety of them and then let them compete for supremacy. Let natural selection in an ecosystem of licenses cull the losers and elevate the most useful ones. + +Unfortunately, this libertarian vision of diverse licenses competing for supremacy in the cultural ecosystem can run up against a harsh reality of the Internet. Too many disparate licenses may make it /{harder}/ for people to share content in an easy, interoperable way. 
It is not the proliferation of licenses per se that is problematic, it is the absence of a mechanism to enable differently licensed works to “play together” so that they can commingle and be used to produce new things. If bodies of works released under a CC license cannot be combined with works licensed under other licenses, it defeats one of the key value propositions of the Internet, easy interoperability and facile sharing and reuse. Despite its best intentions, license proliferation has the effect of “fencing off the commons,” because the different license terms keep different bodies of work in separate ghettos. + +Incompatibility is a problem both within the suite of CC licenses and between CC licenses and other licenses. Within the CC suite of licenses, for example, a work licensed under the Attribution-NonCommercial-ShareAlike license (BY-NC-SA) cannot legally be combined with a work licensed under the Attribution-No Derivatives license (BY-ND) or an Attribution-NonCommercial (BY-NC). The former license requires that any derivative works be licensed under the same license, period. +={Creative Commons (CC) licenses:incompatibility of+3} + +Some observers are not disturbed by the internal incompatibilities of the CC suite of licenses. They regard the different licenses as tools for various communities to build their own “subeconomies” of content, based on their own distinct needs and priorities. A scientist may not want his research articles altered or combined with other material. A musician may want to promote noncommercial usage on the Internet but retain commercial rights so that he can benefit from any CD sales. Not all creative sectors want to distribute their work in the same ways. + +The incompatibility between CC-licensed work and other free-content licenses is arguably more problematic. At a conference in Spain in the summer of 2005, Lessig recalls having a “Homer Simpson moment” — /{D’oh!}/ — when he realized where license proliferation was heading.
The incompatibility of licenses, and therefore bodies of content, could lead to an irretrievably fragmented universe of content. Lessig saw license proliferation as analogous to the Balkanization of technical standards that once plagued mainframe computing. IBM computers couldn’t communicate with DEC, which couldn’t communicate with Data General.~{ Ibid. }~ “The legal framework of the licensing world is basically a pre-Internet framework,” said Lessig in 2007. “We don’t have interoperability at the layer of legal infrastructure.”~{ Interview with Lawrence Lessig, October 23, 2007. }~ +={Lessig, Lawrence:CC licenses, and+1} + +_1 In my view [said Lessig], there’s a critical need for the free culture movement to achieve interoperability. And until it achieves interoperability, there’s a huge problem — because we’re creating these kinds of autistic islands of freedom. Basically, the stuff produced in the Wikimedia world is free, but can only be used in the Wikimedia world; the stuff created in the Creative Commons world is free, but can only be used in the Creative Commons world — and never the two will meet. That’s very destructive, because what we want is a kind of invisible platform of freedom that everybody can then build on. It’s been my objective from the very beginning to find the way to assure that we would get that platform.~{ Ibid. }~ + +A critic might call it “the revenge of choice” — the inevitable outcome of a neoliberal philosophy that privileges individualism and choice, rather than a collective concern for the commons. This is the view of Niva Elkin-Koren, a law professor at the University of Haifa (which coincidentally is the host of CC Israel). Elkin-Koren argues that the Creative Commons is replicating and reinforcing property rights discourse and failing to advance the cause of copyright reform. 
Because the Creative Commons is plagued by an “ideological fuzziness” that does not adequately set forth a philosophical vision of freedom or the commons, Elkin-Koren believes the CC project threatens to “spread and strengthen the proprietary regime in information.”~{ Niva Elkin-Koren, “Exploring Creative Commons: A Skeptical View of a Worthy Pursuit,” chapter XIV in Lucie Guibault and P. Bernt Hugenholtz, editors, /{The Future of the Public Domain: Identifying the Commons in Information Law}/ (Alphen aan den Rijn, Netherlands: Kluwer Law International BV, 2006). }~ +={Elkin-Koren, Niva;Creative Commons (CC) licenses:critics of+5} + +This critique was at the heart of one of the most serious internecine squabbles in the movement, the struggle to make Wikipedia content — licensed under the Free Software Foundation’s GNU Free Documentation License — compatible with CC-licensed content. The failure to find a solution, after four years of negotiation, threatened to keep two great bodies of Internet content from legally commingling and cause further fragmentation of open content. +={Wikipedia:CC licenses, and;GNU Project:GNU FDL;Free Software Foundation} + +There are other controversies. Anticapitalist leftists periodically take the Creative Commons to task for being too politically respectable. Friendly voices from underdeveloped nations of the Southern Hemisphere have raised alarms that the public domain is just another excuse for corporate exploitation of their resources. Others from the South argue that the informal, social commons inhabited by poor people — the “nonlegal commons” — deserve respect, too. And then there are copyright traditionalists, who believe that a redoubled effect to fortify the fair use doctrine should be a top priority. +={Creative Commons (CC) licenses:public domain, and;public domain:CC licenses, and} + +For the most part, the general public is oblivious to these internecine disputes. 
Who cares about the relative merits of using a GNU Free Documentation License for Wikipedia entries instead of a Creative Commons license? The layperson may not understand the long-term implications of vesting individual authors with the choice of how to share a work (in the style of the Creative Commons) as opposed to vesting communities of practice with those rights (in the style of the Free Software Foundation’s General Public License). Yet tech sophisticates realize that, in the context of the Internet, uninformed choices today can have serious practical consequences tomorrow. The terms of a license or the design of a software application or digital appliance can prevent people from sharing or reusing works. Bodies of content may become legally incompatible. Consumer freedoms to innovate and distribute may be limited. And then there are second-order questions that have great symbolic importance within the movement, such as, Whose vision of “freedom” in digital spaces shall we endorse? What is philosophically desirable and consistent? +={Wikipedia:CC licenses, and;free culture:sharing ethic of;Internet:future of} + +For a movement that aspires to simplify copyright law, the free culture movement has gotten embroiled in knotty debates that might give lawyers headaches. It is not easy to tell if the disputants are persnickety zealots who have spent too much time in front of their screens or latter-day Jeffersons, Madisons, and Hamiltons — brilliant thinkers who are astute enough to understand the long-term implications of some difficult issues and passionate enough to take a stand. One person’s arcana can be another person’s foundational principle, and one person’s quest for intellectual clarity is another person’s distraction from the messy challenges of building a movement. + +That is the basic problem of the crazy-quilt network that constitutes the free world.
There are, in fact, so many divergent, sometimes competing, sometimes congruent agendas that it can be difficult to orchestrate them into a single, harmonious song. For better or worse, the passions that animate culture jammers, copyright reformers, hackers, law scholars, artists, scientists, and countless others in seventy-plus countries are widely divergent. Although the intramovement disagreements may sometimes seem gratuitous, sectarian, and overblown, they are, in fact, understandable. The commoners tend to see their projects as part of a larger, ennobling enterprise— the construction of a new democratic polity and cultural ecology. It makes sense to fret about the technical, legal, and philosophical details when so much is potentially at stake. + +2~ Individual Choice Versus the Commons +={commons:individual choice vs.+8} + +It turns out that overcoming license incompatibilities is not such an easy task. Any attempt to bridge differences immediately runs into mind-bending legal complexities. Crafting new licensing language can trigger philosophical disagreements, some of which may be proxies for turf issues and personal control. One of the major philosophical disagreements involves the one raised by Elkin-Koren: the merits of individual choice versus the commons. Should individuals be allowed to choose how their work may circulate in the wider world, or is such legal partitioning of culture an affront to the value proposition of the commons and its sharing ethic? Why should the choices of individual creators be privileged over the creative needs of the general culture? +={Elkin-Koren, Niva;Creative Commons (CC) licenses:incompatibility of+4} + +The question is a divisive one. 
The answer that you give, Yochai Benkler of Harvard Law School told me, “depends on whether you think that what you’re doing is building a political movement or whether you’re building a commons that has narrower appeal, but is potentially, more functionally unitary.”~{ Interview with Yochai Benkler, February 7, 2006. }~ A movement is about building a “big tent,” he said — a vision that accommodates many different types of people with different preferences. If you are building a movement, then you will use terminologies that are attractive to a very broad range of liberal and illiberal conceptions of choice, he said. +={Benkler, Yochai:social movements, on;Creative Commons (CC):social movement, as} + +But a commons — of the sort that Richard Stallman’s GPL enables for software code — requires that its members honor a community’s social and moral priorities. A commons does not cater to individual preferences; its first priority is to advance the shared goals and relationships of the community. A commons is not oblivious to the self-interest of individuals. It just fulfills that self-interest in a different way. A commons does not confer benefits through individual negotiations or transactions, but instead through an individual’s good-faith participation in an ongoing, collective process. There is no individual quid pro quo, in other words. A person’s contributions accrue to the collective — and benefits flow from belonging to that collective. This is not an exotic or communistic model; it more or less resembles a scientist’s relationship with his research discipline. In the style of a gift economy, a scientist’s articles and lectures are gifts to the discipline; in return, he enjoys privileged access to his colleagues and their research. 
+={Stallman, Richard:GPL, and;General Public License (GPL):free software, and+3;Internet:gift economy} + +It is worth noting that a commons does not necessarily preclude making money from the fruit of the commons; it’s just that any commercial activity cannot interfere with the integrity of social relationships within the commons. In the case of GPL’d software, for example, Red Hat is able to sell its own versions of GNU/Linux only because it does not “take private” any code or inhibit sharing within the commons. The source code is always available to everyone. By contrast, scientists who patent knowledge that they glean from their participation in a scientific community may be seen as “stealing” community knowledge for private gain. The quest for individual profit may also induce ethical corner-cutting, which undermines the integrity of research in the commons. +={Red Hat;GNU/Linux:Red Hat, and;code:free access to;commoners:sharing by} + +Ironically, the Creative Commons is not itself a commons, nor do its licenses necessarily produce a commons in the strict sense of the term. The licenses are /{tools}/ for creating commons. But the tools do not require the creation of a commons (unlike the GPL). In this sense, a commons of CC-licensed content may be a “lesser” type of commons because it may have restrictions on what content may be shared, and how. The choices of individual authors, not the preexisting claims of the community, are considered paramount. +={Creative Commons (CC) licenses:tools for creating commons, as+3} + +Is one type of commons superior to the others? Does one offer a superior vision of “freedom”? This philosophical issue has been a recurrent source of tension between the Free Software Foundation, the steward of the GPL, and the Creative Commons, whose licenses cater to individual choice. 
+ +Strictly speaking, a commons essentially offers a binary choice, explained Benkler: “You’re in the commons or you’re out of the commons.” By broadening that binary choice, the CC licenses make the commons a more complicated and ambiguous enterprise. This is precisely what some critics like Stallman have found objectionable about certain CC licenses. They don’t necessarily help forge a community of shared values and commitments. Or as two British critics, David Berry and Giles Moss, have put it, the CC licenses create “commons without commonality.”~{ David Berry and Giles Moss, “On the ‘Creative Commons’: A Critique of the Commons without Commonality,” /{Free Software Magazine}/, July 15, 2005, at http://www.freesoftwaremagazine.com/articles/commons_without_commonality. }~ +={Benkler, Yochai:social movements, on;Berry, David;Moss, Giles;Stallman, Richard:criticisms by} + +Inviting authors to choose how their work may circulate can result in different types of “commons economies” that may or may not be interoperable. ShareAlike content is isolated from NoDerivatives content; NonCommercial content cannot be used for commercial purposes without explicit permission; and so on. CC-licensed works may themselves be incompatible with content licensed under other licenses, such as the GNU Free Documentation License. + +2~ Freedom, the Commons, and Movement Building +={free culture+9} + +The slightly confused layperson may ask, Why does all of this matter? The answer may depend on your commitment to the commons as a different (better?) way of creating value. Do you believe in individual freedom and choice, as conceived by contemporary liberal societies? Or do you believe in the /{different type of freedom}/ that comes through participation in a community of shared values? +={commons:individual choice vs.+8} + +Does this state the choice too starkly, as an either/or proposition? Some believe that it does. Perhaps a broader taxonomy of commons is possible.
Perhaps a commons can accommodate some measure of individual choice. Or is that an oxymoron? + +These are pivotal questions. The answers point toward different visions of free culture and different strategic ideas about movement building. Is it enough to put forward a demanding, utopian ideal of the commons, and hope that it will attract a corps of true believers willing to toil away in the face of general indifference or hostility? This is essentially what Stallman has done. Or is it better to build a “coalition of the reasonable,” so that a more accessible, practical vision can gain widespread social acceptance and political traction in a relatively short period of time? This is the vision that drives Larry Lessig and his allies. +={Stallman, Richard:criticisms by|freedom, and;Lessig, Lawrence:freedom, and;free culture:differing visions of+3} + +Some critics accuse Creative Commons of betraying the full potential of the commons because its licenses empower individual authors to decide how “shareable” their works can be. The licenses do not place the needs of the general culture or the commons first, as a matter of universal policy, and some licenses restrict how a work may be used. The lamentable result, say critics like Niva Elkin-Koren, is a segmented body of culture that encourages people to think of cultural works as property. People internalize the norms, such as “This is /{my work}/ and /{I’ll}/ decide how it shall be used by others.” +={Elkin-Koren, Niva;commoners:sharing by+1;Creative Commons (CC) licenses:critics of+2} + +This can be seen in the actual choices that CC licensors tend to use. Some 67 percent of CC-licensed works do not allow commercial usage.~{ Based on Yahoo queries, June 13, 2006, at http://wiki.creativecommons.org/License_Statistics. }~ Arguments go back and forth about whether the NC restriction enhances or shrinks freedom.
Many musicians and writers want to promote their works on the Internet while retaining the possibility of commercial gain, however remote; this would seem a strike for freedom. Yet critics note that the NC license is often used indiscriminately, even when commercial sales are a remote possibility. This precludes even modest commercial reuses of a work, such as reposting of content on a blog with advertising.~{ Eric Muller, “The Case for Free Use: Reasons Not to Use a Creative Commons–NC License,” at http://freedomdefined.org/Licenses/NC. }~ + +The larger point of criticism is that the Creative Commons licenses do not “draw a line in the sand” about what types of freedoms are inherent to the commons. In the interest of building a broad movement, Creative Commons does not insist upon a clear standard of freedom or prescribe how a commons should be structured. + +“While ideological diversity may be crucial for the successes of a social movement,” observed Elkin-Koren, “it may impair attempts to make creative works more accessible. The lack of a core perception of freedom in information, may lead to ideological fuzziness. This could interfere with the goal of offering a workable and sustainable alternative to copyright.”~{ Niva Elkin-Koren, “Exploring Creative Commons: A Skeptical View of a Worthy Pursuit,” chapter 14 in Lucie Guibault and P. Bernt Hugenholtz, editors, /{The Future of the Public Domain: Identifying the Commons in Information Law}/ (The Netherlands: Kluwer Law International BV, 2006), p. 326. }~ In an essay that offers “a skeptical view of a worthy pursuit,” Elkin-Koren says that the CC regime encourages narrow calculations of self-interest and the same attitudes toward property and individual transactions as the market economy; it does not promote a coherent vision of “freedom” that fortifies the commons as such. 
+={Elkin-Koren, Niva+2;Creative Commons (CC):social movement, as} + +“The normative message that we communicate by using Creative Commons licenses is the strategy of choice,” Elkin-Koren told me. “You’re the owner, you’re the author, and therefore, you are entitled to govern your work. . . . No one tells you that maybe it’s wrong; maybe you should allow people to use your work.” By using the CC licenses, she continued, we internalize these norms. “We are teaching ourselves and others that our works are simply commodities, and like every other commodity, everyone has to acquire a license in order to use it.”~{ Interview with Niva Elkin-Koren, January 30, 2007. }~ +={authorship:control, and} + +But champions of the Creative Commons licenses celebrate their approach as a pragmatic and effective way to break free from the stifling “all rights reserved” ethic of copyright law. Historically, of course, not much else has been successful in challenging copyright norms — which is precisely why Lessig and others find the CC strategy attractive. “If I believed that there was a different discourse that had political purchase in someplace other than tiny corners of law faculty commons rooms, I’d be willing to undertake it,” said Lessig. He concedes that his viewpoint may be affected by his living in the United States instead of Israel (where Elkin-Koren lives) but, in the end, he considers the Creative Commons as “just my judgment about what’s going to be effective.”~{ Interview with Lawrence Lessig, October 23, 2007. }~ +={Lessig, Lawrence:CC licenses, and} + +2~ The Splintering of the Free World? +={Creative Commons (CC) licenses:critics of+20} + +At one point, the philosophical disagreements between the Creative Commons and its critics did not matter so much. There was enough shared purpose and common history that everyone could agree to disagree. And since the project was still young, the stakes were not so high. 
But then it became clear that the CC licenses would be quite popular indeed. When the Creative Commons issued its Developing Nations and Sampling licenses in 2003, it brought Richard Stallman’s simmering dissatisfaction with the organization to a boil, threatening a serious schism. Pointing to the “four freedoms” that define the free software movement, Stallman criticized the new CC licenses as “not free” because they do not allow universal copying of a work. +={Stallman, Richard:criticisms by+2|freedom, and+2;Creative Commons (CC) licenses:popularity of;Creative Commons International:Developing Nations license} + +Stallman objected to the Sampling license because, while it allowed a remix of a licensed work, it did not allow the freedom to share it. The Developing Nations license was objectionable because its freedoms to copy are limited to people in the developing world, and do not extend to everyone. Stallman also disliked the fact that the CC tag that licensors affix to their works did not specify /{which}/ license they were using. With no clear standard of “freedom” and now a mix of licenses that included two “non-free” licenses, Stallman regarded the CC tag as meaningless and the organization itself problematic. + +“I used to support Creative Commons,” said Stallman on his blog in July 2005, “but then it adopted some additional licenses which do not give everyone that minimum freedom, and now I no longer endorse it as an activity. I agree with Mako Hill that they are taking the wrong approach by not insisting on any specific freedoms for the public.”~{ Richard Stallman, “Fireworks in Montreal,” at http://www.fsf.org/blogs/rms/entry-20050920.html.
}~ + +Mako Hill is a brilliant young hacker and Stallman acolyte who wrote a 2005 essay, “Towards a Standard of Freedom: Creative Commons and the Free Software Movement,”~{ Benjamin Mako Hill, “Towards a Standard of Freedom: Creative Commons and the Free Software Movement,” /{Advogato}/, July 29, 2005, at http://www.advogato.org/article/851.html. }~ a piece that shares Elkin-Koren’s complaint about the CC’s “ideological fuzziness.” Then enrolled in a graduate program at the MIT Media Lab, Hill has written a number of essays on the philosophy and social values of free software. (When he was an undergraduate at Hampshire College, I was an outside advisor for his senior thesis and remain friends with him.) +={Elkin-Koren, Niva;Hill, Benjamin Mako+2;free culture:differing visions of+31;free software:social movement, as+31} + +In his “Freedom’s Standard” essay, Hill wrote: “[D]espite CC’s stated desire to learn from and build upon the example of the free software movement, CC sets no defined limits and promises no freedoms, no rights, and no fixed qualities. Free software’s success is built on an ethical position. CC sets no such standard.” While CC prides itself on its more open-minded “some rights reserved” standard, Hill says that a real movement for freedom must make a bolder commitment to the rights of the audience and other creators— namely, that “essential rights are unreservable.”~{ Interview with Benjamin Mako Hill, June 1, 2007. }~ + +By this, Hill means that certain essential freedoms should not be restricted by copyright law or any license. The problem with the CC licenses, argued Hill, is that they cannot commit to any “/{defined}/ spirit of sharing” (emphasis in original). This is not the way to build a transformative, sustainable movement, said Hill.~{ Ibid. See also Hill, “Freedom’s Standard Advanced?” /{Mute}/, November 23, 2005, at http://www.metamute.org/en/node/5597. }~ + +But what, then, about the choice of authors?
Doesn’t that freedom count for anything? CC partisans have responded. Joi Ito, the chair of the Creative Commons, wrote in 2007, “CC is about providing choice. FSF is mostly about getting people to make /{their}/ choice. I realize it’s not THAT clear-cut, but I think the point of CC is to provide a platform for choice. . . . I realize that we are headed in the same general free culture direction and many of us debate what choices should be allowed, but I think we are more ‘tolerant’ and support more diverse views than the FSF.”~{ Joichi Ito, message on iCommons listserv, June 1, 2007. }~ +={Ito, Joichi} + +Lessig has argued many times that, just as the free software community decided for itself how its content ought to be distributed, so other artistic sectors — musicians, photographers, filmmakers, etc. — must make such decisions themselves. If they can’t have certain choices, then they will have little interest in joining a movement for free culture, said Lessig at the 23rd Chaos Communication Congress in Berlin. “We don’t have the standing to tell photographers or musicians what ‘freedom’ is.” Why should the Free Software Foundation, or any other group, be able to dictate to an artistic community how their works should circulate? +={Lessig, Lawrence:freedom, and;Free Software Foundation} + +Elkin-Koren is not so sure we can segment the world according to creative sectors and let each determine how works shall circulate. “I don’t think we can separate the different sectors, as if we work in different sectors,” she told me. “We all work in the production of information. My ideas on copyright are really affected by the art that I use and the music that I listen to. . . . Information is essential not only for creating something functional or for selling a work of art, but for our citizenship and for our ability to participate in society. So it’s not as if we can say, ‘Well, this sector can decide for themselves.’”~{ Interview with Niva Elkin-Koren, January 30, 2007. 
}~ +={Elkin-Koren, Niva} + +As Wikipedia began to take off in popularity, what might have been an unpleasant philosophical rift grew into a more serious fissure with potentially significant consequences. All Wikipedia content is licensed under the Free Software Foundation’s GNU Free Documentation License, or FDL,~{ Wikipedia entry on GNU Free Documentation license, at http://en.wikipedia.org/wiki/GNU_Free_Documentation_License. }~ largely because the CC licenses did not exist when Wikipedia was launched in 2001. The FDL, originally intended for the documentation manuals that explicate software applications, is essentially the same as the CC ShareAlike license (any derivative works must also be released under the same license granting the freedom to share). But using the FDL can get cumbersome, especially as more video, audio, and photos are incorporated into a text; each artifact would require that the license be posted on it. As more content is shared, the potential for misuse of the content, and lawsuits over violations of licensing agreements, would grow.~{ Michael Fitzgerald, “Copyleft Hits a Snag,” /{Technology Review}/, December 21, 2005. }~ +={Free Documentation License+10;GNU Project+10;Wikipedia:GNU FDL, and+10|CC licenses, and+10} + +Unfortunately, as a legal matter, the FDL is incompatible with the CC licenses. This means that all content on Wikipedia and its sister Wikimedia projects (Wikispecies, Wikiquote, Wikinews, among other projects) cannot legally be combined with works licensed under CC licenses. Angered by the two “non-free” CC licenses, Stallman dug in his heels and defended Wikipedia’s use of the FDL. He also made it clear that he would remain a critic of Creative Commons unless it revoked or changed its licenses to conform with the Free Software Foundation’s standards of “freedom.” +={Free Software Foundation;Stallman, Richard:criticisms by+5|Wikipedia, and+5} + +Thus began a four-year search for a resolution.
Lessig recalled, “We started to think about a way that Wikimedia could migrate to a license that we would then deem as compatible to a Creative Commons license. That took two years of negotiation, basically.” One proposed solution was for Wikimedia projects to offer both licenses, the FDL and CC BY-SA, for the same work. However, it was determined that derivative works licensed under one license would still be incompatible with dual-licensed works, resulting in “project bleed” (new works would migrate away from the existing corpus of works). Another approach was for a “one-way compatibility” of licenses, so that people creating works under the FDL could use CC-licensed content. +={Lessig, Lawrence:CC licenses, and+4|freedom, and+4} + +But Lessig realized that these solutions dealt only with the issue at hand; the real challenge was finding a more systemic solution. As various players engaged with the FDL/CC controversy, it grew from a licensing squabble into an intertribal confrontation. It became a symbol for everything that Stallman found politically unacceptable about the Creative Commons’s vision of freedom. + +From 2005 to 2007, the issue roiled many factions within the free culture/free software communities. The debate and invective flew back and forth in various venues, and there were proposals, negotiations, and political maneuvers. MIT computer scientist (and CC board member) Hal Abelson rejoined the FSF board. Lessig and other CC staff entered into talks with the FSF general counsel, Eben Moglen. Wikipedia co-founder Jimmy Wales joined the Creative Commons board. Yet Stallman continued to resist, and the Wikimedia board would not approve any proposed solutions.
+={Abelson, Hal:CC board, on|Free Software Foundation, and;Moglen, Eben;Wales, Jimmy} + +The stalemate was broken on June 4, 2007, when Lessig made a surprise announcement that the Creative Commons was “retiring” the Developing Nations and Sampling licenses.~{ Lessig post to CC International listserv, June 4, 2007. More about the CC’s retired licenses can be seen at http://creativecommons.org/retiredlicenses. }~ One reason was a lack of interest in the licenses: only 0.01 percent of CC licensors were using each license. But, without alluding to the Free Software Foundation or Stallman, Lessig also noted that the two licenses did not ensure a minimal freedom to share a work noncommercially— a standard met by all other CC licenses. In addition, Lessig pointed out to me, some publishers were beginning to see the Developing Nations license as a subterfuge to avoid meeting open-access publishing standards. +={Free Software Foundation;Creative Commons International:Developing Nations license} + +For Creative Commons, the revocation of the two licenses was at least a shrewd political move; it also affirmed a stricter standard of “freedom” in the ability to use digital materials. In return for sacrificing two little-used licenses, the organization gained Stallman’s eventual support for a deal that would let the FDL be treated as compatible with the CC ShareAlike license. This was a major triumph because it could avoid the contorted, legalistic solutions that had been previously proposed and rejected. It was also a breakthrough because it averted a major rift between two growing bodies of open content and avoided a slow drift into a wider Balkanization of content across the Internet. “I kind of thought that no matter what we did, Richard would find a reason to object,” recalled Lessig, “but he didn’t. He stuck to his principles, so I give credit to him.”~{ Interview with Lawrence Lessig, October 23, 2007.
}~ +={open networks:license incompatibility, and} + +The debates about “freedom” produced several specific results. In November 2006, when Creative Commons released an updated legal version of its licenses, version 3.0, it formally recognized other licenses as legally compatible with the ShareAlike license if they have the same purpose, meaning, and effect, and if the other license recognizes the CC license. The move should help avoid future strife over interoperability. + +A few months later, the Creative Commons also adopted a “Free Cultural Works” definition and seal as a way to recognize works that are “free,” as understood by the Free Software Foundation. The definition declares that works with either the CC Attribution or Attribution-ShareAlike licenses should be considered “free” because they give people the freedom to modify works without any discrimination against specific uses or users. The definition and seal /{exclude}/ the CC NonCommercial and NoDerivatives licenses, however, because those licenses do not allow this sort of freedom. The purpose of the seal is not to denigrate use of the NC and ND licenses, but to educate users about the less restrictive licenses and to assert a philosophical solidarity with the free software community. +={Free Software Foundation} + +As part of this larger effort, the Creative Commons also issued a draft statement in April 2008 declaring the special importance of the ShareAlike license in the free culture movement and the organization’s intentions in its stewardship of the license. The statement amounted to a diplomatic peace treaty, to be finalized in the months ahead. +={free culture:sharing ethic of+1} + +By May 2008 the details of the agreement to make Wikipedia’s entries, licensed under the FDL, legally compatible with materials licensed under the CC ShareAlike license had not been consummated. 
But it was expected that the legal technicalities would be ironed out, and two great bodies of open content would no longer be legally off-limits to each other. + +2~ Criticism from the Left and from the South + +As the Creative Commons has grown in popularity, a longer line has formed to take issue with some of its fundamental strategies. One line of criticism comes from anticapitalist ideologues, another from scholars of the underdeveloped nations of the South. + +British academics Berry and Moss apparently hanker for a more bracing revolution in culture; they object to the commodification of culture in any form and to the role that copyright law plays in this drama. To them, Lessig is distressingly centrist. He is “always very keen to disassociate himself and the Creative Commons from the (diabolical) insinuation that he is (God forbid!) anti-market, anticapitalist, or communist,” Berry and Moss complain.~{ David Berry and Giles Moss, “On the ‘Creative Commons’: A Critique of the Commons Without Commonality,” /{Free Software Magazine}/, July 15, 2005, at http://www.freesoftwaremagazine.com/articles/commons_without_commonality }~ The gist of their objection: Why is Lessig collaborating with media corporations and neoclassical economists when there is a larger, more profound revolution that needs to be fought? A new social ethic and political struggle are needed, they write, “not lawyers exercising their legal vernacular and skills on complicated licenses, court cases and precedents.” +={Berry, David;Moss, Giles;Lessig, Lawrence:CC licenses, and} + +Dense diatribes against the antirevolutionary character of Creative Commons can be heard in various hacker venues and cultural blogs and Web sites.
The argument tends to go along the lines sketched here by Anna Nimus of Berlin, Germany: +={Nimus, Anna+1} + +_1 Creative Commons preserves Romanticism’s ideas of originality, creativity and property rights, and similarly considers “free culture” to be a separate sphere existing in splendid isolation from the world of material production. Ever since the 18th century, the ideas of “creativity” and “originality” have been inextricably linked to an anti-commons of knowledge. Creative Commons is no exception. There’s no doubt that Creative Commons can shed light on some of the issues in the continuing struggle against intellectual property. But it is insufficient at best, and, at its worst, it’s just another attempt by the apologists of property to confuse the discourse, poison the well, and crowd out any revolutionary analysis.~{ Anna Nimus, “Copyright, Copyleft and the Creative Anti-Commons,” at http://subsol.c3.hu/subsol_2/contributors0/nimustext.html. }~ + +To ensure that her revolutionary analysis gets out, Nimus released her piece under a self-styled “Anticopyright” notation, with the added phrase, “All rights dispersed.” + +A more penetrating brand of criticism has come from the South, which fears that the West’s newfound enthusiasm for the commons may not necessarily benefit the people of developing nations; indeed, it could simply legitimate new thefts of their shared resources. In an important 2004 law review article, “The Romance of the Public Domain,” law professors Anupam Chander and Madhavi Sunder argue that “public domain advocates seem to accept that because a resource is open to all by force of law, that resource will indeed be exploited by all. In practice, however, differing circumstances — including knowledge, wealth, power and ability — render some better able than others to exploit a commons. We describe this popular scholarly conception of the commons as ‘romantic.’ . . . 
It is celebratory, even euphoric, about the emancipatory potential of the commons. But it is also naïve, idealistic and removed from reality.”~{ Anupam Chander and Madhavi Sunder, “The Romance of the Public Domain,” California Law Review 92, no. 1131 (2004), p. 1341. }~ +={Chander, Anupam+2;Sunder, Madhavi+2;free culture:international+2;public domain+3:commons, and+3} + +If genes, seeds, indigenous medicines, agricultural innovations, artistic designs, music, and the various ecological and cultural resources of the South are not treated as private property, but instead as elements of the public domain, then anyone can exploit them freely. This can lead to serious injustices, as powerful corporations swoop in to exploit resources that are available to all in the public domain. + +Chander and Sunder write: “By presuming that leaving information and ideas in the public domain enhances ‘semiotic democracy’ — a world in which all people, not just the powerful, have the ability to make cultural meanings — law turns a blind eye to the fact that for centuries the public domain has been a source for exploiting the labor and bodies of the disempowered — namely, people of color, the poor, women and people from the global South.”~{ Ibid., p. 1343. }~ Chander and Sunder argue that the binary logic of copyright law — something is either private property or in the public domain — “masks the ways in which the commons often functions more in the interests of traditional property owners than in the interests of commoners.” +={democracy:semiotic} + +This critique makes clear why the distinction between the public domain and the commons matters. The public domain is an open-access regime available to all; it has no property rights or governance rules. The commons, however, is a legal regime for ensuring that the fruits of collective efforts remain under the control of that collective. 
The GPL, the CC licenses, databases of traditional knowledge, and sui generis national statutes for protecting biological diversity all represent innovative legal strategies for protecting the commons. The powerful can exploit and overwhelm the public domain, but they are not likely to overwhelm a commons that has a legal regime to protect a collective’s shared resources. +={commons:protection of;General Public License (GPL):commons, and} + +A more radical and profound critique of the commons came in an open letter to “inhabitants of the ‘legal’ Commons” from “Denizens of Non Legal Commons, and those who travel to and from them.” The three-page letter, drafted by Shuddhabrata Sengupta, a filmmaker and writer with the Raqs Media Collective in New Delhi, is a plea for recognizing the informal sharing economy that flourishes beneath the oblivious gaze of mainstream society, and certainly beyond the reach of property rights and law. +={Sengupta, Shuddhabrata} + +“Greetings!” the letter opens. “This missive arrives at your threshold from the proverbial Asiatic street, located in the shadow of an improvised bazaar, where all manner of oriental pirates and other dodgy characters gather to trade in what many amongst you consider to be stolen goods.” To this /{other}/ commons, stolen goods are really “borrowed,” because nothing is really “owned” — and therefore nothing can be “stolen.” This is the realm of “the great circulating public library of the Asiatic street.” The letter continues: + +_1 We appreciate and admire the determination with which you nurture your garden of licenses. The proliferation and variety of flowering contracts and clauses in your hothouses is astounding. But we find the paradox of a space that is called a commons and yet so fenced in, and in so many ways, somewhat intriguing. The number of times we had to ask for permission, and the number of security check posts we had to negotiate to enter even a corner of your commons was impressive. . . . 
Sometimes we found that when people spoke of “Common Property” it was hard to know where the commons ended and where property began . . . + +_1 Strangely, the capacity to name something as “mine,” even if in order to “share” it, requires a degree of attainments that is not in itself evenly distributed. Not everyone comes into the world with the confidence that anything is “theirs” to share. This means that the “commons,” in your parlance, consists of an arrangement wherein only those who are in the magic circle of confident owners effectively get a share in that which is essentially, still a configuration of different bits of fenced in property. What they do is basically effect a series of swaps, based on a mutual understanding of their exclusive property rights. So I give you something of what I own, in exchange for which, I get something of what you own. The good or item in question never exits the circuit of property, even, paradoxically, when it is shared. Goods that are not owned, or those that have been taken outside the circuit of ownership, effectively cannot be shared, or even circulated.~{ “A Letter to the Commons, from the participants of the ‘Shades of the Commons Workshop,’ ” in /{In the Shade of the Commons: Towards a Culture of Open Networks}/ (Amsterdam, Netherlands: Waag Society, 2006), at http://www3.fis.utoronto.ca/research/iprp/cracin/publications/pdfs/final/werbin_InTheShade.pdf. }~ + +The letter invites a deeper consideration of how humans form commons. However ingenious and useful the jerry-rigged legal mechanisms of the GPL and Creative Commons, the disembodied voice of the Non Legal Commons speaks, as if through the sewer grate, to remind us that the commons is about much more than law and civil society. It is part of the human condition. Yet the chaotic Asiatic street is not likely to yield conventional economic development without the rule of law, civil institutions, and some forms of legal property.
The question posed by the informal commons remains a necessary one to ponder: What balance of commons and property rights, and in what forms, is best for a society? + +2~ Fair Use and the Creative Commons +={Creative Commons (CC) licenses:fair use, and+15;fair use doctrine:CC licenses, and+15} + +Walk through the blossoming schools of commons thought and it quickly becomes clear that the commons is no monolithic ideal but a many-splendored mosaic of perspectives. To the befuddlement of conventional observers, the perspectives are not necessarily adversarial or mutually exclusive. More often than not, they are fractal— interesting variations of familiar commons themes. In our fascination with newfangled commons, it is easy to overlook a more traditionally minded defender of the commons: the champion of fair use. It is all well and good to promote works that are “born free” under CC licenses, say these friendly critics. But the hard fact of the matter is that for the foreseeable future, creators will still need access to copyrighted content — and this requires a strong fair use doctrine and aggressive public education. + +It is a compelling argument, but in fact only an indirect criticism of Creative Commons. For filmmakers who need to use film clips from existing films and musicians who want to use a riff from another performer, the fair use doctrine is indeed more important than any CC license. Peter Jaszi, the law professor at American University’s Washington School of Law, believes that even with growing bodies of CC-licensed content, “teachers, filmmakers, editors, freelance critics and others need to do things with proprietary content.” As a practical matter, they need a strong, clear set of fair use guidelines. +={Jaszi, Peter+2} + +Jaszi and his colleague Pat Aufderheide, a communications professor who runs the Center for Social Media at American University, have dedicated themselves to clarifying the scope and certainty of fair use. 
They have launched a major fair use project to get specific creative communities to define their “best practices in fair use.” If filmmakers, for example, can articulate their own artistic needs and professional interests in copying and sharing, then the courts are more likely to take those standards into consideration when they rule what is protected under the fair use doctrine.~{ Center for Social Media, at http://www.centerforsocialmedia.org/fairuse. See also Pat Aufderheide and Peter Jaszi, “Fair Use and Best Practices: Surprising Success,” /{Intellectual Property Today}/, October 2007, at http://www.iptoday .com/articles/2007-10-aufderheide.asp; and Peter Jaszi, “Copyright, Fair Use and Motion Pictures,” /{Utah Law Review}/ 3, no. 715 (2007), and which also appeared in R. Kolker, ed., /{Oxford Handbook of Film and Media Studies}/ (2007), at http://www.centerforsocialmedia.org/files/pdf/fairuse_motionpictures.pdf. }~ A set of respectable standards for a given field can help stabilize and expand the application of fair use. +={Aufderheide, Pat+1} + +Inspired in part by a professional code developed by news broadcasters, some of the nation’s most respected filmmakers prepared the Documentary Filmmakers’ Statement of Best Practices in Fair Use, which was released in November 2005. The guidelines have since been embraced by the film industry, television programmers, and insurance companies (who insure against copyright violations) as a default definition about what constitutes fair use in documentary filmmaking.~{ Aufderheide and Jaszi, /{Intellectual Property Today}/, October 2007, at http:// www.iptoday.com/articles/2007-10-aufderheide.asp. }~ Aufderheide and Jaszi are currently exploring fair use projects for other fields, such as teaching, as a way to make fair use a more reliable legal tool for sharing and reuse of works. 
+ +Lessig has been highly supportive of the fair use project and, indeed, he oversees his own fair use law clinic at Stanford Law School, which litigates cases frequently. “It’s not as if I don’t think fair use is important,” said Lessig, “but I do think that if the movement focuses on fair use, we don’t attract the people we need. . . . From my perspective, long-term success in changing the fundamental perspectives around copyright depends on something like Creative Commons as opposed to legal action, and even quasi-legal action, like the Fair Use Project.” +={Lessig, Lawrence:fair use, on+5} + +For Lessig, fair use is deeply flawed as the basis for building a political movement to reform copyright law. He argues that its advocates are dogged by the (unfair) perception that they are “just a bunch of people who want to get stuff for free, without paying for it. . . . It’s too easy to dismiss that movement.” Lessig recalled the time that the head of a major record label snorted, “Fair use is the last refuge of the scoundrel.” Fair use defenders obviously take issue with this characterization, but the accusation nonetheless pushes fair use champions into a rhetorical corner from which it is difficult to escape. + +A more appealing alternative, Lessig argues, is to use the credibility of copyright ownership to argue the point in a different way. He cited the successful campaign by European software engineers in the 1980s to fight attempts to expand patent protection for software. Their campaign did not resemble “a bunch of peer-to-peer downloaders who are saying, ‘Yeah, I want my music for free,’” said Lessig. 
“It was a bunch of people who are the /{beneficiaries}/ of patent rights saying, ‘Look, we /{don’t want}/ these rights.’ That creates a kind of credibility.” From a moral and political standpoint, Lessig argued, a movement based on copyright owners declaring that they want to forfeit certain rights in order to /{share}/ and promote creativity, has greater credibility than a campaign seeking to “balance” the public’s rights against private copyright privileges. + +“I imagine a world where there are one hundred million Creative Commons–licensed artists out there, creating works according to Creative Commons views,” he said. Then, when Hollywood pressures Congress for stronger copyright protections, he said, “there would be all these people out there who are creating according to a radically different model. [Hollywood’s] claims about extremism would just not be true for a large number of creators.” Instead of a copyright debate that pits “creators” against “pirates,” Lessig said, “I want to create this world where there is a third category of people who are creators, but who create according to different values, values that emphasize the importance of sharing and building upon the past.”~{ Interview with Lawrence Lessig, October 23, 2007. }~ +={piracy} + +In the larger scheme of things, the tensions between the fair use and free culture advocates are not mutually exclusive. In the end, the two approaches complement each other with different contributions. Both seek to promote sharing and reuse, but the former works within the traditional framework of copyright law; the latter is trying to build a whole new body of culture and discourse. There is a kind of gentleman’s agreement between the fair use and free culture communities to work on different sides of the street, while traveling a parallel path down the same road. 
+={free culture:fair use vs.+2;fair use doctrine:copyright law, and;copyright law:fair use doctrine, and} + +For Lessig, there is little advantage in shirking the property rights discourse of copyright law, as Elkin-Koren and the “Non Legal Commons” urge. Indeed, he sees a distinct strategic advantage in /{embracing}/ that discourse — and then trying to turn it to different ends. This, in a way, is what Stallman succeeded in doing with the GPL, a license based on copyright law. Yet, while Stallman attracted a somewhat homogeneous community of programmers to his movement, Creative Commons has attracted a sprawling community of eclectic interests, diverse priorities, and no agreed-upon philosophical core. +={Elkin-Koren, Niva;copyright law:property rights, and;property rights:copyright law, and;free software:social movement, as} + +By choosing a middle path that embraces but seeks to transform property discourse, Creative Commons may avoid the marginalization of ardent leftists and the modest agenda of fair use activism. It remains an open question whether the ideological fuzziness at the core of Creative Commons, or the limitations of its licenses, is offset by its success in popularizing a new cultural vision. Yochai Benkler, the great commons theorist, understands the legal criticisms, and agrees with them to an extent. But ultimately, the significance of Creative Commons, he believes, has been “in galvanizing a movement, in symbolizing it and in providing a place to organize around. From my perspective, if I care about Creative Commons, it is as a cultural icon for a movement, more than as a set of licenses. Which is why I am less bothered than some, about the people who are beginning to criticize Creative Commons and how good the licenses really are, and how compatible they are.”~{ Interview with Yochai Benkler, February 7, 2006. 
}~ +={Benkler, Yochai:social movements, on;free culture:social movement, as+4} + +For Cory Doctorow, the copyfighter and sci-fi writer, the eclectic viewpoints within the free culture movement are a decisive strength: “The difference between a movement and an organization,” he wrote on the iCommons listserv, “is that an organization is a group of people who want the same thing for the same reason. A movement is a collection of groups of people who want the same thing for different reasons. Movements are infinitely more powerful than organizations.” +={Doctorow, Cory:free culture movement, and+1} + +The reason the environmental movement is so powerful, Doctorow continued, is the very fact that it encompasses “anticapitalists, green investors, spiritualists, scientists, hunters and fishers, parents worried about environmental toxins, labor reformers, proglobalists, anti-globalists, etc. . . . Denuding the ideological landscape of the environmental movement in a purge to eliminate all those save the ones who support environmentalism /{qua}/ environmentalism would be the worst setback environmentalism could suffer. Likewise copyfighters: there are Marxists, anarchists, Ayn Rand objectivists, economists, artists, free marketeers, libertarians, liberal democrats, etc., who see copyright liberalization as serving their agenda. If we insist that copyright reform is about copyright reform and nothing else, there will be no copyright reform movement.”~{ Cory Doctorow, iCommons listserv [thread, “Andrew Orlowski Attacks Lessig”], June 1, 2007. }~ +={Rand, Ayn} + +There is a price to be paid for all this diversity, however. Diversity means constant debate. Debate can escalate into strife and sectarianism. And in the free culture movement, where so many people are feverishly improvising and inventing, nearly everything is open for debate. It turns out that this business of inventing the commons is complicated stuff; there are many ways to construct a commons.
It is only natural for people to have their own ideas about how to build the digital republic. + +The fundamental question may be whether the existing framework of copyright law and property discourse can be adequately reformed — or whether its very categories of thought are the problem. The late poet and activist Audre Lorde, in the context of feminist struggle, declared that the prevailing discourse must be overthrown, not reformed, because, in her words, “the master’s tools will never dismantle the master’s house.” Within the free software and free culture movements, however, there are those who believe that copyright law can be sufficiently adapted to build a sharing economy, a more competitive marketplace, and a more humane democratic culture. Others are convinced that the legal discourse of property rights, however modified, will simply entrench the very principles that they wish to transcend. As the movement grows and diversifies, debates over what constitutes the most strategic, morally honorable path forward are likely to intensify. +={Lorde, Audre;copyright law:property rights, and;property rights:copyright law, and;free software:social movement, as} + +:B~ PART III + +:C~ A Viral Spiral of New Commons + +1~intro_iii [Intro] -# + +By 2008 the viral spiral had come a long way. Richard Stallman’s fringe movement to build a commons for code became an enormous success, partly inspiring Lawrence Lessig and his compatriots to develop the Creative Commons licenses and a larger vision of free culture. Empowered by these tools, ordinary people began to develop some exciting new models for creativity and sharing. New types of commons arose. Soon there was a popular discourse about the sharing economy, a politics of open networks, and a new international social movement. The movement was so successful at diversifying itself that it was able to engage in serious internecine squabbles. 
+={Stallman, Richard:influence of;code:free access to;commons:achievement of;free culture;Lessig, Lawrence:CC licenses, and;open networks+1} + +As the commons movement matured, and people came to understand the sensibilities of open networks, the viral spiral seemed to acquire new speed and powers. Over the past few years, it has advanced into all sorts of new arenas. Part III examines three of the most exciting ones — business, science, and education. Each has taken the tools and insights developed by the commons movement — free software, CC licenses, collaborative models — and adapted them to its own special needs. + +These spin-off movements of entrepreneurs, scientists, and educators recognize their debt to the free software and CC licenses, but none feels confined by that history or beholden to its leaders. Each is too intent on adapting the tools to its own circumstances. Just as CC licenses have been used in some ways by musicians, and in other ways by filmmakers, and in still other ways by bloggers, so the commoners in the worlds of business, science, and education are forging their own paths. Development requires differentiation. It is fascinating to watch how the principles of the commons are being crafted to meet the distinctive needs of the marketplace, the academy, the research lab, and the classroom. + +What may be most notable about these developments is the blurring of these very categories. On open platforms, social communities are becoming sites for market activity. Scientists are increasingly collaborating with people outside their disciplines, including amateurs. Formal education is becoming more focused on learning, and learning is moving out of the classroom and into more informal and practice-driven venues. + +If there is a common denominator in each of the domains examined in Part III, it is the use of distributed networks, social community, and digital technologies to enhance the goals at hand. 
The new open business models seek to bring consumer and seller interests into closer alignment. The new science commons seek to create more powerful types of research collaboration. The open educational resources movement wants knowledge to circulate more freely and students to direct their own learning. + +For the short term, the fledgling models in these fields are likely to be seen as interesting novelties on the periphery of the mainstream. In time, however, given what we know about network dynamics, the new models are likely to supplant or significantly transform many basic parameters of business, science, and education. The participatory practices that open networks enable are showing that knowledge is more about socially dynamic relationships than about fixed bodies of information. These relationships are also spawning new challenges to institutional authority and expertise. If one looks closely enough, the matrix for a very different order of knowledge, institutional life, and personal engagement can be seen. + +1~ 10 THE NEW OPEN BUSINESS MODELS +={open business models+79} + +/{The commons and the market can be great partners if each shows respect for the other and ingenuity in working together.}/ + +Entrepreneur John Buckman concedes that his Internet record label, Magnatune, amounts to “building a business model on top of chaos.”~{ John Buckman presentation at iCommons Summit, Dubrovnik, Croatia, June 15, 2007. }~ That is to say, he makes money by honoring open networks and people’s natural social inclinations. The company rejects the proprietary muscle games used by its mainstream rivals, and instead holds itself to an ethical standard that verges on the sanctimonious: “We are not evil.” In the music industry these days, a straight shooter apparently has to be that blunt. 
+={Buckman, John+4;Magnatune+8;music:CC licenses for+8;Creative Commons (CC) licenses:music, for+8} + +Magnatune is a four-person enterprise based in Berkeley, California, that since 2003 has been pioneering a new open business model for identifying and distributing high-quality new music. It does not lock up the music with anticopying technology or digital rights management. It does not exploit its artists with coercive, unfair contracts. It does not harass its customers for making unauthorized copies. Internet users can in fact listen to all of Magnatune’s music for free (not just music snippets) via online streaming.~{ John Buckman entry in Wikipedia, at http://en.wikipedia.org/wiki/John_ Buckman. }~ + +Buckman, a former software programmer turned entrepreneur in his thirties, previously founded and ran Lyris Technologies, an e-mail list management company that he sold in 2005. In deciding to start Magnatune, he took note of the obvious realities that the music industry has tried to ignore: radio is boring, CDs cost too much, record labels exploit their artists, file sharing is not going to go away, people love to share music, and listening to music on the Internet is too much work. “I thought, why not make a record label that has a clue?” said Buckman.~{ John Buckman at Magnatune home page, at http://www.magnatune.com/ info/why. }~ + +Well before the band Radiohead released its In /{Rainbows}/ album with a “pay what you want” experiment, Magnatune was inviting its customers to choose the amount they would be willing to pay, from $5 to $18, for any of Magnatune’s 547 albums. Buckman explains that the arrangement signals a respect for customers who, after all, have lots of free music choices. It also gives them a chance to express their appreciation for artists, who receive 50 percent of the sales price. 
“It turns out that people are quite generous and they pay on average about $8.40, and they really don’t get anything more for paying more other than feeling like they’re doing the right thing,” said Buckman.~{ John Buckman, interview with Matthew Magee of Out-Law.com, radio podcast, September 13, 2007, at http://www.out-law.com/page-8468. }~ About 20 percent pay more than $12.~{ John Buckman at iCommons, June 15, 2007. For an extensive profile of Buckman and Magnatune, see http://www.openrightsgroup.org/creative business/index.php/John_Buckman:_Magnatune. }~ +={Radiohead} + +“The reality is today nobody really needs to pay for music at all,” he acknowledges. “If you choose to hit the ‘buy’ button at Magnatune then you’re one of the people who has decided to actually pay for music. Shouldn’t we reflect that honest behavior back and say, well, if you’re one of the honest people how much do you want to pay?”~{ John Buckman, interview with Matthew Magee, September 13, 2007. }~ The set-your-own-price approach is part of Magnatune’s larger strategy of building the business by cultivating open, interactive relationships with its customers and artists. “If you set up a trusting world,” explains Buckman, “you can be rewarded.” + +Magnatune’s business model embraces the openness of the Internet and makes it a virtue, rather than treating it as a bothersome liability that must be elaborately suppressed. All of Magnatune’s music is released as MP3 files, with no digital rights management, under a CC Attribution-NonCommercial-ShareAlike license. This means that customers can legally make their own remixes and covers of songs, and take samples, so long as the uses are noncommercial and carry the same CC license. Magnatune also invites customers to give free downloads of purchased music to three friends. Podcasters have free access to the entire Magnatune catalog. 
+ +By using a CC license, Magnatune saves a bundle by not having to oversee complex terms and conditions for usage of music. Nor does it have to maintain a DRM system and police the behavior of its customers, both of which squander a key marketing asset: consumer goodwill. Instead, the music circulates freely and, in so doing, expands public awareness of Magnatune’s 244 artists. + +Two-thirds of Magnatune’s revenues come from licensing its music to films, ads, television, and shops. Like so many open business models, it has carved out a mid-tier niche between “expensive and proprietary” and “cheap and crummy.” Most mainstream music licensing involves either expensive, highly lawyered deals with record labels or insipid stock music from royalty-free CDs. Magnatune’s innovation is to offer high-quality music in multiple genres at flat-rate licenses for sixteen different usage scenarios. The deals can be easily consummated via the Web; artists share in half the proceeds. No accounting flimflam. To date, Magnatune has licensed its music to more than one thousand indie films and many commercials. + +Magnatune is a small, fledgling enterprise in the $4 billion music industry. It does not have all the answers, and it may be sideswiped by bigger players at some point. But Magnatune is lean, nimble, profitable, and growing. It has shown how innovative business models can flourish in the open environment of the Internet. Unlike its bloated, besieged competitors, Magnatune is willing to listen closely to its customers, artists, and licensing clients. It is fair-minded and straightforward; it wants to share the wealth and let the music flow. + +2~ Open Networks Spur New Business Models +={open networks:new business models in+16;open business models:open networks and+16} + +Openness does not come intuitively to many businesses. Competitive advantage has long been associated with exclusive control and secrecy.
But as the Internet’s power expands, conventional businesses are feeling pressures to rethink their “closed” business models. A new breed of “open businesses” is demonstrating that a reliance on open-source software, open content, and an ethic of transparency in dealings with all corporate stakeholders can be tremendously competitive. +={open business models:transparency in;transparency;Internet:rise of} + +Open businesses understand the Great Value Shift discussed in chapter 5 — that working through open networks and commons is likely to generate greater consumer attention, engagement, and loyalty — and thus sales — and may outperform a more exclusive regime of control. Working on an open network is also the best way for a company to get smarter faster, and to stay alert to changing market conditions. It bears noting that business models are not an either/or choice — that is, all open or all closed. There is a continuum of choices, as we will see below. Sometimes there are heated strategic and moral debates about what level of openness to adopt, yet the general trend in business today is clear: toward openness. +={Great Value Shift;open business models:Great Value Shift, and} + +Even as broadcast networks decry the posting of copyrighted television programs on YouTube, they clearly welcome the ratings spikes that ensue. Wireless telephony is fragmented among many proprietary systems, but pressures are now growing to make them compete on an open platform.~{ See, e.g., Walter S. Mossberg, “Free My Phone,” /{Wall Street Journal}/, October 22, 2007, p. R1. }~ European regulators are calling for “open document format” standards to prevent Microsoft from abusing its proprietary standards in its Office suite of software.
There are even calls for open standards for avatars in virtual worlds like Second Life, The Lounge, and Entropia Universe, so that our digital alter egos can glide from one virtual community to another.~{ Steve Lohr, “Free the Avatars,” New York Times, October 15, 2007. }~ +={YouTube;Microsoft:competition against;open business models:interoperability of+1} + +Why this inexorable trend toward openness? Because on open networks, excessive control can be counterproductive. The overall value that can be created through interoperability is usually greater than the value that any single player may reap from maintaining its own “walled network.”~{ See Elliot E. Maxwell, “Open Standards, Open Source, and Open Innovation: Harnessing the Benefits of Openness,” /{Innovations: Technology, Governance, Globalization}/ 1, no. 3 (Summer 2006), at http://www.emaxwell.net. }~ For a company to reap value from interoperability, however, it must be willing to compete on an open platform and it must be willing to share technical standards, infrastructure, or content with others. Once this occurs, proprietary gains come from competing to find more sophisticated ways to add value in the production chain, rather than fighting to monopolize basic resources. Advantage also accrues to the company that develops trusting relationships with a community of customers. +={open business models:value created in+9;value:creation of+9} + +Free software was one of the earliest demonstrations of the power of online commons as a way to create value. In his classic 1997 essay “The Cathedral and the Bazaar,” hacker Eric S. Raymond provided a seminal analysis explaining how open networks make software development more cost-effective and innovative than software developed by a single firm.~{ Eric Raymond, “The Cathedral and the Bazaar,” May 1997, at http://www.catb.org/~esr/writings/cathedral-bazaar. The essay has been translated into nineteen languages to date.
}~ A wide-open “bazaar” such as the global Linux community can construct a more versatile operating system than one designed by a closed “cathedral” such as Microsoft. “With enough eyes, all bugs are shallow,” Raymond famously declared. Yochai Benkler gave a more formal economic reckoning of the value proposition of open networks in his pioneering 2002 essay “Coase’s Penguin, or, Linux and the Nature of the Firm.”~{ Yochai Benkler, “Coase’s Penguin, or, Linux and the Nature of the Firm,” /{Yale Law Journal}/ 112, no. 369 (2002), at http://www.benkler.org/CoasesPen guin.html. }~ The title is a puckish commentary on how GNU/Linux, whose mascot is a penguin, poses an empirical challenge to economist Ronald Coase’s celebrated “transaction cost” theory of the firm. In 1937, Coase stated that the economic rationale for forming a business enterprise is its ability to assert clear property rights and manage employees and production more efficiently than contracting out to the marketplace. +={Benkler, Yochai:open networks, on+3;Raymond, Eric S.:“The Cathedral and the Bazaar”;free software:creation of value, and;Linux:open business models, and;Microsoft:competition against;Coase, Ronald;GNU/Linux:open business models, and;transaction costs:theory of;open business models:“transaction cost” theory, and} + +What is remarkable about peer production on open networks, said Benkler, is that it undercuts the economic rationale for the firm; commons-based peer production can perform certain tasks more efficiently than a corporation. Those tasks must be modular and divisible into small components and capable of being efficiently integrated, Benkler stipulated. The larger point is that value is created on open networks in very different ways than in conventional markets. Asserting proprietary control on network platforms may prevent huge numbers of people from giving your work (free) social visibility, contributing new value to it, or remixing it. 
“The only thing worse than being sampled on the Internet,” said Siva Vaidhyanathan, with apologies to Oscar Wilde, “is not being sampled on the Internet.” +={Vaidhyanathan, Siva} + +The /{New York Times}/'s experience with its paid subscription service, TimesSelect, offers a great example. The /{Times}/ once charged about fifty dollars a year for online access to its premier columnists and news archives. Despite attracting more than 227,000 subscribers and generating about $10 million a year in revenue, the /{Times}/ discontinued the service in 2007.~{ Richard Pérez-Peña, “Times to Stop Charging for Parts of Its Web Site,” /{New York Times}/, September 18, 2007. }~ A /{Times}/ executive explained that lost subscription revenues would be more than offset by advertising to a much larger online readership with free access. The /{Financial Times}/ and the /{Economist}/ have dropped their paywalls, and the /{Wall Street Journal}/ in effect has done so by allowing free access via search engines and link sites. From some leading citadels of capitalism, a rough consensus had emerged: exclusivity can /{decrease}/ the value of online content.~{ Frank Ahrens, “Web Sites, Tear Down That Wall,” /{Washington Post}/, November 16, 2007, p. D1. See also Farhad Manjoo, “The Wall Street Journal’s Website Is Already (Secretly) Free,” /{Salon}/, March 21, 2008, at http://machinist.salon .com/blog/2008/03/21/wsj/index.html. }~ +={New York Times} + +While enormous value can be created on open networks, it can take different forms, notes David P. Reed, who studies information architectures.~{ David P. Reed, “The Sneaky Exponential — Beyond Metcalfe’s Law to the Power of Community Building,” at http://www.reed.com/Papers/GFN/ reedslaw.html. }~ One of the most powerful types of network value is what Reed calls “Group-Forming Networks,” or GFNs — or what Benkler might call commons-based peer production and I would call, less precisely, the commons. 
Reed talks about “scale-driven value shifts” that occur as a network grows in size. Greater value is created as a network moves from a broadcast model (where “content is king”) to peer production (where transactions dominate) and finally, to a group-forming network or commons (where jointly constructed value is produced and shared). +={Reed, David P.;Benkler, Yochai:The Wealth of Networks;commons-based peer production+3;group-forming networks (GFNs)} + +It is unclear, as a theoretical matter, how to characterize the size and behavior of various “value networks” on the Web today. For simplicity’s sake — and because Web platforms are evolving so rapidly — I refer to two general value propositions, Web 2.0 and the commons. Web 2.0 is about creating new types of value through participation in distributed open networks; the commons is a subset of Web 2.0 that describes fairly distinct, self-governed communities that focus on their own interests, which usually do not involve moneymaking. +={Web 2.0:open business, and+4} + +The rise of Web 2.0 platforms and the commons clearly has some serious implications for business strategy and organization. Just consider how Craigslist is displacing millions of dollars of classified newspaper ads; how open-access journals are threatening the economic base of commercial academic journals; and how user-generated content is competing with network television. At the same time, activities that once occurred through informal social means (finding a date, organizing a gathering, obtaining word-of-mouth recommendations) are increasingly becoming commercial endeavors on the Web. Especially when the commons has strong mechanisms to preserve its value-creating capacity, such as the GPL, open networks are helping to convert more market activity into commons-based activity, or at least shifting the boundary between commodity markets and proprietary, high-value-added markets. 
As this dynamic proceeds, the social and the commercial are blurring more than ever before. + +Many “value chains” that have long sustained conventional businesses are being disrupted. As described in chapter 5, more efficient types of distributed media are disrupting the production/distribution chain that sustains Centralized Media. The Long Tail lets online consumers “pull” niche products that they want rather than enduring a relentless marketing “push” of products they don’t want. Commons-based peer production is a nonmarket version of the Long Tail: dispersed communities of people with niche interests can find one another, form social communities, bypass the market, and collaborate to create the niche resources that they want. +={Long Tail;Centralized Media:production/distribution chain of} + +The question facing many businesses is how to develop stable, long-term business models that can coexist with productive commons, if not leverage them for market gain. Their goal is to find ingenious ways to “monetize” the social relationships of online communities (by selling targeted advertising, personal data, niche products, etc.). Open businesses aim to do this in a respectful, public-spirited way; other, more traditional firms may have fewer scruples because, for them, “it’s all about the money.” + +But here’s the rub: a company can go only so far in monetizing the value-generating capacities of a commons without enclosing it or enraging the commoners. A company may consider itself shrewd for acquiring the copyrights for user-generated content, for example, or for blocking user access to third-party widgets that it disapproves of.~{ See, e.g., Paula Lehman, “MySpace Plays Chicken with Users,” BusinessWeek Online, April 12, 2007. }~ But participants in Web 2.0 communities will protest or simply leave if a corporate host starts to dictate obnoxious policies. 
A company can try to run its Web 2.0 platform as a feudal fiefdom, but it risks inciting users to revolt and start their own (nonmarket) online communities, reinventing themselves as commoners. Although there is an implicit social ethic to Web 2.0 platforms, none is necessarily “free” in the Stallman sense of “freedom.” +={Stallman, Richard:freedom, and} + +Unfortunately, there is no clear consensus about how exactly to define an “open business.” Accordingly, assessments of their social, political, or economic virtue can be slippery. Some analysts such as Henry Chesbrough regard a business as “open” if it relaxes or modifies its intellectual property controls, or changes its organizational practices, as a way to reap value from open networks.~{ Henry Chesbrough, /{Open Business Models: How to Thrive in the New Innovation Landscape}/ (Cambridge, MA: Harvard Business School Press, 2006). }~ Others believe that an open business should use open-source software, and support the copying and sharing of works through CC or other open-content licenses. Sometimes the idea of open business is yoked to a vaguely defined notion of “social responsibility.” It is not always clear whether this ethic is a moral gloss or a structural feature, but in general open businesses strive to practice a more open, accountable, and socially enlightened vision of commerce. +={Chesbrough, Henry;open business models:definition, no consensus+2|social responsibility, and} + +One champion of this vision is OpenBusiness, a Web site jointly created by Creative Commons UK in partnership with CC Brazil and the FGV Law School in Rio de Janeiro, Brazil. The mission of OpenBusiness is to “analyze and explain models by which people can share their knowledge and creativity with others whilst at the same time enjoying the more traditional incentives of profit, individual success and societal advancement.”~{ http://www.openbusiness.org. 
}~ By its lights, an open business is commons-friendly if it is committed to “transparency,” “sustainable systems,” and to putting “the health and welfare of people above everything else.” An open business also tries to generate as many “positive externalities” as possible — knowledge, social relationships, revenues — which it is willing to share with its stakeholders. +={OpenBusiness;commoners:sharing by;open business models:international} + +It is perhaps best to approach open businesses as an eclectic social phenomenon in search of a theory. As it has been said about Wikipedia, “It works in practice, but not in theory.”~{ From blog of Professor Karim Lakhani, Harvard Business School, April 27, 2007. }~ It is risky to overtheorize phenomena that are still fluid and emerging. Still, specific examples of open business can help us understand some basic principles of open networks, and how some businesses are using CC licenses to build innovative sorts of enterprises. +={Wikipedia:social movement, as} + +2~ Share the Wealth, Grow a Commercial Ecosystem +={open business models:commercial ecosystem, as+5} + +The idea that a company can make money by giving away something for free seems so counterintuitive, if not ridiculous, that conventional business people tend to dismiss it. Sometimes they protesteth too much, as when Microsoft’s Steve Ballmer compared the GNU GPL to a “cancer” and lambasted open-source software as having “characteristics of communism.”~{ Joe Wilcox and Stephen Shankland, “Why Microsoft is wary of open source,” CNET, June 18, 2001; and Lea, Graham, “MS’ Ballmer: Linux is communism,” /{Register}/ (U.K.), July 31, 2000. }~ In truth, “sharing the wealth” has become a familiar strategy for companies seeking to develop new technology markets. The company that is the first mover in an emerging commercial ecosystem is likely to become the dominant player, which may enable it to extract a disproportionate share of future market rents. 
Giving away one’s code or content can be a great way to become a dominant first mover. +={Ballmer, Steve;General Public License (GPL):critics of;open business models:first movers+1;Microsoft:competition against+1} + +Netscape was one of the first to demonstrate the power of this model with its release of its famous Navigator browser in 1994. The free distribution to Internet users helped develop the Web as a social and technological ecosystem, while helping fuel sales of Netscape’s Web server software. (This was before Microsoft arrived on the scene with its Internet Explorer, but that’s another story.) At a much larger scale, IBM saw enormous opportunities for building a better product by using GNU/Linux. The system would let IBM leverage other people’s talents at a fraction of the cost and strengthen its service relationships with customers. The company now earns more than $2 billion a year from Linux-related services.~{ Yochai Benkler, /{The Wealth of Networks}/ (Yale University Press, 2006), Figure 2.1 on p. 47. }~ +={GNU/Linux:IBM, and;IBM:GNU/Linux, and;Netscape;World Wide Web:social activity on} + +Today, sharing and openness are key to many business strategies. “Open Source: Now It’s an Ecosystem,” wrote /{BusinessWeek}/ in 2005, describing the “gold rush” of venture capital firms investing in startups with open-source products. Most of them planned to give away their software via the Web and charge for premium versions or for training, maintenance, and support.~{ “Open Source: Now It’s an Ecosystem,” BusinessWeek Online, October 3, 2005. }~ + +The pioneers in using open platforms to develop commercial ecosystems on the Internet are Amazon, Google, Yahoo, and eBay. Each has devised systems that let third-party software developers and businesses extend their platform with new applications and business synergies. 
Each uses systems that dynamically leverage users’ social behaviors and so stimulate business — for example, customer recommendations about books, search algorithms that identify the most popular Web sites, and reputation systems that enhance consumer confidence in sellers. Even Microsoft, eager to expand the ecology of developers using its products, has released 150 of its source code distributions under three “Shared Source” licenses, two of which meet the Free Software Foundation’s definition of “free.”~{ Microsoft’s Shared Source Licenses, at http://www.microsoft.com/resources/sharedsource/licensingbasics/sharedsourcelicenses.mspx; see also Lessig blog, “Microsoft Releases Under ShareAlike,” June 24, 2005, at http://lessig.org/blog/2005/06/microsoft_releases_under_share.html. }~ +={Amazon;eBay;Microsoft:“Shared Source” licenses of;Yahoo;Google;World Wide Web:social activity on} + +More recently, Facebook has used its phenomenal reach — more than 80 million active users worldwide — as a platform for growing a diversified ecology of applications. The company allows software developers to create custom software programs that do such things as let users share reviews of favorite books, play Scrabble or poker with others online, or send virtual gifts to friends. Some apps are just for fun; others are the infrastructure for independent businesses that sell products and services or advertise. In September 2007, Facebook had more than two thousand software applications being used by at least one hundred people.~{ Vauhini Vara, “Facebook Gets Help from Its Friends,” Wall Street Journal, June 22, 2007. See also Riva Richmond, “Why So Many Want to Create Facebook Applications,” /{Wall Street Journal}/, September 4, 2007. }~ +={Facebook} + +2~ Open Content as a Gateway to Commercial Opportunities + +Of course, not every business can own a major platform, as Google, eBay, and Facebook do. Still, there are many other opportunities. 
One of the most popular is to use open platforms to attract an audience, and then strike a deal with an advertiser or commercial distributor, or sell premium services (“get discovered”). Another approach is to use open content to forge a spirited community to which things may be sold (“build a market on a commons”). +={eBay;Facebook+1;Google} + +!{/{Get discovered.}/}! This dynamic has been played out countless times on YouTube, MySpace, Facebook, and other high-traffic social networking sites. An unknown remix artist suddenly becomes famous when his track is discovered by a network swarm: the story of DJ Danger Mouse that we saw in chapter 6. A band attracts a huge following through viral word of mouth: the story of Jake Shapiro and Two Ton Shoe’s stardom in South Korea. There are even calculated scams to get discovered, like the lonelygirl15 series of videos purportedly shot by a teenage girl in her bedroom, which became a huge Internet sensation in 2006.~{ Joshua Davis, “The Secret World of Lonelygirl,” Wired, December 2006, at http://www.wired.com/wired/archive/14.12/lonelygirl.html. }~ +={DJ Danger Mouse;MySpace;YouTube;Shapiro, Jake;Two Ton Shoe;Internet:virtual word of mouth on;open business models:using open platforms to get discovered+15} + +As any television network will tell you, the capacity to aggregate audiences is worth a lot of money. The customary way of monetizing this talent is to sell advertising. Or one can parlay newfound name recognition into side deals with the mass media, which have always depended upon “star power” as a draw. Thus, Ana Marie Cox was able to parlay her notoriety as a political gossip on her Wonkette blog into a job as Washington editor of /{Time}/ magazine. Perez Hilton, a Hollywood blogger who attracted a following, was offered a lucrative perch at the E! cable television channel. 
We saw in chapter 6 how producer Samuli Torssonen’s /{Star Wreck}/ attracted millions of Internet viewers, enabling him to strike a deal with Universal Studios to distribute a DVD version. With the same visions of stardom, or at least paying gigs, in mind, thousands of bands now have fan sites, music downloads, and banner ads on MySpace and other sites to promote themselves.~{ Elizabeth Holmes, “Famous, Online,” /{Wall Street Journal}/, August 8, 2006. }~ +={Cox, Ana Marie;Hilton, Perez;MySpace;Star Wreck Studios;Torssonen, Samuli} + +The CC NonCommercial license is one way to help pursue the “get discovered” business strategy. The license allows authors to seek a global Internet audience without having to cede rights to any commercial opportunities. It is not, however, a terribly reliable way to make money, which is why some artists, especially musicians, find fault with the implicit promise of the NC license. Many serious artists regard the NC license as too speculative a mechanism to get paid for one’s creative work. It is a fair complaint, as far as it goes. The real problem is the closed, highly concentrated music industry, which has a hammerlock on marketing, radio play, and distribution. Newcomers and mid-tier talent cannot get past the corporate gatekeepers to reach an audience, let alone make money. + +In an attempt to bridge the sharing economy with the market, and thereby open up some new channels of commercial distribution for commoners, the Creative Commons in late 2007 introduced a new protocol, CC+. The new project aims to make it easier for the owners of NC-licensed content to signal that agreements, products, or services beyond the scope of the CC licenses are on offer — for example, commercial licensing, warranties, or higher-quality copies. 
A photographer who has hundreds of NC-licensed photos on Flickr would be able to continue to let people use those photos for noncommercial purposes — but through CC+, he could also sell licensing rights to those who want to use the photos for commercial purposes. CC+ is a metadata architecture and standard that allows third-party intermediaries to develop services for consummating commercial transactions. People can use CC+ as a simple “click-through” mechanism for acquiring commercial rights for music, photos, text, and other content. +={Creative Commons (CC):CC+, and+2} + +One of the earliest “copyright management” companies to take advantage of the CC+ standard was RightsAgent, a Cambridge, Massachusetts, company founded by Rudy Rouhana. RightsAgent essentially acts as a go-between for people who create NC-licensed works on the Web and those who wish to buy rights to use them for commercial purposes. Just as PayPal facilitates the exchange of money on the Internet, so RightsAgent aspires to be a paid intermediary for facilitating the sale of user-generated content. +={Rouhana, Rudy;RightsAgent} + +The rise of CC+ and associated companies brings to mind Niva Elkin-Koren’s warning that the Creative Commons licenses can be a slippery slope that merely promotes a property-oriented, transactional mentality — the opposite of the commons. On the other hand, many people operating in the noncommercial sharing economy, such as musicians and photographers, have long complained that, as much as they enjoy participating in the commons, they still need to earn a livelihood. +={Elkin-Koren, Niva;Creative Commons (CC) licenses:critics of} + +Revver is another company that has developed an ingenious way to promote the sharing of content, yet still monetize it based on the scale of its circulation. Revver is a Los Angeles–based startup that hosts user-generated video. All videos are embedded with a special tracking tag that displays an ad at the end. 
Like Google’s AdWords system, which charges advertisers for user “click-throughs” on ad links adjacent to Web content, Revver charges advertisers for every time a viewer clicks on an ad. The number of ad views can be tabulated, and Revver splits ad revenues 50-50 with video creators. Key to the whole business model is the use of the CC Attribution-NonCommercial-No Derivatives license. The license allows the videos to be legally shared, but prohibits anyone from modifying them or using them for commercial purposes. +={Revver+2;Google;videos and film+2;Internet:videos and films on+2;World Wide Web:videos and film on+2} + +One of the most-viewed videos on Revver sparked a minor pop trend. It showed kids dropping Mentos candies into bottles of Coca-Cola, which produces an explosive chemical reaction. The video is said to have generated around $30,000.~{ Revver entry at Wikipedia, at http://en.wikipedia.org/wiki/Revver. }~ So is new media going to feature silly cat videos and stupid stunts? Steven Starr, a co-founder of Revver, concedes the ubiquity of such videos, but is quick to point to “budding auteurs like Goodnight Burbank, Happy Slip, Studio8 and LoadingReadyRun, all building audiences.” He also notes that online, creators “can take incredible risks with format and genre, can grow their own audience at a fraction of network costs, can enjoy free syndication, hosting, audience-building and ad services at their disposal.”~{ Interview with Steven Starr, “Is Web TV a Threat to TV?” Wall Street Journal, August 7, 2007, at http://online.wsj.com/article/SB118530221391976425.html. }~ +={Starr, Steven} + +Blip.tv is another video content-sharing Web site that splits ad revenues with video creators (although it is not automatic; users must “opt in”). Unlike many videos on YouTube and Revver, blip.tv tends to feature more professional-quality productions and serialized episodes, in part because its founders grew out of the “videoblogging” community. 
Blip.tv espouses an open business ethic, with shout-outs to “democratization, openness, and sustainability.” While there is a tradition for companies to spout their high-minded principles, blip.tv puts some bite into this claim by offering an open platform that supports many video formats and open metadata standards. And it allows content to be downloaded and shared on other sites. Users can also apply Creative Commons licenses to their videos, which can then be identified by CC-friendly search engines. For all these reasons, Lessig has singled out blip.tv as a “true sharing site,” in contrast to YouTube, which he calls a “faking sharing site” that “gives you tools to /{make}/ it seem as if there’s sharing, but in fact, all the tools drive traffic and control back to a single site.”~{ Lessig blog post, “The Ethics of Web 2.0,” October 20, 2006, at http://www.lessig.org/blog/archives/003570.shtml. }~ +={blip.tv+1;YouTube+1;Web 2.0:open business, and+3;open business models:open networks and;Lessig, Lawrence:open business sites, and+4} + +Lessig’s blog post on blip.tv provoked a heated response from blogger Nicholas Carr, a former executive editor of the /{Harvard Business Review}/. The contretemps is worth a close look because it illuminates the tensions between Web 2.0 as a business platform and Web 2.0 as a commons platform. In castigating YouTube as a “fake sharing site,” Carr accused Lessig of sounding like Chairman Mao trying to root out counterrevolutionary forces (that is, capitalism) with “the ideology of digital communalism.” +={Carr, Nicholas+2;Web 2.0:commons platform, as+3} + +_1 Like Mao, Lessig and his comrades are not only on the wrong side of human nature and the wrong side of culture; they’re also on the wrong side of history. They fooled themselves into believing that Web 2.0 was introducing a new economic system — a system of “social production” — that would serve as the foundation of a democratic, utopian model of culture creation. 
They were wrong. Web 2.0’s economic system has turned out to be, in effect if not intent, a system of exploitation rather than a system of emancipation. By putting the means of production into the hands of the masses but withholding from those same masses any ownership over the product of their work, Web 2.0 provides an incredibly efficient mechanism to harvest the economic value of the free labor provided by the very, very many and concentrate it into the hands of the very, very few. + +_1 The Cultural Revolution is over. It ended before it even began. The victors are the counterrevolutionaries. And they have $1.65 billion [a reference to the sale price of YouTube to Google] to prove it.~{ Nicholas G. Carr, “Web 2.0lier than Thou,” Rough Type blog, October 23, 2006. Joichi Ito has a thoughtful response in his blog, “Is YouTube Web 2.0?” October 22, 2006, at http://joi.ito.com/archives/2006/10/22/is_youtube_web_20.html; and Lessig responded to Carr in his blog, at http://lessig.org/blog/2006/10/stuck_in_the_20th_century_or_t.html. The “communism discourse” persists, and not just among critics of free culture. Lawrence Liang of CC India used this epigraph in a book on open-content licenses: “There is a specter haunting cultural production, the specter of open content licensing,” which he attributes to “Karl Marx (reworked for the digital era).” From Liang, /{Guide to Open Content Licenses}/ (Rotterdam, Netherlands: Piet Zwart Institute, Institute for Postgraduate Studies and Research, Willem de Kooning Academy Hogeschool, 2004). }~ + +Lessig’s response, a warm-up for a new book, /{Remix}/, released in late 2008, pointed out that there are really /{three}/ different economies on the Internet — commercial, sharing, and hybrid. 
The hybrid economy now emerging is difficult to understand, he suggested, because it “neither gives away everything, nor does it keep everything.” The challenge of open business models, Lessig argues, is to discover the “golden mean.” +={Lessig, Lawrence:Remix;Internet:hybrid economy enabled by+1|sharing economy of+1|commercial economy of+1} + +It can be hard to conceptualize a “hybrid sector” when we are accustomed to dividing the world into “private” and “public” sectors, and “profit-making” and “nonprofit” enterprises. Open business models quickly run up against deep-seated prejudices that associate property with “freedom” and sharing with “communism.” How can there be a middle ground? Although some like Nicholas Carr seem to hanker for the predatory enterprises of an earlier capitalism, only this time on Web 2.0 platforms, that is not likely to happen in a world of distributed computing. Power is too dispersed for predators to survive very long, and besides, the commoners are too empowered. + +!{/{ Build a market on a commons.}/}! A number of online business models are based on building communities of deep social affection and respect, and then using the community as a platform for selling merchandise, advertising, or products. Interestingly, some of the most successful “customer relationship” models revolve around music. The Grateful Dead’s strategy of building a business around a rabid fan base (discussed in chapter 6) occurred well before the Internet became prevalent. It is paradigmatic of the digital age, nonetheless. If the band had locked up its music and prohibited free taping of its concert performances and sharing of homemade tapes, it would have effectively weakened the fan base that sustained its business model. Sharing concert tapes actually made Deadheads more inclined to buy t-shirts, official music releases, and concert tickets because the tape sharing deepened the community’s identity and quasi-spiritual ethic. 
The Grateful Dead’s focus on touring as opposed to studio albums not only intensified the sharing ethic of its fan base, it obliged the band to “keep on truckin’ ” in order to keep earning money. +={commons:building a market on+11;open business models:building a market on a commons+11;communities:commons, and;Grateful Dead;music:market building on a commons+11} + +The Brazilian /{tecnobrega}/ music scene discussed briefly in chapter 7 is another example of artists making money through respectful, in-person relationships with their fans. In the town of Belém, Brazil, /{tecnobrega}/ artists release about four hundred CDs every year, but none are sold in stores; street vendors sell them for $1.50 apiece. The CDs function mostly as advertising for live “sound system” parties on the outskirts of town that attract as many as five thousand people and use state-of-the-art audio technology. Immediately following the performances, some artists also sell a significant number of “instant CDs” that are of better quality (and more expensive) than those sold in the streets. (Interestingly, street sales do not compete with after-concert sales.) +={Brazil:tecnobrega music scene in+6} + +“In their live presentations, the tecnobrega DJ’s usually acknowledge the presence of people from various neighborhoods, and this acknowledgement is of great value to the audience, leading thousands to buy copies of the recorded live presentation,” said Ronaldo Lemos of CC Brazil, who has studied Brazil’s record industry.~{ Interview with Ronaldo Lemos, September 15, 2006. }~ The same basic model is also at work in other grassroots musical genres in Brazil, such as baile funk, which originated in the shantytowns of Rio de Janeiro. +={Lemos da Silva, Ronaldo+4} + +Artists make most of their money from these live performances, not from CDs, said Lemos. 
Bands earn an average of $1,100 per solo performance at these events, and $700 when playing with other bands — this, in a region where the average monthly income is $350. Altogether, Lemos estimates that the sound system parties as a business sector earn $1.5 million per month, on fixed assets of $8 million. + +“The band Calypso has been approached several times by traditional record labels,” said Lemos, “but they turned down all the offers. The reason is that they make more money by means of the existing business model. In an interview with the largest Brazilian newspaper, the singer of the band said, ‘We do not fight the pirates. We have become big /{because}/ of piracy, which has taken our music to cities where they would never have been.’ ” Calypso has sold more than 5 million albums in Brazil and is known for attracting as many as fifty thousand people to its concerts, Lemos said.~{ Ronaldo Lemos, “From Legal Commons to Social Commons: Developing Countries and the Cultural Industry in the 21st Century,” 2006, at http://www.icommons.org/resources/from-legal-commons-to-social-commons-brazil-and-the-cultural-industry-1. See Paula Martini post on iCommons blog, “Over the Top: The New (and Bigger) Cultural Industry in Brazil,” September 28, 2007, at http://www.icommons.org/articles/over-the-top-the-new-and-bigger-cultural-industry-in-brazil. }~ +={piracy} + +Another highly successful open business model in the Brazilian music scene is TramaVirtual, an open platform on which more than 15,000 musicians have uploaded some 35,000 albums. Fans can then download the music for free. While this does not sound like a promising business proposition, it makes a lot of sense in the context of Brazil’s music marketplace. Major record labels release a minuscule number of new Brazilian music CDs each year, and they sell for about $10 to $15.~{ Ibid. }~ Only the cultured elite can afford music CDs, and the native musical talent — which is plentiful in Brazil — has no place to go. 
With such a constricted marketplace, TramaVirtual has become hugely popular by showcasing new and interesting music. +={TramaVirtual+2} + +TramaVirtual’s artistic and social cachet — itself the product of open sharing in a commons — has enabled it to develop a highly respected brand identity. “By exploiting the trademark,” said Lemos, “Trama has been able to create parallel businesses that work with music, but not in the same way that a record label does.”~{ Interview with Ronaldo Lemos, November 6, 2006. }~ For instance, Trama created a business that sponsors free concerts at universities under its trademark sponsorship. It then sells marketing rights at the concerts to cosmetic makers and car companies. Musicians have gained wide public exposure through Trama, and then used that association to negotiate international record and marketing deals for themselves. CSS (Cansei de Ser Sexy) won a record contract with the American label Sub Pop, for example. + +For the past five years, a related business model for music on an international scale has been emerging in Luxembourg. In only three years, Jamendo has amassed a huge international following in much the same way as TramaVirtual — by attracting music fans to its open platform for free music sharing. (The name /{Jamendo}/ is a mix of the words /{jam}/ and /{crescendo}/.) The site is not a music retailer but a repository for free music — with a business model overlay to pay the bills. Jamendo’s purpose is not to maximize returns to shareholders, in other words, but to service musicians and fans in a self-sustaining way. It makes most of its money from “tip jar” donations from fans and from advertising on the Web pages and streamed music. Ad revenues are shared 50-50 with artists, and any donations are passed along to individual artists, minus a small transaction fee. +={Jamendo+4} + +The Jamendo community is sizable and growing. By 2008 it had more than 357,000 active members from around the world. 
Part of the draw is the catalog of more than 10,000 albums, all free. Unlike Magnatune, Jamendo does not select the artists that are featured on its site; everyone is welcome to upload his or her music. To help fans identify music they like, the site offers many sophisticated tools. There are some 60,000 member-written reviews, custom playlists, community ratings of albums, and “folksonomy” tags for albums and songs.~[* Folksonomies, a cross of /{taxonomy}/ and /{folk}/, are essentially user-generated tags attached to each song and album, which enables categories of music to emerge from the “bottom up,” as fans regard the music, rather than through top-down marketing categories.]~ Fans are /{urged}/ to download music through peer-to-peer networks such as BitTorrent and eMule because it reduces Jamendo’s bandwidth expenses. +={Magnatune;music:“folksonomy” tags in} + +“Users can listen, download, review, remix, and ‘widgetize,’” said Sylvain Zimmer, the founder and chief technology officer of Jamendo. As part of its commitment to musicians, the site has a forum for artists and listings of concerts, as well as open APIs~[* An API is an “application programming interface,” a set of protocols that enable a software application to operate on a computer operating system, library, or service. Many companies use proprietary APIs to retain control over who may develop applications that will interoperate with their software. Other companies that wish to encourage development of compatible applications — and thus promote a software ecosystem entwined with the operating system or service — use open APIs.]~ so the Jamendo ecosystem can be integrated into other software. +={Zimmer, Sylvain+2;APIs (application programming interfaces)} + +What’s striking about Jamendo is its nonchalant international feel, as if it were only natural to browse for “deathmetal,” “powerpop,” “hypnotique,” “ambient,” “psytrance,” and “jazzrock” on the same site. 
(These are just a few of the scores of folksonomy tags that can be used to browse the catalog.) “We are a Babel, not a label,” said Zimmer, who reports that India and Japan are heavy downloaders of Jamendo music. Complete, official versions of the site are available in French, the original language for the site, and now English and German. Incomplete versions of the site are available in Spanish, Polish, Portuguese, Russian, Turkish, Italian, Swedish, Czech, and Ukrainian. +={music:“folksonomy” tags in} + +Virtually all the albums on Jamendo use one or more of the six basic CC licenses. The CC ethic is a perfect match for the company’s community-driven business model, said Zimmer. “The best way of detecting CC-incompatible content and commercial uses of NC-licensed work is the community. The Creative Commons makes the community feel more confident and active.”~{ Sylvain Zimmer of Jamendo, presentation at iCommons Summit, Dubrovnik, Croatia, June 15, 2007. }~ He adds that if the site’s managers run too many ads, “the community will tell you.” + +2~ Commoners as Co-creators of Value +={open business models:value created in+13;value:creation of+13} + +For businesses operating on open networks, it is a mistake to regard people merely as customers; they are collaborators and even coinvestors. As more companies learn to interact closely with their customers, it is only natural that conversations about the product or service become more intimate and collaborative. The roles of the “consumer” and “producer” are starting to blur, leading to what some business analysts call the “prosumer”~{ Don Tapscott and Anthony D. 
Williams, /{Wikinomics: How Mass Collaboration Changes Everything}/ (New York Portfolio, 2006), chapter 5, “The Prosumers.” }~ and the “decentralized co-creation of value.”~{ David Bollier, /{The Rise of Collective Intelligence: Decentralized Co-creation of Value as a New Paradigm of Commerce and Culture}/ (Washington, DC: Aspen Institute Communications and Society Program, 2008).}~ The basic idea is that online social communities are becoming staging areas for the advancement of business objectives. Businesses see these communities as cost-effective ways to identify promising innovations, commercialize them more rapidly, tap into more reliable market intelligence, and nurture customer goodwill. + +Amateurs who share with one another through a loose social commons have always been a source of fresh ideas. Tech analyst Elliot Maxwell (citing Lessig) notes how volunteers helped compile the /{Oxford English Dictionary}/ by contributing examples of vernacular usage; how the Homebrew Computer Club in the San Francisco Bay area developed many elements of the first successful personal computer; and how sharing among auto enthusiasts helped generate many of the most important early automotive innovations.~{ Elliot Maxwell, “Open Standards, Open Source, and Open Innovation: Harnessing the Benefits of Openness,” /{Innovations:Technology, Governance, Globalization}/ 1, no. 3 (Summer 2006), at http://www.emaxwell.net, p. 150. }~ In our time, hackers were the ones who developed ingenious ways to use unlicensed electromagnetic spectrum as a commons, which we now know as Wi-Fi. They tinkered with the iPod to come up with podcasts, a new genre of broadcasting that commercial broadcasters now emulate.~{ Elliot E. Maxwell drew my attention to these examples in his excellent essay “Open Standards, Open Source, and Open Innovation.” }~ Numerous self-organized commons have incubated profitable businesses. 
Two movie buffs created the Internet Movie Database as separate Usenet newsgroups in 1989; six years later they had grown so large that they had merged and converted into a business that was later sold to Amazon.~{ Wikipedia entry, IMDB, at http://en.wikipedia.org/wiki/Internet_Movie _Database. }~ The Compact Disc Database was a free database of software applications that looks up information about audio CDs via the Internet. It was originally developed by a community of music fans as a shared database, but in 2000 it had grown big enough that it was sold and renamed Gracenote.~{ Wikipedia entry, CDDB, at http://en.wikipedia.org/wiki/CDDB. }~ +={Amazon;Gracenote;Homebrew Computer Club;iPod;Maxwell, Elliot;Oxford English Dictionary;Wi-Fi;hackers:community of;commons:sources of new ideas, as+11} + +A commons can be highly generative because its participants are tinkering and innovating for their own sake — for fun, to meet a challenge, to help someone out. Amateurs are not constrained by conventional business ideas about what may be marketable and profitable. They do not have to meet the investment expectations of venture capitalists and Wall Street. Yet once promising new ideas do surface in the commons, market players can play a useful role in supplying capital and management expertise to develop, improve, and commercialize an invention. + +Because online commons are such a rich source of new ideas, the most farsighted companies are trying to learn how they might be harnessed to help them innovate and compete more effectively. MIT professor Eric von Hippel is one of the foremost researchers of this process. 
His 2005 book /{Democratizing Innovation}/ describes how the leading participants in high-performance sports — extreme skiing, mountain biking, skateboarding, surfing, and hot-rodding — are forming “innovation communities” that work closely with manufacturers.~{ Eric von Hippel, /{Democratizing Innovation}/ (Cambridge, MA: MIT Press, 2005), available at http://mitpress.mit.edu/democratizing_innovation_pdf. }~ The most active practitioners of these sports are intimately familiar with the equipment and have their own imaginative ideas about what types of innovations the sport needs. Indeed, many of them have already jerry-rigged their own innovations — better cockpit ventilation in sailplanes, improved boot and bindings on snowboards, a method for cutting loose a trapped rope used by canyon climbers. For companies willing to listen to and collaborate with users, says von Hippel, “communities of interest are morphing into communities of creation and communities of production.” +={von Hippel, Eric+1} + +“Users that innovate can develop exactly what they want, rather than relying on manufacturers to act as their (often very imperfect) agents,” von Hippel writes. “Moreover, individual users do not have to develop everything they need on their own: they can benefit from innovations developed and freely shared by others.”~{ Ibid., p. 1 }~ Besides finding empirical examples of this trend, von Hippel has developed a theoretical vocabulary for understanding how collaborative innovation occurs. He probes the user motivations for “free revealing” of their knowledge, the attractive economics that fuel “users’ low-cost innovation niches,” and the public policies that sometimes thwart user-driven innovation (patent rights for a field may be fragmented, anticopying restrictions such as the Digital Millennium Copyright Act may prevent user tinkering, etc.). 
+={Digital Millennium Copyright Act (DMCA) [1998]} + +User-driven innovation is not as esoteric as the “extreme sports” examples may suggest. It is, in fact, a growing paradigm. In one of the more celebrated examples, Lego, the Danish toymaker, invited some of its most fanatic users to help it redesign its Mindstorms robotics kit. The kits are meant to let kids (and adults) build a variety of customized robots out of a wild assortment of plastic Lego pieces, programmable software, sensors, and motors.~{ Tapscott and Williams, /{Wikinomics}/, pp. 130–31. }~ In 2004, when some Lego users reverse-engineered the robotic “brain” for the Mindstorms kit and put their findings on the Internet, Lego at first contemplated legal action. Upon reflection, however, Lego realized that hackers could be a valuable source of new ideas for making its forthcoming Mindstorms kit more interesting and cool. +={Lego+1;hackers:innovations by+1} + +Lego decided to write a “right to hack” provision into the Mindstorms software license, “giving hobbyists explicit permission to let their imaginations run wild,” as Brendan I. Koerner wrote in /{Wired}/ magazine. “Soon, dozens of Web sites were hosting third-party programs that help Mindstorms users build robots that Lego had never dreamed of: soda machines, blackjack dealers, even toilet scrubbers. Hardware mavens designed sensors that were far more sophisticated than the touch and light sensors included in the factory kit.”~{ Brendan I. Koerner, “Geeks in Toyland,” /{Wired}/, February 2006. }~ It turns out that not only are Lego fans happy to advise the company, the open process “engenders goodwill and creates a buzz among the zealots, a critical asset for products like Mindstorms that rely on word-of-mouth evangelism,” said Koerner. 
In the end, he concluded, the Mindstorm community of fanatics has done “far more to add value to Lego’s robotics kit than the company itself.” +={Koerner, Brendan I.;Internet:virtual word of mouth on} + +Another improbable success in distributed, user-driven innovation is Threadless, a Chicago-based t-shirt company. Threadless sells hundreds of original t-shirt designs, each of which is selected by the user community from among more than eight hundred designs submitted every week. The proposed designs are rated on a scale of one to five by the Web site’s more than 600,000 active users. Winners receive cash awards, recognition on the Web site, and their names on the t-shirt label. Every week, Threadless offers six to ten new t-shirts featuring the winning designs. +={Threadless+1} + +In 2006, the company sold more than 1.5 million t-shirts without any traditional kind of marketing. Its business model is so rooted in the user community that Threadless co-founders Jake Nickell and Jacob DeHart have declined offers to sell their t-shirts through conventional, big-name retailers. Threadless’s business model has helped it overcome two major challenges in the apparel industry, write Harvard Business School professor Karim R. Lakhani and consultant Jill A. Panetta — the ability “to attract the right design talent at the right time to create recurring fashion hits,” and the ability “to forecast sales so as to be better able to match production cycles with demand cycles.”~{ Karim R. Lakhani and Jill A. Panetta, “The Principles of Distributed Innovation,” Research Publication No. 2007-7, Berkman Center for Internet & Society, Harvard Law School, October 2007, at http://papers.ssrn.com/abstract _id=1021034. See also Darren Dahl, “Nice Threads,” /{Southwest Airlines Spirit}/, December 2006. 
}~ +={DeHart, Jacob;Nickell, Jake;Lakhani, Karim R.;Panetta, Jill A.} + +A number of companies have started successful enterprises based on the use of wikis, the open Web platforms that allow anyone to contribute and edit content and collaborate. Evan Prodromou, the founder of Wikitravel, a free set of worldwide travel guides, has identified four major types of wiki businesses: service providers who sell access to wikis (Wikispace, wetpaint, PBwiki); content hosters of wikis (wikiHow, Wikitravel, Wikia); consultants who advise companies how to run their own wikis (Socialtext); and content developers (WikiBiz, an offshoot of Wikipedia). +={Prodromou, Evan+1;wikis+1} + +Since the success of a wiki-based business depends upon honoring the integrity of wiki users, Prodromou scorns what he sees as the backhanded strategies of business models based on “wikinomics” and “crowdsourcing.” He sees such models as sly attempts to get “suckers” to do free work for the entrepreneur owning the business. A sustainable commercial wiki, said Prodromou at a conference, respects the community of users and does not try to exploit them. It strives to fulfill a “noble purpose” for users and demonstrate in a transparent way that it offers value. Any hint of trickery or calculation begins to sow distrust and erode the community. Yet any wiki-based business must be able to set boundaries that allow the owners to make responsible business decisions; those decisions, however, must respect the wiki community’s values.~{ Evan Prodromou presentation, “Commercialization of Wikis: Open Community that Pays the Bills,” South by Southwest Interactive conference, March 10, 2007. }~ + +It is hard to predict what new models of “decentralized cocreation of value” will take root and flourish, but the experiments are certainly proliferating. 
Staples, the office supplies store, now hosts a contest inviting the public to suggest inventions that Staples can develop and sell under its brand name.~{ William J. Bulkeley, “Got a Better Letter Opener?” /{Wall Street Journal}/, July 13, 2006. }~ A number of mass-market advertisers have hosted competitions inviting users to create ads for their products. One of the more interesting frontiers in user-driven innovation is tapping the audience for investment capital. +={Staples} + +SellaBand (“You are the record company”) is a Web site that invites bands to recruit five thousand “Believers” to invest $10 apiece in their favorite bands; upon reaching the $50,000 mark, a band can make a professional recording, which is then posted on the SellaBand site for free downloads. Bands and fans can split advertising revenues with SellaBand.~{ http://www.sellaband.com. }~ Robert Greenwald, the activist documentary filmmaker, used e-mail solicitations, social networks, and the blogosphere to ask ordinary citizens to help finance his 2006 film /{Iraq for Sale: The War Profiteers}/.~{ William Booth, “His Fans Greenlight the Project,” /{Washington Post}/, August 20, 2006. }~ +={Greenwald, Robert;SellaBand} + +2~ Reintegrating the Sharing and Commercial Economies + +If there is persistent skepticism about the very idea of open business models, from both business traditionalists focused on the bottom line and commoners committed to sharing, it is because the commons and the commercial economy seem to represent such divergent moral values and social orders. One depends upon reciprocal exchanges of monetary value, with the help of individual property rights and contracts; the other depends upon the informal social circulation of value, without individual property rights or quid pro quos. A market is impersonal, transactional, and oriented to a bottom line; a commons tends to be personal and social and oriented to continuous relationships, shared values, and identity. 
+ +Yet, as the examples above show, the market and the commons interpenetrate each other, yin/yang style. Each “adds value” to the other in synergistic ways. Historically, this has always been true. Adam Smith, the author of /{The Wealth of Nations}/, was also the author of /{The Theory of Moral Sentiments}/, about the moral and social norms that undergird market activity. The market has always depended upon the hidden subsidies of the commons (folk stories, vernacular motifs, amateur creativity) to drive its engine of wealth creation. And the commons builds its sharing regimes amid the material wealth produced by the market (free software is developed on commercially produced computers). +={Smith, Adam:The Theory of Moral Sentiments} + +What has changed in recent years is our perceptions. The actual role of the commons in creative endeavors has become more culturally legible. For businesses to function well on Web 2.0 platforms, they must more consciously integrate social and market relationships in functional, sustainable ways. If the results sometimes seem novel, if not bizarre, it is partly because networking technologies are making us more aware that markets are not ahistorical, universal entities; they are rooted in social relationships. Open business models recognize this very elemental truth, and in this sense represent a grand gambit to go back to the future. 
+={open business models:open networks and;Web 2.0:open business, and} + +1~ 11 SCIENCE AS A COMMONS +={Science Commons+90} + +/{Web 2.0 tools, open access, and CC licenses are helping to accelerate scientific discovery.}/ + +It was one of those embarrassing episodes in science: Two sets of researchers published papers in a German organic chemistry journal, /{Angewandte Chemie}/, announcing that they had synthesized a strange new substance with “12-membered rings.” Then, as blogger and chemist Derek Lowe tells the story, “Professor Manfred Cristl of Wurzburg, who apparently knows his pyridinium chemistry pretty well, recognized this as an old way to make further pyridinium salts, not funky twelve-membered rings. He recounts how over the last couple of months he exchanged awkward emails with the two sets of authors, pointing out that they seem to have rediscovered a 100-year-old reaction. . . .”~{ Derek Lowe, “Neat! Wish It Were True!” /{In the Pipeline}/ [blog], November 29, 2007, at http://pipeline.corante.com. See also, Donna Wentworth, “Why We Need to Figure Out What We Already Know,” Science Commons blog, January 4, 2008, at http://sciencecommons.org/weblog/archives/2008/01/04/ why-we-need-to-figure-out-what-we-already-know. }~ +={Lowe, Derek} + +In the Internet age, people generally assume that these kinds of things can’t happen. All you have to do is run a Web search for “pyridinium,” right? But as scientists in every field are discovering, the existence of some shard of highly specialized knowledge does not necessarily mean that it can be located or understood. After all, a Google search for “pyridinium” turns up 393,000 results. And even peer reviewers for journals (who may have been partly at fault in this instance) have the same problem as any researcher: the unfathomable vastness of the scientific and technical literature makes it difficult to know what humankind has already discovered. 
+ +Paradoxically, even though academic science played the central role in incubating the Internet (in conjunction with the military), it has not fared very well in developing it to advance research. Most search engines are too crude. Journal articles can be expensive and inaccessible. They do not link to relevant Web resources or invite reader comment. Nor do they contain metadata to facilitate computer-based searches, collaborative filtering, and text mining. Scientific databases are plentiful but often incompatible with one another, preventing researchers from exploring new lines of inquiry. Lab researchers who need to share physical specimens still have to shuffle papers through a bureaucratic maze and negotiate with lawyers, without the help of eBay- or Craigslist-like intermediaries. + +“The World Wide Web was designed in a scientific laboratory to facilitate access to scientific knowledge,” observed Duke law professor James Boyle in 2007. “In every other area of life — commercial, social networking, pornography — it has been a smashing success. But in the world of science itself? With the virtues of the open Web all around us, we have proceeded to build an endless set of walled gardens, something that looks a lot like Compuserv or Minitel and very little like a world wide web for science.”~{ James Boyle, “The Irony of a Web Without Science,” /{Financial Times}/, September 4, 2007, at http://www.ft.com/cms/s/2/39166e30-5a7f-11dc-9bcd0000779fd2ac.html. }~ +={Boyle, James:Science Commons, and;science:scientific knowledge+2} + +Therein lies a fascinating, complicated story. To be sure, various scientific bodies have made great progress in recent years in adapting the principles of free software, free culture, and Web 2.0 applications to their research. Open-access journals, institutional repositories, specialty wikis, new platforms for collaborative research, new metatagging systems: all are moving forward in different, fitful ways. 
Yet, for a field of inquiry that has long honored the ethic of sharing and “standing on the shoulders of giants,” academic science has lagged behind most other sectors. + +Part of the problem is the very nature of scientific knowledge. While the conventional Web works fairly well for simple kinds of commerce and social purposes, the Research Web for science requires a more fine-grained, deliberately crafted structure.~{ John Wilbanks, director of the Science Commons, introduced me to this term. }~ Science involves /{practices}/, after all; it is not just about information. The “wisdom of the crowds” is not good enough. Scientific knowledge tends to be significantly more specialized and structured than cultural information or product recommendations. The Web systems for organizing, manipulating, and accessing that knowledge, accordingly, need to be more hierarchical and structured, often in quite specific ways depending upon the discipline. A scientist cannot just type “signal transduction genes in pyramidal neurons” into a search engine; she needs to be able to locate specific genes and annotations of them. Data may be strewn across dozens of different data systems, and those are not likely to be interoperable. This means that technical standards need to be coordinated, or some metasystem developed to allow different data reservoirs to communicate with one another. A scientist must be able to use computers to browse and organize a vast literature. And so on. + +Much as scientists would like to build new types of Internet-based commons, they have quickly run up against a thicket of interrelated problems: overly broad copyright and patent limitations; access and usage restrictions by commercial journal publishers and database owners; and university rules that limit how cell lines, test animals, bioassays, and other research tools may be shared. In a sense, scientists and universities face a classic collective-action problem. 
Everyone would clearly be better off if a more efficient infrastructure and enlightened social ethic could be adopted — but few single players have the resources, incentive, or stature to buck the prevailing order. There is no critical mass for instigating a new platform for scientific inquiry and “knowledge management.” +={copyright law:property rights, and+1|science, in;property rights:copyright law, and+1;science:copyright and patent restrictions in+1} + +Like so many other sectors confronting the Great Value Shift, science in the late 1990s found itself caught in a riptide. The proprietarian ethic of copyright and patent law was intensifying (as we saw in chapter 2), spurring scientists and universities to claim private ownership in knowledge that was previously treated as a shared resource.~{ See, e.g., Jennifer Washburn, /{University Inc.: The Corporate Corruption of Higher Education}/ (New York: Basic Books, 2005); Derek Bok, /{Universities in the Marketplace: The Commercialization of Higher Education}/ (Princeton, NJ: Princeton University Press, 2003); Sheldon Krimsky, /{Science in the Private Interest: Has the Lure of Profits Corrupted Biomedical Research}/ (New York: Rowman & Littlefield, 2003); and Corynne McSherry, /{Who Owns Academic Work? Battling for Control of Intellectual Property}/ (Cambridge, MA: Harvard University Press, 2001). }~ Yet at the same time the Internet was demonstrating the remarkable power of open sharing and collaboration. Even as market players sought to turn data, genetic knowledge, and much else into private property rights, a growing number of scientists realized that the best ideals of science would be fulfilled by recommitting itself to its core values of openness and sharing. Open platforms could also strengthen the social relationships that are essential to so much scientific inquiry.~{ John Seely Brown and Paul Duguid, /{The Social Life of Information}/ (Cambridge, MA: Harvard Business School Publishing, 2000). 
See also, e.g., Jane E. Fountain, “Social Capital: Its Relationship to Innovation in Science and Technology,” /{Science and Public Policy}/ 25, no. 2 (April 1998), pp. 103–15. }~ +={Great Value Shift} + +Perhaps the most salient example of the power of open science was the Human Genome Project (HGP), a publicly funded research project to map the 3 billion base pairs of the human genome. Many other scientific projects have been attracted by the stunning efficacy and efficiency of the open research model. For example, the HapMap project is a government-supported research effort to map variations in the human genome that occur in certain clusters, or haplotypes. There is also the SNP Consortium, a public-private partnership seeking to identify single-nucleotide polymorphisms (SNPs) that may be used to identify genetic sources of disease. Both projects use licenses that put the genomic data into the public domain. +={Human Genome Project (HGP);science:Human Genome Project} + +A 2008 report by the Committee for Economic Development identified a number of other notable open research projects.~{ Committee for Economic Development, /{Harnessing Openness to Transform American Health Care}/ (Washington, DC: CED, 2008). }~ There is the PubChem database, which amasses data on chemical genomics from a network of researchers; the Cancer Biomedical Informatics Grid, a network of several dozen cancer research centers and other organizations that shares data, research tools, and software applications; and TDR Targets, a Web clearinghouse sponsored by the World Health Organization that lets researchers share genetic data on neglected diseases such as malaria and sleeping sickness. It is telling that Bill Gates, who in his commercial life is a staunch advocate of proprietary control of information, has been a leader, through his Bill & Melinda Gates Foundation, in requiring research grantees to share their data. 
+={Gates, Bill} + +There has even been the emergence of open-source biotechnology, which is applying the principles of free software development to agricultural biotech and pharmaceutical development.~{ See, e.g., Rockefeller Foundation, “2005 Bellagio Meeting on Open Source Models of Collaborative Innovation in the Life Sciences” [report], Bellagio, Italy, September 2005. See also Janet Elizabeth Hope, “Open Source Biotechnology,” Ph.D. diss., Australian National University, December 2004. }~ Richard Jefferson, the founder of Cambia, a nonprofit research institute in Australia, launched the “kernel” of what he calls the first opensource biotech toolkit. It includes patented technologies such as TransBacter, which is a method for transferring genes to plants, and GUSPlus, which is a tool for visualizing genes and understanding their functions.~{ Interview with Richard Jefferson, September 7, 2006. See also http://www .cambia.org. }~ By licensing these patented research tools for open use, Jefferson hopes to enable researchers anywhere in the world— not just at large biotech companies or universities — to develop their own crop improvement technologies. +={Jefferson, Richard} + +2~ The Viral Spiral in Science + +Sociologist Robert Merton is often credited with identifying the social values and norms that make science such a creative, productive enterprise. In a notable 1942 essay, Merton described scientific knowledge as “common property” that depends critically upon an open, ethical, peer-driven process.~{ Robert Merton, “Science and Democratic Social Structure,” in /{Social Theory and Social Structure}/, 3d ed. (New York: Free Press, 1968), pp. 604–15. }~ Science is an engine of discovery precisely because research is available for all to see and replicate. 
It has historically tried to keep some distance from the marketplace for fear that corporate copyrights, patents, or contractual agreements will lock up knowledge that should be available to everyone, especially future scientists.~{ Richard R. Nelson, “The Market Economy and the Scientific Commons,” /{Research Policy}/ 33, no. 3 (April 2004), pp. 455–71. See also Karim R. Lakhani et al., “The Value of Openness in Scientific Problem Solving,” Harvard Business School Working Paper 07-050, January 2007, at http://www.hbs.edu/research/pdf/07-050.pdf. }~ Secrecy can also make it difficult for the scientific community to verify research results. +={Merton, Robert;science:scientific knowledge+2} + +Although scientific knowledge eventually becomes publicly available, it usually flows in semi-restricted ways, at least initially, because scientists usually like to claim personal credit for their discoveries. They may refuse to share their latest research lest a rival team of scientists gain a competitive advantage. They may wish to claim patent rights in their discoveries. + +So scientific knowledge is not born into the public sphere, but there is a strong presumption that it ought to be treated as a shared resource as quickly as possible. As law scholar Robert Merges noted in 1996, “Science is not so much given freely to the public as shared under a largely implicit code of conduct among a more or less well identified circle of similarly situated scientists. In other words . . . science is more like a limited-access commons than a truly open public domain.”~{ Robert Merges, “Property Rights Theory and the Commons: The Case of Scientific Research,” /{Social Philosophy and Policy}/ 13, no. 2 (Summer 1996), pp. 145–61. }~ In certain disciplines, especially those involving large capital equipment such as telescopes and particle accelerators, the sharing of research is regarded as a kind of membership rule for belonging to a club. 
+={Merges, Robert} + +As Web 2.0 innovations have demonstrated the power of the Great Value Shift, the convergence of open source, open access, and open science has steadily gained momentum.~{ John Willinsky, “The Unacknowledged Convergence of Open Source, Open Access and Open Science,” /{First Monday}/ 10, no. 8 (August 2005), at http:// firstmonday.org/issues/issue10_8/willinsky/index.html. }~ Creative Commons was mindful of this convergence from its beginnings, but it faced formidable practical challenges in doing anything about it. “From the very first meetings of Creative Commons,” recalled law professor James Boyle, a CC board member, “we thought that science could be the killer app. We thought that science could be the place where Creative Commons could really make a difference, save lives, and have a dramatic impact on the world. There is massive, unnecessary friction in science and we think we can deal with it. Plus, there’s the Mertonian ideal of science, with which Creative Commons couldn’t fit more perfectly.”~{ Interview with James Boyle, August 15, 2006. }~ +={Merton, Robert;Boyle, James:Science Commons, and+1;Great Value Shift;Web 2.0:Great Value Shift, and} + +But despite its early interest in making the Web more research-friendly, Creative Commons realized that science is a special culture unto itself, one that has so many major players and niche variations that it would be foolhardy for an upstart nonprofit to try to engage with it. So in 2002 Creative Commons shelved its ambitions to grapple with science as a commons, and focused instead on artistic and cultural sectors. By January 2005, however, the success of the CC licenses emboldened the organization to revisit its initial idea. 
As a result of deep personal engagement by several Creative Commons board members — computer scientist Hal Abelson, law professors James Boyle and Michael Carroll, and film producer Eric Saltzman — Creative Commons decided to launch a spin-off project, Science Commons. The new initiative would work closely with scientific disciplines and organizations to try to build what it now calls “the Research Web.” +={Abelson, Hal:CC board, on;Carroll, Michael W.;Saltzman, Eric;Science Commons:CC Commons spinoff, and+5} + +Science Commons aims to redesign the “information space” — the technologies, legal rules, institutional practices, and social norms — so that researchers can more easily share their articles, datasets, and other resources. The idea is to reimagine and reinvent the “cognitive infrastructures” that are so critical to scientific inquiry. Dismayed by the pressures exerted by commercial journal publishers, open-access publishing advocate Jean-Claude Guédon has called on librarians to become “epistemological engineers.”~{ Jean-Claude Guédon, “In Oldenburg’s Long Shadow: Librarians, Research Scientists, Publishers and the Control of Scientific Publishing,” at http://www.arl.org/resources/pubs/mmproceedings/138guedon.shtml. }~ They need to design better systems (technical, institutional, legal, and social) for identifying, organizing, and using knowledge. The payoff? Speedier research and greater scientific discovery and innovation. It turns out that every scientific discipline has its own special set of impediments to address. The recurring problem is massive, unnecessary transaction costs. There is an enormous waste of time, expense, bureaucracy, and logistics in acquiring journal articles, datasets, presentations, and physical specimens. 
+={Science Commons:libraries, and+5;science:transaction costs in+1;transaction costs:in science+1;libraries:Science Commons, and} + +If transaction costs could be overcome, scientists could vastly accelerate their research cycles. They could seek answers in unfamiliar bodies of research literature. They could avoid duplicating other people’s flawed research strategies. They could formulate more imaginative hypotheses and test them more rapidly. They could benefit from a broader, more robust conversation (as in free software — “with enough eyes, all bugs are shallow”) and use computer networks to augment and accelerate the entire scientific process. + +That is the vision of open science that Science Commons wanted to address in 2005. It recognized that science is a large, sprawling world of many institutional stakeholders controlling vast sums of money driving incommensurate agendas. In such a milieu, it is not easy to redesign some of the most basic processes and norms for conducting research. Science Commons nonetheless believed it could play a constructive role as a catalyst. + +It was fortunate to have some deep expertise not just from its board members, but from two Nobel Prize winners on its scientific advisory panel (Sir John Sulston and Joshua Lederberg) and several noted scholars (patent scholar Arti Rai, innovation economist Paul David, and open-access publishing expert Michael B. Eisen). The director of Science Commons, John Wilbanks, brought a rare mix of talents and connections. He was once a software engineer at the World Wide Web Consortium, specializing in the Semantic Web; he had founded and run a company dealing in bioinformatics and artificial intelligence; he had worked for a member of Congress; and he was formerly assistant director of the Berkman Center at Harvard Law School. 
+={David, Paul;Eisen, Michael B.;Lederberg, Joshua;Rai, Arti;Sulston, Sir John;Wilbanks, John+1} + +After obtaining free office space at MIT, Wilbanks set off to instigate change within the scientific world — and then get out of the way. “We’re designing Science Commons to outstrip ourselves,” Wilbanks told me. “We don’t want to control any of this; we’re designing it to be decentralized. If we try to control it, we’ll fail.” + +With a staff of seven and a budget of only $800,000 in 2008, Science Commons is not an ocean liner like the National Academy of Science and the National Science Foundation; it’s more of a tug-boat. Its strategic interventions try to nudge the big players into new trajectories. It is unencumbered by bureaucracy and entrenched stakeholders, yet it has the expertise, via Creative Commons, to develop standard licensing agreements for disparate communities. It knows how to craft legal solutions that can work with technology and be understood by nonlawyers. + +In 2006, Science Commons embarked upon three “proof of concept” projects that it hopes will be models for other scientific fields. The first initiative, the Scholar’s Copyright Project, aspires to give scientists the “freedom to archive and reuse scholarly works on the Internet.” It is also seeking to make the vast quantities of data on computerized databases more accessible and interoperable, as a way to advance scientific discovery and innovation. +={Scholar’s Copyright Project;Science Commons:Scholar’s Copyright Project, and} + +A second project, the Neurocommons, is a bold experiment that aims to use the Semantic Web to make a sprawling body of neurological research on the Web more accessible. The project is developing a new kind of Internet platform so that researchers will be able to do sophisticated searches of neuroscience-related journal articles and explore datasets across multiple databases. 
+={Neurocommons;Science Commons:Neurocommons, and the} + +Finally, Science Commons is trying to make it cheaper and easier for researchers to share physical materials such as genes, proteins, chemicals, tissues, model animals, and reagents, which is currently a cumbersome process. The Biological Materials Transfer Project resembles an attempt to convert the pony express into a kind of Federal Express, so that researchers can use an integrated electronic data system to obtain lab materials with a minimum of legal complications and logistical delays. +={Biological Materials Transfer Project} + +In many instances, Science Commons has been a newcomer to reform initiatives already under way to build open repositories of scientific literature or data. One of the most significant is the open-access publishing movement, which has been a diverse, flourishing effort in academic circles since the 1990s. It is useful to review the history of the open access (OA) movement because it has been an important pacesetter and inspiration for the open-science ethic. +={education:open access movement;open access (OA) movement+22;Science Commons:open access movement, and} + +2~ The Open-Access Movement +={open access (OA) movement+19;Science Commons:open access movement, and+19} + +The open-access movement has a fairly simple goal: to get the scientific record online and available to everyone. It regards this task as one of the most fundamental challenges in science. Open-access publishing generally consists of two modes of digital access — open-access archives (or “repositories”) and open-access journals. In both instances, the publisher or host institution pays the upfront costs of putting material on the Web so that Internet users can access the literature at no charge.~[* “Open access” can be a confusing term.
In the context of a rivalrous, depletable natural resource like timber or grazing land, an open-access regime means that anyone can use and appropriate the resource, resulting in its overexploitation and ruin. An /{open-access regime}/ is not the same as a /{commons}/, however, because a commons does have rules, boundaries, sanctions against free riders, etc., to govern the resource. However, in the context of an infinite, nonrivalrous resource like information, which can be copied and distributed at virtually no cost, an open-access regime does not result in overexploitation of the resource. For this reason, open access in an Internet context is often conflated with the commons — even though “open access,” in a natural resource context, tends to produce very different outcomes.]~ + +The appeal of OA publishing stems from the Great Value Shift described in chapter 5. “OA owes its origin and part of its deep appeal to the fact that publishing to the Internet permits both wider dissemination and lower costs than any previous form of publishing,” writes Peter Suber, author of /{Open Access News}/ and a leading champion of OA.~{ http://www.earlham.edu/~peters/fos/fosblog.html. }~ “The revolutionary conjunction is too good to pass up. But even lower costs must be recovered if OA is to be sustainable.” In most cases, publishing costs are met by scientific and academic institutions and/or by subsidies folded into research grants. Sometimes an OA journal will defray its publishing costs by charging authors (or their grant funders) a processing fee for articles that they accept. +={Great Value Shift;Suber, Peter} + +Just as free software and music downloads have disrupted their respective industries, so OA publishing has not been a welcome development among large academic publishers such as Elsevier, Springer, Kluwer, and Wiley. Online publishing usually costs much less than traditional print publishing and it allows authors to retain control over their copyrights. 
Both of these are a big incentive for disciplines and universities to start up their own OA journals. In addition, OA publishing makes it easier for research to circulate, and for authors to reach larger readerships. This not only augments the practical goals of science, it bolsters the reputation system and open ethic that science depends upon. + +Commercial publishers have historically emphasized their shared interests with scholars and scientists, and the system was amicable and symbiotic. Academics would produce new work, validate its quality through peer review, and then, in most cases, give the work to publishers at no charge. Publishers shouldered the expense of editorial production, distribution, and marketing and reaped the bulk of revenues generated. The arrangement worked fairly well for everyone until journal prices began to rise in the early 1970s. Then, as subscription rates continued to soar, placing unbearable burdens on university libraries in the 1990s, the Internet facilitated an extremely attractive alternative: open-access journals. Suddenly, conventional business models for scholarly publishing had a serious rival, one that shifts the balance of power back to scientists and their professional communities. + +Publishers have long insisted upon acquiring the copyright of journal articles and treating them as “works for hire.” This transfer of ownership enables the publisher, not the author, to determine how a work may circulate. Access to an article can then be limited by the subscription price for a journal, the licensing fees for online access, and pay-per-view fees for viewing an individual article. Publishers may also limit the reuse, republication, and general circulation of an article by charging high subscription or licensing fees, or by using digital rights management. If a university cannot afford the journal, or if a scholar cannot afford to buy individual articles, research into a given topic is effectively stymied. 
+ +Open-access champion John Willinsky notes, “The publishing economy of scholarly journals is dominated by a rather perverse property relation, in which the last investor in the research production chain — consisting of university, researcher, funding agency and /{publisher}/ — owns the resulting work outright through a very small investment in relation to the work’s overall cost and value.”~{ Willinsky, “The Unacknowledged Convergence.” }~ Scientists and scholars virtually never earn money from their journal articles, and only occasionally from their books. Unlike commercial writers, this is no problem for academics, whose salaries are intended to free them to study all sorts of niche interests despite the lack of “market demand.” Their works are not so much “intellectual property” that must yield maximum revenues as “royalty-free literature,” as Peter Suber calls it. Academics write and publish to contribute to their fields and enhance their standing among their peers. +={Suber, Peter;Willinsky, John} + +Not surprisingly, many commercial publishers regard OA publishing as a disruptive threat. It can, after all, subvert existing revenue models for scholarly publishing. This does not mean that OA publishing cannot support a viable business model. Much of OA publishing is sustained through “author-side payments” to publishers. In certain fields that are funded by research grants, such as biomedicine, grant makers fold publishing payments into their grants so that the research can be made permanently available in open-access journals. A leading commercial publisher, BioMed Central, now publishes over 140 OA journals in this manner. Hindawi Publishing Corporation, based in Cairo, Egypt, publishes more than one hundred OA journals and turns a profit. And Medknow Publications, based in Mumbai, India, is also profitable as a publisher of more than forty OA journals.
+ +It remains an open question whether the OA business model will work in fields where little research is directly funded (and thus upfront payments are not easily made). As Suber reports, “There are hundreds of OA journals in the humanities, but very, very few of them charge a fee on the author’s side; most of them have institutional subsidies from a university say, or a learned society.”~{ Interview with Peter Suber, June 28, 2006. }~ Yet such subsidies, in the overall scheme of things, may be more attractive to universities or learned societies than paying high subscription fees for journals or online access. +={Suber, Peter+1} + +The tension between commercial publishers and academic authors has intensified over the past decade, fueling interest in OA alternatives. The most salient point of tension is the so-called “serials crisis.” From 1986 to 2006, libraries that belong to the Association of Research Libraries saw the cost of serial journals rise 321 percent, or about 7.5 percent a year for twenty consecutive years.~{ Association of Research Libraries, /{ARL Statistics}/ 2005–06, at http://www.arl.org/stats/annualsurveys/ar/stats/arlstats06.shtml. }~ This rate is four times higher than the inflation rate for those years. Some commercial journal publishers reap profits of nearly 40 percent a year.~{ Peter Suber, “Creating an Intellectual Commons through Open Access,” in Charlotte Hess and Elinor Ostrom, eds., /{Understanding Knowledge as a Commons: From Theory to Practice}/ (Cambridge, MA: MIT Press, 2007), p. 175. }~ By 2000 subscription rates were so crushing that the Association of American Universities and the Association of Research Libraries issued a joint statement that warned, “The current system of scholarly publishing has become too costly for the academic community to sustain.”~{ Association of Research Libraries, “Tempe Principles for Emerging Systems of Scholarly Publishing,” May 10, 2000, at http://www.arl.org/resources/pubs/tempe/index.shtml.
}~ Three years later, the high price of journals prompted Harvard, the University of California, Cornell, MIT, Duke, and other elite research universities to cancel hundreds of journal subscriptions — a conspicuous act of rebellion by the library community. +={libraries:“serials crisis”, and|Science Commons, and;Science Commons:libraries, and} + +As journal prices have risen, the appeal of OA publishing has only intensified. Unfortunately, migrating to OA journals is not simply an economic issue. Within academia, the reputation of a journal is deeply entwined with promotion and tenure decisions. A scientist who publishes an article in /{Cell}/ or /{Nature}/ earns far more prestige than she might for publishing in a little-known OA journal. + +So while publishing in OA journals may be economically attractive, it flouts the institutional traditions and social habits that scientists have come to rely on for evaluating scientific achievement. The OA movement’s challenge has been to document how OA models can help a university, and so it has collaborated with university administrators to showcase exemplary successes and work out new revenue models. It is urging promotion and tenure committees, for example, to modify their criteria to stop discriminating against new journals just because they are new, and hence to stop discriminating against OA journals (which are all new). Much of this work has fallen to key OA leaders like the Open Society Institute, the Hewlett Foundation, Mellon Foundation and the library-oriented SPARC (Scholarly Publishing and Academic Resources Coalition) as well as individuals such as John Willinsky, Jean-Claude Guédon, Stevan Harnad, and Peter Suber. +={Suber, Peter;Willinsky, John;Guédon, Jean-Claude;Harnad, Stevan} + +One of the first major salvos of the movement came in 2000, when biomedical scientists Harold E. Varmus, Patrick O. Brown, and Michael B. 
Eisen called on scientific publishers to make their literature available through free online public archives such as the U.S. National Library of Medicine’s PubMed Central. Despite garnering support from nearly 34,000 scientists in 180 countries, the measure did not stimulate the change sought. It did alert the scientific world, governments, and publishers about the virtues of OA publishing, however, and galvanized scientists to explore next steps. +={Brown, Patrick O.;Varmus, Harold E.} + +At the time, a number of free, online peer-reviewed journals and free online archives were under way.~{ http://www.earlham.edu/~peters/fos/timeline.htm. }~ But much of the momentum for the organized OA movement began in 2001, when the Open Society Institute convened a group of leading librarians, scientists, and other academics in Hungary. In February 2002 the group released the Budapest Open Access Initiative, a statement that formally describes “open access” as the freedom of users to “read, download, copy, distribute, print, search or link to the full texts of . . . articles, crawl them for indexing, pass them as data to software, or use them for any other lawful purpose, without financial, legal or technical barriers other than those inseparable from gaining access to the Internet itself.”~{ The Budapest Open Access Initiative can be found at http://www.soros.org/openaccess. }~ Two subsequent statements, the Bethesda Declaration and the Berlin Declaration, in June 2003 and October 2003, respectively, expanded upon the definitions of open access and gave the idea new prominence. (Suber calls the three documents the “BBB definition” of open access.)~{ http://www.earlham.edu/~peters/fos/overview.htm.
}~ +={Suber, Peter;Budapest Open Access Initiative (2002);libraries:open access movement, and} + +Creative Commons licenses have been critical tools in the evolution of OA publishing because they enable scientists and scholars to authorize in advance the sharing, copying, and reuse of their work, compatible with the BBB definition. The Attribution (BY) and Attribution-Non-Commercial (BY-NC) licenses are frequently used; many OA advocates regard the Attribution license as the preferred choice. The protocols for “metadata harvesting” issued by the Open Archives Initiative are another useful set of tools in OA publishing. When adopted by an OA journal, these standardized protocols help users more easily find research materials without knowing in advance which archives they reside in, or what they contain. + +There is no question that OA is transforming the market for scholarly publishing, especially as pioneering models develop. The Public Library of Science announced its first two open-access journals in December 2002. The journals represented a bold, high-profile challenge by highly respected scientists to the subscription-based model that has long dominated scientific publishing. Although Elsevier and other publishers scoffed at the economic model, the project has expanded and now publishes seven OA journals, for biology, computational biology, genetics, pathogens, and neglected tropical diseases, among others. + +OA received another big boost in 2004 when the National Institutes of Health proposed that all NIH-funded research be made available for free one year after its publication in a commercial journal. The $28 billion that the NIH spends on research each year (more than the domestic budget of 142 nations!) results in about 65,000 peer-reviewed articles, or 178 every day. Unfortunately, commercial journal publishers succeeded in making the proposed OA policy voluntary.
The battle continued in Congress, but it became clear that the voluntary approach was not working. Only 4 percent of researchers published their work under OA standards, largely because busy, working scientists did not consider it a priority and their publishers were not especially eager to help. So Congress in December 2007 required NIH to mandate open access for its research within a year of publication.~{ Peter Suber has an excellent account of the final OA legislation in /{SPARC Open Access Newsletter}/, no. 17, January 2, 2008, at http://www.earlham.edu/~peters/fos/newsletter/01-02-08.htm. }~ +={National Institutes for Health (NIH)} + +What may sound like an arcane policy battle in fact has serious implications for ordinary Americans. The breast cancer patient seeking the best peer-reviewed articles online, or the family of a person with Huntington’s disease, can clearly benefit if they can acquire, for free, the latest medical research. Scientists, journalists, health-care workers, physicians, patients, and many others cannot access the vast literature of publicly funded scientific knowledge because of high subscription rates or per-article fees. A freely available body of online literature is the best, most efficient way to help science generate more reliable answers, new discoveries, and commercial innovations.
+ +Although journals may or may not choose to honor OA principles, any scientist, as the copyright holder of his articles, can choose to “self-archive” his work under open-access terms. But commercial publishers generally don’t like to cede certain rights, and authors usually don’t know what rights to ask for, how to assert them in legal language, and how to negotiate with publishers. So it is difficult for most academics to assert their real preferences for open access. To help make things simpler, SPARC and MIT developed what is called an “author’s addendum.” It is a standard legal contract that authors can attach to their publishing contracts, in which they reserve certain key rights to publish their works in OA-compliant ways. + +2~ The Scholar’s Copyright Project +={Scholar’s Copyright Project+18;Science Commons:Scholar’s Copyright Project, and+18} + +In an attempt to help the open-access movement, Science Commons in 2007 developed its own suite of amendments to publishing contracts. The goal has been to ensure that “at a minimum, scholarly authors retain enough rights to archive their work on the Web. Every Science Commons Addendum ensures the freedom to use scholarly articles for educational purposes, conference presentations, in other scholarly works or in professional activities.”~{ Science Commons brochure [undated]. }~ The ultimate goal is to enable authors “to have the clear and unambiguous freedom to engage in their normal everyday scholarly activities without contending with complex technology, continuous amendments to contracts or the need for a lawyer.”~{ Science Commons, “Scholar’s Copyright Project — Background Briefing,” at http://sciencecommons.org/literature/scholars_copyright.html. }~ +={open access (OA) movement} + +To make the whole process easier for scientists, Science Commons developed the Scholar’s Copyright Addendum Engine. 
This point-and-click Web-based tool lets authors publish in traditional, subscription-based journals while retaining their rights to post copies on the Internet for download, without most copyright and financial restrictions. There are also options for “drag and drop” self-archiving to repositories such as MIT’s DSpace and the National Library of Medicine’s PubMed Central. Besides making self-archiving easier and more prevalent, Science Commons hopes to standardize the legal terms and procedures for self-archiving to avoid a proliferation of incompatible rights regimes and document formats. “The engine seems to be generating a dialogue between authors and publishers that never existed,” said John Wilbanks. “It’s not being rejected out of hand, which is really cool. To the extent that the addendum becomes a norm, it will start to open up the [contractual] limitations on self-archiving.”~{ Interview with John Wilbanks, November 19, 2007. }~ +={Wilbanks, John} + +Harvard University gave self-archiving a big boost in February 2008 when its faculty unanimously voted to require all faculty to distribute their scholarship through an online, open-access repository operated by the Harvard library unless a professor chooses to “opt out” and publish exclusively with a commercial journal. Robert Darnton, director of the Harvard library, said, “In place of a closed, privileged and costly system, [the open-access rule] will help open up the world of learning to everyone who wants to learn.”~{ Patricia Cohen, “At Harvard, a Proposal to Publish Free on the Web,” /{New York Times}/, February 12, 2008. See also Peter Suber’s coverage of the decision in Open Access News, at http://www.earlham.edu/~peters/fos/2008/02/more-on-imminent-oa-mandate-at-harvard.html, and subsequent days. }~ Harvard’s move was the first time that a university faculty, and not just the administration, initiated action to take greater control of its scholarly publishing.
While some critics complain the new policy does not go far enough, most OA advocates hailed the decision as a major step toward developing alternative distribution models for academic scholarship. +={Darnton, Robert;Harvard University;open access (OA) movement} + +By far, the more ambitious aspect of the Scholar’s Copyright project is the attempt to free databases from a confusing tangle of copyright claims. In every imaginable field of science — from anthropology and marine biology to chemistry and genetics — databases are vital tools for organizing and manipulating vast collections of empirical data. The flood of data has vastly increased as computers have become ubiquitous research tools and as new technologies are deployed to generate entirely new sorts of digital data streams — measurements from remote sensors, data streams from space, and much more. But the incompatibility of databases — chiefly for technical and copyright reasons — is needlessly Balkanizing research to the detriment of scientific progress. “There is plenty of data out there,” says Richard Wallis of Talis, a company that has built a Semantic Web technology platform for open data, “but it is often trapped in silos or hidden behind logins, subscriptions or just plain difficult to get hold of.” He added that there is a lot of data that is “just out there,” but the terms of access may be dubious.~{ Donna Wentworth blog post, “Ensuring the freedom to integrate — why we need an ‘open data’ protocol,” Science Commons blog, December 20, 2007, at http://sciencecommons.org/weblog/archives/2007/12/20/ensuring-the-freedom-to-integrate. }~ +={Wallis, Richard;science:databases+14;Science Commons:ownership of data, and+14} + +Questions immediately arise: Can a database be legally used? Who owns it? Will the database continue to be accessible? Will access require payment later on?
Since data now reside anywhere in the world, any potential user of data also has to consider the wide variations of copyright protection for databases around the world. + +The question of how data shall be owned, controlled, and shared is a profoundly perplexing one. History has shown the virtue of sharing scientific data — yet individual scientists, universities, and corporations frequently have their own interests in limiting how databases may be used. Scientists want to ensure the integrity of the data and any additions to it; they may want to ensure preferential access to key researchers; companies may consider the data a lucrative asset to be privately exploited. Indeed, if there is not some mechanism of control, database producers worry that free riders will simply appropriate useful compilations and perhaps sell it or use it for their own competitive advantage. Or they may fail to properly credit the scientists who compiled the data in the first place. Inadequate database protection could discourage people from creating new databases in the future. + +A National Research Council report in 1999 described the problem this way: “Currently many for-profit and not-for-profit database producers are concerned about the possibility that significant portions of their databases will be copied or used in substantial part by others to create ‘new’ derivative databases. If an identical or substantially similar database is then either re-disseminated broadly or sold and used in direct competition with the original rights holder’s database, the rights holder’s revenues will be undermined, or in extreme cases, the rights holder will be put out of business.”~{ National Research Council, /{A Question of Balance: Private Rights and the Public Interest in Scientific and Technical Databases}/ (Washington, DC: National Academy Press, 1999), p. 14. 
+ +In the late 1990s, when the Human Genome Project and a private company, Celera, were competing to map the human genome, the publicly funded researchers were eager to publish the genome sequencing data as quickly as possible in order to prevent Celera or any other company from claiming exclusive control over the information. They wanted the data to be treated as “the common heritage of humanity” so that it would remain openly accessible to everyone, including commercial researchers. When Sir John Sulston of the Human Genome Project broached the idea of putting his team’s research under a GPL-like license, it provoked objections that ownership of the data would set a worrisome precedent. A GPL for data amounts to a “reach-through” requirement on how data may be used in the future. This might not only imply that data can be owned — flouting the legal tradition that facts cannot be owned — it might discourage future data producers from depositing their data into public databases.~{ John Sulston and Georgina Ferry, /{The Common Thread: A Story of Science, Politics, Ethics and the Human Genome}/ (Washington, DC: Joseph Henry Press, 2002), pp. 212–13. }~ +={Human Genome Project (HGP);science:Human Genome Project;Sulston, Sir John;General Public License (GPL):“reach-through” requirement} + +The International HapMap Project attempted such a copyleft strategy with its database of genotypes; its goal is to compare the genetic sequences of different individuals to identify chromosomal regions where genetic variants are shared.~{ http://www.hapmap.org. }~ The project initially required users to register and agree to certain contract terms in order to use the database. One key term prohibited users from patenting any genetic information from the database or using patents to block usage of HapMap data.~{ Andrés Guadamuz González, “Open Science: Open Source Licenses in Scientific Research,” /{North Carolina Journal of Law & Technology}/ 7, no. 2 (Spring 2006), pp. 349–50.
}~ This viral, open-content license for data seemed to provide a solution to the problem of how to keep data in the commons. But in time the HapMap Project found that its license inhibited people’s willingness to integrate their own data with the HapMap database. It therefore abandoned its license and now places all of its data into the public domain; it is now available to be used by anyone for any purpose, although it has issued guidelines for the “responsible use and publication” of the data.~{ http://www.hapmap.org/guidelines_hapmap_data.html.en. }~ +={International HapMap Project} + +The basic problem with applying copyright law to databases is how to draw the line between what is private property and what remains in the commons. “If you try to impose a Creative Commons license or free-software-style licensing regime on a database of uncopyrightable facts,” explained John Wilbanks, “you create an enormous amount of confusion in the user about where the rights start and stop.”~{ Interview with John Wilbanks, November 19, 2007. }~ It is not very practical for a working scientist to determine whether copyright protection applies only to the data itself, to the database model (the structure and organization of the data), or to the data entry and output sheet. A scientist might reasonably presume that his data are covered by copyright law, and then use that right to apply a CC ShareAlike license to the data. But in fact, the data could be ineligible for copyright protection and so the CC license would be misleading; other scientists could ignore its terms with impunity. At the other extreme, other scientists may be unwilling to share their data at all lest the data circulate with no controls whatsoever. Data are either overprotected or underprotected, but in either case there is great ambiguity and confusion. 
+={Wilbanks, John;copyright law:science, in+1;science:copyright and patent restrictions in+1;Creative Commons (CC) licenses:applied to databases} + +For two years, Science Commons wrestled with the challenge of applying the CC licenses to databases. Ultimately, the project came to the conclusion that “copyright licenses and contractual restrictions are simply the wrong tool, even if those licenses are used with the best of intentions.” There is just too much uncertainty about the scope and applicability of copyright — and thus questions about any licenses based on it. For example, it is not entirely clear what constitutes a “derivative work” in the context of databases. If one were to query hundreds of databases using the Semantic Web, would the federated results be considered a derivative work that requires copyright permissions from each database owner? There is also the problem of “attribution stacking,” in which a query made to multiple databases might require giving credit to scores of databases. Different CC licenses for different databases could also create legal incompatibilities among data. Data licensed under a CC ShareAlike license, for example, cannot be legally combined with data licensed under a different license. Segregating data into different “legal boxes” could turn out to impede, not advance, the freedom to integrate data on the Web. +={attribution stacking;copyright law:attribution stacking in databases, and|derivative works, on;derivative works} + +After meeting with a variety of experts in scientific databases, particularly in the life sciences, biodiversity, and geospatial research, the Science Commons came up with an ingenious solution to the gnarly difficulties. Instead of relying on either copyright law or licenses, Science Commons in late 2007 announced a new legal tool, CC0 (CC Zero), which creates a legal and technical platform for a scientific community to develop its own reputation system for sharing data. 
+={CC Zero (CC0)+6;Creative Commons (CC):CC0 (CC Zero), and+6;Science Commons:CC0 (CC Zero), and+6} + +CC0 is not a license but a set of protocols. The protocols require that a database producer waive all rights to the data based on intellectual property law — copyrights, patents, unfair competition claims, unfair infringement rights — a “quitclaim” that covers everything. Then it requires that the database producer affirmatively declare that it is not using contracts to encumber future uses of the data. Once a database is certified as complying with the protocols, as determined by Science Commons, it is entitled to use a Science Commons trademark, “Open Access Data,” and CC0 metadata. The trademark signals to other scientists that the database meets certain basic standards of interoperability, legal certainty, ease of use, and low transaction costs. The metadata is a functional software tool that enables different databases to share their data. + +“What we are doing,” said John Wilbanks, “is reconstructing, contractually, the public domain. The idea is that with any conforming implementation — any licensed database — you have complete freedom to integrate with anything else. It creates a zone of certainty for data integration.”~{ Ibid. }~ Unlike public-domain data, the databases that Science Commons certifies as meeting open-data protocols cannot be taken private or legally encumbered. To qualify to use the Open Access Data mark, databases must be interoperable with other databases licensed under the protocols. If someone falsely represents that his data are covered by the license, Science Commons could pursue a trademark infringement case. +={Wilbanks, John;public domain:reconstructing+4} + +To develop this scheme, Science Commons’s attorney Thinh Nguyen worked closely with Talis, a company that has built a Semantic Web technology platform for open data and developed its own open database license. 
Nguyen also worked with the company’s legal team, Jordan Hatcher and Charlotte Waelde, and with the Open Knowledge Foundation, which has developed the Open Knowledge Definition. +={Nguyen, Thinh;Hatcher, Jordan;Waelde, Charlotte;Open Knowledge Definition} + +The CC0 approach to data represents something of a breakthrough because it avoids rigid, prescriptive legal standards for a type of content (data) that is highly variable and governed by different community norms. CC0 abandons the vision of crafting a single, all-purpose copyright license or contract for thousands of different databases in different legal jurisdictions. Instead it tries to create a legal framework that can honor a range of variable social norms that converge on the public domain. Each research community can determine for itself how to meet the CC0 protocols, based on its own distinctive research needs and traditions. Different norms can agree to an equivalency of public-domain standards without any one discipline constraining the behaviors of another. +={public domain:social norms, and} + +The system is clever because it provides legal reliability without being overly prescriptive. It is simple to use but still able to accommodate complex variations among disciplines. And it has low transaction costs for both producers and users of data. Over time, the databases that comply with the CC0 protocols are likely to grow into a large universe of interoperable open data. +={science:transaction costs in;transaction costs:science, in} + +It is still too early to judge how well the CC0 program is working, but initial reactions have been positive. “The solution is at once obvious and radical,” said Glyn Moody, a British journalist who writes about open-source software.
“It is this pragmatism, rooted in how science actually works, that makes the current protocol particularly important.” Deepak Singh, the co-founder of Bioscreencast, a free online video tutorial library for the scientific community, said, “I consider just the announcement to be a monumental moment.”~{ Moody and Singh quotations from Donna Wentworth, Science Commons blog post, December 20, 2007. }~ +={Moody, Glyn;Singh, Deepak} + +2~ The Neurocommons + +Every day there is so much new scientific literature generated that it would take a single person 106 years to read it all.~{ Brian Athey, University of Michigan, presentation at Commons of Science conference, National Academy of Science, Washington, DC, October 3, 2006. }~ In a single year, over twenty-four thousand peer-reviewed journals publish about 2.5 million research articles.~{ Stevan Harnad, “Maximizing Research Impact Through Institutional and National Open-Access Self-Archiving Mandates,” /{Electronics & Computer Science E-Prints Repository}/, May 2006, available at http://eprints.ecs.soton.ac.uk/ 12093/02/harnad-crisrey.pdf. }~ Our ability to generate content has far outstripped our ability to comprehend it. We are suffering from a cognitive overload — one that can only be addressed by using software and computer networks in innovative ways to organize, search, and access information. For many years, Sir Tim Berners-Lee, the celebrated inventor of the World Wide Web, and his colleagues at the World Wide Web Consortium (W3C), based at MIT, have been trying to solve the problem of information overload by developing a “new layer” of code for the Web. +={Berners-Lee, Tim;World Wide Web Consortium (W3C)} + +This visionary project, the so-called Semantic Web, aspires to develop a framework for integrating a variety of systems, so they can communicate with one another, machine to machine.
The goal is to enable computers to identify and capture information from anywhere on the Web, and then organize the results in sophisticated and customized ways. “If you search for ‘signal transduction genes in parameter neurons,’ ” said John Wilbanks of Science Commons, “Google sucks. It will get you 190,000 Web pages.” The goal of the Semantic Web is to deliver a far more targeted and useful body of specialized information. +={Semantic Web+6;Science Commons:Semantic Web, and the+6;World Wide Web:Semantic Web+6;Wilbanks, John} + +A key tool is the Unique Resource Identifier, or URI, which is analogous to the Unique Resource Locator, or URL, used by the Web. Affix a URI to any bit of information on the Web, and the Semantic Web will (so it is hoped) let you mix and match information tagged with that URI with countless other bits of information tagged with other URIs. It would not matter if the bit of information resides in a journal article, database, clinical image, statistical analysis, or video; the point is that the URI would identify a precise bit of information. By enabling cross-linking among different types of information, the idea is that scientists will be able to make all sorts of unexpected and serendipitous insights. +={Unique Resource Identifier (URI);Unique Resource Locator (URL)} + +For example, geneticists studying Huntington’s disease, a rare neurodegenerative disorder, and experts studying Alzheimer’s disease are both exploring many of the same genes and proteins of the brain. But because of the specialization of their disciplines, the chances are good that they read entirely different scientific journals and attend different conferences. There is no easy or systematic way for scientists in one specialty to explore the knowledge that has developed in another specialty. The Semantic Web could probably help. + +Unfortunately, for a grand dream that has been touted since the 1990s, very little has developed.
The W3C has been embroiled in the design challenges of the Semantic Web for so long that many companies and computer experts now scoff at the whole idea of the Semantic Web. There have been too many arcane, inconclusive debates about computer syntax, ontology language, and philosophical design choices that no one is holding their breath anymore, waiting for the Semantic Web to arrive. (Wikipedia defines a computer ontology as “a data model that represents a set of concepts within a domain and the relationships between those concepts. It is used to reason about the objects within that domain.”) The vision of the Semantic Web may have the potential to revolutionize science, but few people have seen much practical value in it over the near term, and so it has garnered little support. +={World Wide Web Consortium (W3C)+1} + +Wilbanks, who once worked at the W3C, was frustrated by this state of affairs. Although he has long believed in the promise of the Semantic Web, he also realized that it is not enough to extol its virtues. One must demonstrate its practicality. “The way to herd cats is not to herd cats,” he said, citing a colleague, “but to put a bowl of cream on your back stoop and run like hell.” For Wilbanks, the bowl of cream is the Neurocommons knowledge base, a project that seeks to integrate a huge amount of neuroscientific research using Semantic Web protocols and is easy to use. +={Neurocommons+5;Science Commons:Neurocommons, and the+5;Wilbanks, John+4} + +“The way to overcome the inertia that the Semantic Web critics rightly point out, is not to sit down and argue about ontologies,” said Wilbanks. “It’s to release something that’s useful enough that it’s worth wiring your database into the commons system. If I want to get precise answers to complicated questions that might be found in my own database, among others, now I can do that. I simply have to wire it into the Neurocommons. 
You don’t need to come to some magical agreement about ontology; you just need to spend a couple of days converting your database to RDF [Resource Description Framework, a set of Semantic Web specifications], and then— boom! — I’ve got all of the other databases integrated with mine.” By getting the ball rolling, Science Commons is betting that enough neuroscience fields will integrate their literature to the Neurocommons protocols and make the new commons a lively, sustainable, and growing organism of knowledge. +={RDF (Resource Description Framework)} + +Using the “open wiring” of the Semantic Web, the Neurocommons has already integrated information from fifteen of the top twenty databases in the life sciences and neuroscience. The data have been reformatted to conform to Semantic Web protocols and the scientific literature, where possible, has been tagged so that it can be “text-mined” (searched for specific information via URI tags). “We have put all this stuff into a database that we give away,” said Wilbanks. “It’s already been mirrored in Ireland, and more mirrors are going up. It’s sort of like a ‘knowledge server,’ instead of a Web server.” +={Unique Resource Identifier (URI)} + +Commercial journal publishers already recognize the potential power of owning and controlling metadata in scientific literature and datasets. To leverage this control many are starting to make copyright claims in certain kinds of metadata, and to amend their contracts with libraries in order to limit how they may retrieve electronic information. “There is a lot at stake here,” says Villanova law professor Michael Carroll. “What Science Commons wants to do is make sure that metadata is an open resource.”~{ Interview with Michael Carroll, August 7, 2006. 
}~ +={Carroll, Michael W.;libraries:Science Commons, and;Science Commons:libraries, and} + +Wilbanks has high hopes that the Neurocommons project, by providing a useful demonstration of Semantic Web tools, will hasten the interoperability of specialized knowledge that is currently isolated from related fields. It comes down to how to motivate a convergence of knowledge. Instead of arguing about which discipline’s ontology of specialized knowledge is superior to another’s — and making little headway toward a consensus — Wilbanks has a strategy to build a knowledge tool that is useful. Period. His bet is that a useful “knowledge server” of integrated neuroscientific information will be a powerful incentive for adjacent disciplines to adapt their own literature and databases to be compatible. The point is to get the commons going — while allowing the freedom for it to evolve. Then, if people have disagreements or quibbles, they will be free to change the ontologies as they see fit. “The version [of the Neurocommons] that we are building is useful and it is free,” Wilbanks said. “That means that if you want to integrate with it, you can. It means that if you want to redo our work your way, you can— as long as you use the right technical formats. You can reuse all of our software.” + +The problem with a field like neuroscience, which has so many exploding frontiers, is that no single company or proprietary software platform can adequately manage the knowledge. The information is simply too copious and complex. Like so many other fields of knowledge that are large and complicated, it appears that only an open-source model can successfully curate the relevant information sources. A Web-based commons can be remarkably efficient, effective, and scalable. This has been the lesson of free and open-source software, wikis, and the Web itself. Although it is too early to tell how the Neurocommons project will evolve, the initial signs are promising. 
A number of foundations that support research for specific diseases — Alzheimer’s disease, Parkinson’s, autism, epilepsy, Huntington’s disease — have already expressed interest in the Neurocommons as a potential model for advancing research in their respective fields. + +2~ Open Physical Tools +={Science Commons:open physical tools in+10} + +Science is not just about text and data, of course. It also involves lots of tangible /{stuff}/ needed to conduct experiments. Typical materials include cell lines, monoclonal antibodies, reagents, animal models, synthetic materials, nano-materials, clones, laboratory equipment, and much else. Here, too, sharing and collaboration are important to the advance of science. But unlike digital bits, which are highly malleable, the physical materials needed for experiments have to be located, approved for use, and shipped. Therein lies another tale of high transaction costs impeding the progress of science. As Thinh Nguyen, counsel for Science Commons, describes the problem: +={Nguyen, Thinh+1} + +_1 The ability to locate materials based on their descriptions in journal articles is often limited by lack of sufficient information about origin and availability, and there is no standard citation for such materials. In addition, the process of legal negotiation that may follow can be lengthy and unpredictable. This can have important implications for science policy, especially when delays or inability to obtain research materials result in lost time, productivity and research opportunities.~{ Thinh Nguyen, “Science Commons: Material Transfer Agreement Project,” /{Innovations}/, Summer 2007, pp. 137–43, at http://www.mitpressjournals.org/ doi/pdf/10.1162/itgg.2007.2.3.137. }~ + +To the nonscientist, this transactional subculture is largely invisible. But to scientists whose lab work requires access to certain physical materials, the uncertainties, variations, and delays can be crippling. 
Normally, the transfer of materials from one scientist to another occurs through a Material Transfer Agreement, or MTA. The technology transfer office at one research university will grant, or not grant, an MTA so that a cell line or tissue specimen can be shipped to a researcher at another university. Typically, permission must be granted for the researcher to publish, disseminate, or use research results, and to license their use for commercialization. +={Material Transfer Agreements (MTAs)+7;science:Material Transfer Agreements (MTAs)+7} + +While certain types of transactions involve material that could conceivably generate high royalty revenues, a great many transactions are fairly low-value, routine transfers of material for basic research. Paradoxically, that can make it all the harder to obtain the material because consummating an MTA is not a high priority for the tech transfer office. In other cases, sharing the material is subject to special agreements whose terms are not known in advance. + +Corporations sometimes have MTAs with onerous terms that prevent academic researchers from using a reagent or research tool. Individual scientists sometimes balk at sharing a substance because of the time and effort needed to ship it. Or they may wish to prevent another scientist from being the first to publish research results. Whatever the motivation, MTAs can act as a serious impediment to verification of scientific findings. They can also prevent new types of exploratory research and innovation. + +Wilbanks describes the existing system as an inefficient, artisanal one that needs to become more of a streamlined industrial system. Just as Creative Commons sought to lower the transaction costs for sharing creative works, through the use of standard public licenses, so Science Commons is now trying to standardize the process for sharing research materials.
The idea is to reduce the transaction costs and legal risks by, in Nguyen’s words, “creating a voluntary and scalable infrastructure for rights representation and contracting.”~{ Ibid. }~ Like the CC licenses, the Science Commons MTAs will consist of “three layers” of licenses — the standard legal agreement, the machine-readable metadata version, and the “human-readable deed” that nonlawyers can understand. +={Wilbanks, John;Nguyen, Thinh;science:transaction costs in;transaction costs:science, in} + +There are already some successful systems in place for sharing research materials, most notably the Uniform Biological Material Transfer Agreement (UBMTA), which some 320 institutions have accepted, as well as a Simple Letter Agreement developed by the National Institutes of Health. The problem with these systems is that they cannot be used for transfers of materials between academic and for-profit researchers. In addition, there are many instances in which UBMTA signatories can opt out of the system to make modifications to the UBMTA on a case-by-case basis. +={National Institutes of Health (NIH);Uniform Biological Material Transfer Agreement (UBMTA)} + +To help standardize and streamline the whole system for sharing research materials, Science Commons is working with a consortium of ten research universities, the iBridge Network, to develop a prototype system. The hope is that by introducing metadata to the system, and linking that information to standard contracts and human-readable deeds, scientists will be able to acquire research materials much more rapidly by avoiding bureaucratic and legal hassles. Just as eBay, Amazon, and Federal Express use metadata to allow customers to track the status of their orders, so the Science Commons MTA project wants to develop a system that will allow searching, tracking, and indexing of specific shipments.
It is also hoped that metadata links will be inserted into journal articles, enabling scientists to click on a given research material in order to determine the legal and logistical terms for obtaining the material. +={iBridge Network;Science Commons:iBridge Network, and} + +Wilbanks envisions a new market of third-party intermediaries to facilitate materials transfers: “There’s an emerging network of third parties — think of them as ‘biology greenhouses’ — who are funded to take in copies of research materials and manufacture them on demand — to grow a quantity and mail them out. What Science Commons is trying to do with the Materials Transfer Project is to put together a functional system where materials can go to greenhouses under standard contracts, with digital identifiers, so that the materials can be cross-linked into the digital information commons. Anytime you see a list of genes, for example, you will be able to right-click and see the stuff that’s available from the greenhouses under standard contract, and the cost of manufacture and delivery in order to access the tool. Research materials need to be available under a standard contract, discoverable with a digital identifier, and fulfillable by a third party. And there needs to be some sort of acknowledgment, like a citation system.” +={Wilbanks, John} + +At one level, it is ironic that one of the oldest commons-based communities, academic science, has taken so long to reengineer its digital infrastructure to take advantage of the Internet and open digital systems. Yet academic disciplines have always clung tightly to their special ways of knowing and organizing themselves. The arrival of the Internet has been disruptive to this tradition by blurring academic boundaries and inviting new types of cross-boundary research and conversation. 
If only to improve the conversation, more scientists are discovering the value of establishing working protocols to let the diverse tribes of science communicate with one another more easily. Now that the examples of networked collaboration are proliferating, demonstrating the enormous power that can be unleashed through sharing and openness, the momentum for change is only going to intensify. The resulting explosion of knowledge and innovation should be quite a spectacle. + +1~ 12 OPEN EDUCATION AND LEARNING +={education+44} + +/{Managing educational resources as a commons can make learning more affordable and exciting.}/ + +In the late 1990s, as Richard Baraniuk taught electrical engineering to undergraduates at Rice University, the furthest thing from his mind was revolutionizing learning. He just wanted to make digital signal processing a more palatable subject for his students. Baraniuk, an affable professor with a venturesome spirit, was frustrated that half of his undergraduate class would glaze over when he taught signal processing, perhaps because it involves a lot of math. But then he explained the social ramifications of signal processing — for wiretapping, the Internet, the airwaves, radar, and much more. Students got excited. +={Baraniuk, Richard+13;Rice University+13} + +“If I wanted to reach a broader class of people, outside of Rice University,” Baraniuk said, “that would be very difficult. The standard thing is to write your own book.” But he quickly realized that writing the 176th book ever written on signal processing (he counted) would not be very efficient or effective. It would take years to write, and then additional years to traverse the editorial, production, and distribution process. And even if the book were successful, it would reach only five thousand readers. Finally, it would be a static artifact, lacking the timeliness and interactivity of online dialogue. 
A book, Baraniuk ruefully observed, “redisconnects things.”~{ Interview with Richard Baraniuk, January 21, 2008. }~ + +As chance had it, Baraniuk’s research group at Rice was just discovering open-source software. “It was 1999, and we were moving all of our workstations to Linux,” he recalled. “It was just so robust and high-quality, even at that time, and it was being worked on by thousands of people.” Baraniuk remembers having an epiphany: “What if we took books and ‘chunked them apart,’ just like software? And what if we made the IP open so that the books would be free to re-use and remix in different ways?’” +={Linux:education, and} + +The vision was exciting, but the tools for realizing it were virtually nonexistent. The technologies for collaborative authoring and the legal licenses for sharing, not to mention the financing and outreach for the idea, would all have to be developed. Fortunately, the Rice University administration understood the huge potential and helped Baraniuk raise $1 million to put together a skunk works of colleagues to devise a suitable software architecture and nonprofit plan. A colleague, Don Johnson, dubbed the enterprise “Connexions.” +={Connexions+10;education:Connexions+10;Johnson, Don} + +The group made a number of choices that turned out to be remarkably shrewd. Instead of organizing teaching materials into a “course” or a “textbook,” for example, the Connexions planners decided to build an open ecosystem of shared knowledge. Just as the Web is “small pieces loosely joined,” as David Weinberger’s 2003 book put it, so Connexions decided that the best way to structure its educational content was as discrete modules (such as “signal processing”) that could be reused in any number of contexts. The planners also decided to build a system on the open Semantic Web format rather than a simple interlinking of PDF files. 
This choice meant that the system would not be tethered to a proprietary or static way of displaying information, but could adapt and scale in the networked environment. Modules of content could be more easily identified and used for many different purposes, in flexible ways. +={education:Semantic Web, and;Semantic Web;World Wide Web:Semantic Web;Weinberger, David} + +By the summer of 2000, the first version of Connexions went live with two Rice University courses, Fundamentals of Electronic Engineering and Introduction to Physical Electronics. The goal was to let anyone create educational materials and put them in the repository. Anyone could copy and customize material on the site, or mix it with new material in order to create new books and courses. Materials could even be used to make commercial products such as Web courses, CD-ROMs, and printed books. By the end of 2000, two hundred course modules were available on Connexions: a modest but promising start. + +It turned out to be an auspicious moment to launch an open platform for sharing. A wave of Web 2.0 applications and tools was just beginning to appear on the Internet. Innovators with the savvy to take advantage of open networks, in the style of free and open software, could amass huge participatory communities in very short order. For Connexions, the living proof was Kitty Schmidt-Jones, a private piano teacher from Champaign, Illinois. She discovered Connexions through her husband and posted a 276-page book on music theory to the site. “Kitty is not the kind of person who would be a music textbook author,” said Baraniuk, “but she thought that music education is important, and said, ‘I can do this, too!’ ” By 2007 /{Understanding Basic Music Theory}/ had been downloaded more than 7.5 million times from people around the world. A Connexions staffer attending a conference in Lithuania met an educator from Mongolia who lit up at the mention of Schmidt-Jones. “We use her work in our schools!” he said.
+={education:Web 2.0 applications, and;Web 2.0: applications:education, and;Schmidt-Jones, Kitty} + +Besides curating a collection of educational content, Connexions has developed a variety of open-source applications to let authors create, remix, share, and print content easily. The project has also developed systems to let users rate the quality of materials. Professional societies, editorial boards of journals, and even informal groups can use a customizable software “lens” to tag the quality of Connexions modules, which can then be organized and retrieved according to a given lens. + +It was a stroke of good fortune when Baraniuk and his associates learned, in 2002, that Lawrence Lessig was developing a new licensing project called Creative Commons. As the CC team drafted its licenses, Connexions helped it understand academic needs and then became one of the very first institutional adopters of the CC licenses. Connexions decided to require that its contributors license their works under the least restrictive CC license, CC-BY (Attribution). This was a simple decision because most textbook authors write to reach large readerships, not to make money. +={Lessig, Lawrence:CC licenses, and} + +The real expansion of Connexions as a major international repository of teaching materials did not occur until early 2004, when the software platform had been sufficiently refined. Then, with virtually no publicity, global usage of the Connexions site took off. It helped that Rice University has never sought to “own” the project. Although it administers the project, the university has deliberately encouraged grassroots participation from around the world and across institutions. Electrical engineering faculty at ten major universities are cooperating in developing curricula, for example, and diverse communities of authors are adding to content collections in music, engineering, physics, chemistry, bioinformatics, nanotechnology, and history. 
In 2008, Connexions had 5,801 learning modules woven into 344 collections. More than 1 million people from 194 countries are using the materials, many of which are written in Chinese, Italian, Spanish, and other languages. + +One of Connexions’s neatest tricks is offering printed textbooks for a fraction of the price of conventional textbooks. Because the content is drawn from the commons, a 300-page hardback engineering textbook that normally sells for $125 can be bought for $25, through a print-on-demand publishing partner, QOOP.com. Ten percent of the purchase price is earmarked to support Connexions, and another 10 percent helps disadvantaged students obtain textbooks for free. Unlike conventional textbooks, which may be a year or two old, Connexions materials are generally up-to-date. + +By providing an alternative to the spiraling costs of academic publishing, Connexions’s publishing model may actually help a number of academic disciplines pursue their scholarly missions. Over the past decade, some sixty university presses have closed or downsized for economic reasons. “If you’re in art history, anthropology, or the humanities, you get tenure based on your monographs published by a university press,” Baraniuk said. “The problem is that, as university presses shut down, there’s nowhere to publish books anymore.” It is often financially prohibitive to publish art history books, for example, because such books typically require high-quality production and small press runs. An overly expensive market structure is blocking the flow of new scholarly publishing. +={education:university presses+1} + +One solution: a new all-digital hybrid business model for academic publishing. As the Connexions platform has proved itself, Rice University saw the virtue of reopening Rice University Press (RUP), which it had closed ten years earlier.~{ Rice University Press homepage, at http://www.ricepress.rice.edu.
}~ The new RUP retains the editorial structure, high standards, and focus on special fields of a conventional academic press, but it now works within a “branded partition” of Connexions. RUP posts all of its books online as soon as the manuscripts are finalized, and all books are licensed under a CC-BY (Attribution) license. The press does not have to pay for any warehouse or distribution costs because any physical copies of the books are printed on demand. The sales price includes a mission-support fee for RUP and the author’s royalty. “Because the RUP has eliminated all the back-end costs,” said Baraniuk, “they figure they can run it from five to ten times more cheaply than a regular university press.” + +The Connexions publishing model has inspired a group of more than twenty community colleges to develop its own public-domain textbooks to compete with expensive commercial textbooks. The Community College Consortium for Open Educational Resources~{ http://cccoer.pbwiki.com. }~ —led by Foothill–De Anza Community College District in Los Altos, California — plans to publish the ten most popular textbooks used in community colleges, and expand from there. The consortium will make the books available for free online and sell hardcover versions for less than thirty dollars. Even if the effort gains only a small slice of the textbook market, it will help hold down the prices of commercial textbooks and demonstrate the viability of a new publishing model. More to the point, by slashing one of the biggest costs facing community college students, the project will help thousands of lower-income students to stay in college. + +2~ MIT’s OpenCourseWare Initiative +={MIT OpenCourseWare+8;OpenCourseWare+8;education:OpenCourseWare+8} + +The other pioneering visionary in open education has been MIT. 
In April 2001, MIT president Charles Vest shocked the world when he announced that MIT would begin to put the materials for all two thousand of its courses online for anyone to use, for free. The new initiative, called OpenCourseWare, would cover a wide array of instructional materials: lecture notes, class assignments, problem sets, syllabi, simulations, exams, and video lectures. Putting the materials online in a searchable, consistent format was expected to take ten years and cost tens of millions of dollars. (The Hewlett and Mellon foundations initially stepped forward with two $5.5 million grants, supplemented by $1 million from MIT.) +={Vest, Charles+1} + +The project had its origins two years earlier, in 1999, when President Vest charged a study group with exploring how the university might develop online educational modules for lifelong learning. The assumption was that it would sell MIT-branded course materials to the budding “e-learning” market. At the time, Columbia University was developing Fathom.com, a bold for-profit co-venture with thirteen other institutions, to sell a wide variety of digital content. Publishers and universities alike envisioned a lucrative new market for academic and cultural materials. + +OpenCourseWare (OCW) was a startling move because it flatly rejected this ambition, and appeared to be either a foolish or magnanimous giveaway of extremely valuable information. Knowledge was assumed to be a species of property that should be sold for as dear a price as possible; few people at the time recognized that the Great Value Shift on the Internet was reversing this logic. The idea that giving information away might actually yield greater gains— by enhancing an institution’s visibility, respect, and influence on a global scale — was not seen as credible. After all, where’s the money? 
+={Great Value Shift;Internet:Great Value Shift, and} + +After studying the matter closely, MIT decided that the online market was not likely to be a boon, and that posting course materials online would send a strong message about MIT’s values. President Vest conceded that the plan “looks counter-intuitive in a market-driven world.” But he stressed that OpenCourseWare would combine “the traditional openness and outreach and democratizing influence of American education and the ability of the Web to make vast amounts of information instantly available.”~{ MIT press release, “MIT to make nearly all course materials available free on the World Wide Web,” April 4, 2001.}~ Professor Steven Lerman, one of the architects of the OCW plan, told the /{New York Times}/, “Selling content for profit, or trying in some ways to commercialize one of the core intellectual activities of the university, seemed less attractive to people at a deep level than finding ways to disseminate it as broadly as possible.”~{ Carey Goldberg, “Auditing Classes at M.I.T., on the Web and Free,” /{New York Times}/, April 4, 2001, p. 1. }~ +={Vest, Charles;Lerman, Steven} + +MIT also realized the dangers of propertizing college courses and teaching materials, said computer scientist Hal Abelson, another member of the OCW study group (and a CC board member). Ownership, he said, “can be profoundly destructive to the idea of a university community . . . The more people can stop talking about property and start talking about the nature of a faculty member’s commitment to the institution, the healthier the discussion will be. It’s not really about what you own as a faculty member; it’s about what you do as a faculty member.”~{ Interview with Hal Abelson, “OpenCourseWare and the Mission of MIT,” /{Academe}/, September/October 2002, pp. 25–26. }~ +={Abelson, Hal:OpenCourseWare, and} + +School officials stressed that using MIT courseware on the Web is not the same as an MIT education. 
Indeed, the free materials underscore the fact that what really distinguishes an MIT education is one’s participation in a learning community. Unlike the Connexions content, MIT’s OpenCourseWare is a fairly static set of course materials; they are not modular or constantly updated. In addition, they are licensed under a CC BY-NC-SA (Attribution-NonCommercial-ShareAlike) license. While this prevents businesses from profiting from MIT course materials, it also prevents other educational institutions from remixing them into new courses or textbooks. +={communities:learning;education:learning community, in a} + +Despite these limitations, MIT’s OCW materials have been profoundly influential. The course Laboratory in Software Engineering, for example, has been used by students in Karachi, Pakistan; the island of Mauritius; Vienna, Austria; and Kansas City, Missouri, among scores of other places around the world.~{ David Diamond, “MIT Everyware,” /{Wired}/, September 2003. }~ Ten of the leading Chinese universities now use hundreds of MIT courses, leading three noted OER experts, Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, to conclude that MIT’s OCW “has had a major impact on Chinese education.”~{ Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, “A Review of the Open Educational Resources (OER) Movement: Achievements, Challenges and New Opportunities,” February 2007, at http://www.oerderves.org/wp-content/uploads/2007/03/a-review-of-the-open-educational-resources-oer-movement_final.pdf, p. 23. }~ Noting the life-changing impact that OCW has had on students in rural villages in China and West Africa, Atkins and his co-authors cite “the power of the OCW as a means for cross-cultural engagement.” Over the course of four years, from October 2003 through 2007, the OCW site received nearly 16 million visits; half were newcomers and half were repeat visits. 
+={Atkins, Daniel E.;Brown, John Seely;Hammond, Allen L.;education:OER movement;Open Educational Resources (OER) movement} + +OCW is becoming a more pervasive international ethic now that more than 120 educational institutions in twenty nations have banded together to form the OpenCourseWare Consortium. Its goal is to create “a broad and deep body of open educational content using a shared model.”~{ OpenCourseWare Consortium, at http://www.ocwconsortium.org. }~ Although plenty of universities are still trying to make money from distance education courses, a growing number of colleges and universities realize that OCW helps faculty connect with other interested faculty around the world, build a college’s public recognition and recruitment, and advance knowledge as a public good. + +2~ The Rise of the Open Educational Resources Movement +={education:OER movement+19;Open Educational Resources (OER) movement+19} + +While Connexions and MIT’s OpenCourseWare have understandably garnered a great deal of attention, all sorts of fascinating educational projects, big and small, have popped up on the Internet as Web 2.0 innovations matured. Some of these projects have become celebrated, such as Wikipedia, the Public Library of Science, and the Internet Archive. Others, though less celebrated, represent a dazzling mosaic of educational innovation and new possibilities. In a sense, the Long Tail has come to education; even the most obscure subjects have a sustainable niche on the Internet. The groundswell has even produced its own theorists, conveners, and infrastructure builders. Utah State University hosts the Center for Open Sustainable Learning, which is a clearinghouse for open educational tools. Carnegie Mellon has an Open Learning Initiative that designs educational courses. And so on. +={Long Tail;Wikipedia} + +While American institutions and educators have been the first movers in this field, it has quickly taken on an international grassroots flavor. 
Thousands of commoners from around the world have started their own projects. MathWorld has become the Web’s most extensive mathematical resource. Curriki is a wiki that offers lesson plans and guidance for teachers. The British Library’s Online Gallery features digitized versions of Mozart’s musical diary and sketches by Leonardo da Vinci. U.K. and Australian high school students can now use the Internet to operate the Faulkes Telescope on the island of Maui, Hawaii. Students around the world do much the same with Bugscope, a scanning electron microscope that can be operated remotely. + +It is hard to set a precise date when the practitioners in this area realized that such wildly diverse projects might constitute a coherent movement with a shared agenda. But as more grantees began to discover each other, the movement-in-formation adopted a rather ungainly name to describe itself — “Open Educational Resources,” or OER. + +Most OER projects share a simple and powerful idea — “that the world’s knowledge is a public good and that technology in general and the World Wide Web in particular provide an extraordinary opportunity for everyone to share, use and reuse knowledge.” That is how Atkins and his co-authors define OER. It consists of “teaching, learning and research resources that reside in the public domain or have been released under an intellectual property license that permits their free use or re-purposing by others.”~{ Ibid. }~ +={Atkins, Daniel E.;Brown, John Seely} + +The heart of the OER movement is, of course, open sharing and collaboration. OER advocates regard learning as an intrinsically social process, and so they believe that knowledge and learning tools ought to freely circulate. Inspired by the GPL and the CC licenses, OER advocates believe they should be free to copy, modify, and improve their learning tools and pass them forward to others. 
There is a presumption that artificial barriers to the free flow of information should be eliminated, and that teachers and learners should be empowered to create their own knowledge commons. +={Creative Commons (CC) licenses:tools for creating commons, as} + +The OER movement has a special importance for people who want to learn but don’t have the money or resources, which is to say, people in developing nations, low-income people, and people with specialized learning needs. For the 4 billion people who live in the developing world, schooling is a privilege, textbooks are rare, and money is scarce. In many African nations, there would not be libraries if books were not photocopied. The OER movement aspires to address these needs. OER projects can provide important benefits in industrialized nations, too, where subscriptions to research journals are often prohibitively expensive and many community college students drop out because textbooks cost more than tuition. + +The OER movement is currently in a formative stage, still trying to make sense of the many players in the movement and understand the complex impediments to its progress. Some of this could be seen at a “speed geeking” session at the iCommons Summit in 2007 in Dubrovnik, Croatia. Speed geeking, a puckish variation on “speed dating,” consists of people listening to a short presentation, asking questions and then moving on to the next presentation. After five minutes, a moderator blows a whistle and shouts, “Everyone move — now!” A speed geek can learn about twelve different projects, and meet twelve interesting people, in a single hour. +={Croatia} + +% added Croatia missing Croatia reference in source book index + +In this case, the speed geeking took place in a sweltering loft space without air-conditioning, in a medieval building overlooking the Adriatic Sea. 
At the first station, a group of participants marveled at a sturdy lime-green laptop of a kind that was about to be distributed to millions of children around the world. The One Laptop Per Child project, the brainchild of Nicholas Negroponte of MIT’s Media Lab, is an ambitious nonprofit initiative to build a sturdy, kid-friendly laptop filled with open-source software and Wi-Fi capabilities for $100.~{ See, e.g., John Markoff, “For $150, Third-World Laptop Stirs a Big Debate,” /{New York Times}/, November 30, 2006. }~ (The cost turned out to be $188, but is expected to decline as production volume grows.) Hundreds of thousands of the so-called XO laptops have now been distributed to kids in Peru, Uruguay, Mexico and other poor nations. +={Negroponte, Nicholas;One Laptop Per Child} + +/{Tweet!}/ Next stop: the Free High School Science Textbooks project in South Africa is developing a free set of science textbooks for students in grades ten through twelve. The project depends on volunteers to write modules of text about various physics, chemistry, and mathematical topics. Paid editors then craft the text into a coherent, high-quality textbook; printing is funded by donations. +={Free High School Science Textbooks} + +Five minutes later, it was on to Educalibre, a Chilean project that is installing free software on old computers so that they can be reused in classrooms. Educalibre is also trying to integrate free software into high school curricula, especially math. The project seeks to bring open-source software principles into formal education. + +Next, Delia Browne of the National Education Access Licence for Schools, or NEALS, explained that some ten thousand Australian schools pay millions of dollars each year to collecting societies in order to reprint materials that the Australian schools themselves have produced. 
NEALS wants to eliminate this expense, as well as millions of dollars in photocopying expenses, by creating a vast new commons of freely shareable educational materials. Its solution is to persuade Australian schools, as copyright holders, to adopt a special license so that participating schools can copy and share each other’s materials. +={Browne, Delia;National Education Access Licence for Schools (NEALS)} + +/{Tweet!}/ At the next station, Ed Bice of San Francisco explained how his nonprofit group, Meedan.net, is developing a “virtual town square” for Arabic- and English-speaking Internet users. Using real-time translation and social networking tools, the site aspires to open up a new global conversation between Arabs and the rest of the world. It plans to break down cultural barriers while opening up educational opportunities to Arab populations. +={Bice, Ed} + +/{Tweet! Tweet!}/ Neeru Paharia, a former executive director of the Creative Commons, introduced her fledgling project, AcaWiki. Paharia is concerned that too many academic articles are locked behind paywalls and are not readily accessible to everyone. AcaWiki plans to recruit graduate students, academics, and citizens to write summaries of academic papers. Since many grad students make abstracts as part of their routine research, it would not be difficult to pool thousands of summaries into a highly useful, searchable Web collection. +={Paharia, Neeru} + +The speed geekers in Dubrovnik were sweaty and overstimulated at the end, but gratified to learn that there are a great many OER projects under way throughout the world; they just aren’t very well known or coordinated with one another. Two of the participants — J. 
Philipp Schmidt of the University of the Western Cape and Mark Surman of the Shuttleworth Foundation, both of South Africa — conceded that “there is still a great deal of fuzziness about what this movement includes,” and that “we don’t yet have a good ‘map’ of open education.” But the significance of grassroots initiatives is unmistakable. “There is a movement afoot here,” they concluded, “and it is a movement with an aim no less than making learning accessible and adaptable for all.”~{ J. Philipp Schmidt and Mark Surman, “Open Sourcing Education: Learning and Wisdom from the iSummit 2007,” September 2, 2007, at http://icommons.org/download_banco/open-sourcing-education-learning-and-wisdom-from-isummit-2007. }~ “Education,” another participant predicted, “will drive the future of the Commons movement.” +={Schmidt, J. Philipp;Surman, Mark} + +In a sign that the OER movement is getting serious as a movement, thirty of its leaders met in Cape Town, South Africa, and in January 2008 issued the Cape Town Open Education Declaration.~{ http://www.capetowndeclaration.org. Schmidt and Surman, “Open Sourcing Education.” }~ The declaration is a call to make learning materials more freely available online, and to improve education and learning by making them more collaborative, flexible, and locally relevant. The declaration outlines the challenge: “Many educators remain unaware of the growing pool of open educational resources. Many governments and educational institutions are either unaware or unconvinced of the benefits of open education. Differences among licensing schemes for open resources create confusion and incompatibility. 
And, of course, the majority of the world does not have access to the computers and networks that are integral to most current open education efforts.” + +New funding support is materializing from foundations like the Open Society Institute and the Shuttleworth Foundation, and the Creative Commons has instigated a new project, ccLearn, headed by Ahrash Bissell, to help coordinate OER factions and tackle barriers to further progress. +={Bissell, Ahrash} + +Despite the challenges it faces, the Open Educational Resources movement has a promising future if only because it has such an appealing ethos and practical value. It offers to lower the costs and increase the efficiencies of learning. It helps to generate high-quality materials that address specific learning needs. Where markets are too expensive or unresponsive, collective provisioning through the commons can meet needs effectively and in socially convivial ways. + +Such intangible satisfactions may be one of the secrets of the OER movement’s success to date. Institutions and individuals take pleasure in contributing to the public good. There is pleasure in helping people who thirst for an education, whether in Africa or in a community college, to acquire the resources they need. For learners, the OER movement offers new, more flexible styles of learning. Over time, it seems likely that OER projects will transform the familiar “information transfer” models of formal education into more informal and participatory learning communities. Passive students will more easily become passionate, self-directed learners. + +Finally, at a time of great geopolitical rivalries and cultural animosities, the OER movement holds itself forth as an arena of transnational cooperation. It regards diversity as a strength and social inequity as a challenge to be squarely met. 
It is a measure of the movement’s idealism that Schmidt and Surman, the South African OER commoners, compare open education to “a flock of migratory geese, moving back and forth between North and South. The flock combines birds from all places. Each goose takes a turn leading the flock, taking the strain, and then handing over to their peers. The flock is not confined to just the North, or the South. It flourishes as a global movement.” +={Schmidt, J. Philipp;Surman, Mark} + +1~ CONCLUSION: THE DIGITAL REPUBLIC AND THE FUTURE OF DEMOCRATIC CULTURE +={free culture+56} + +/{You never change things by fighting the existing reality. To change something, build a new model that makes the existing model obsolete. —R. Buckminster Fuller}/ +={Fuller, R. Buckminster} + +Legend has it that, upon leaving Independence Hall on the final day of the Constitutional Convention in 1787, Benjamin Franklin was approached by a woman, who asked, “Well, Doctor, what have we got — a Republic or a Monarchy?” Franklin famously replied, “A Republic, if you can keep it.” The American colonies had imagined and engineered a new constitutional order, but its survival would depend on countless new struggles and innovations. An American civic culture had to be invented. +={Franklin, Benjamin+1} + +The Franklin vignette might well be applied to the digital republic that the commoners have built. Except that, instead of asking, “Well, Mr. Stallman and Professor Lessig, what have we got — a free culture or a proprietary tyranny?” the question might better be posed to the commoners themselves. Their very existence answers the question, Tyranny or freedom? Free culture exists. It exists to the extent that people practice its ideals. It is not pervasive; many people have no idea what it is; it overlaps in fuzzy ways with the market. 
But it is flourishing wherever online communities have devised satisfactory commons structures — through law, software, and social norms — to capture the value that they create. Or, as the American Framers put it, to secure the blessings of liberty to ourselves and our posterity. +={commoners:influence of+3} + +As the preceding chapters make clear, the commoners are now a respected force in culture, politics, and economics. Their influence can be felt in varying degrees in the worlds of music, video, photography, and books; in software, Web design, and Internet policies; in social networks and peer-to-peer communities; in business, science, and education; and in scores of countries that have ported the Creative Commons licenses and developed their own commons-based projects. + +Thanks to the Internet, the commons is now a distinct sector of economic production and social experience. It is a source of “value creation” that both complements and competes with markets. It is an arena of social association, self-governance, and collective provisioning that is responsive and trustworthy in ways that government often is not. In a sense, the commons sector is a recapitulation of civil society, as described by Alexis de Tocqueville, but with different capacities. +={Tocqueville, Alexis de;Internet:socially created value of} + +Yet even with the great advances that the commoners have made in building their own shared platforms, tools, and content, the digital republic is not secure. 
In most countries, the commoners have less conventional political power than corporations, which means that the interests of citizens, consumers, and users are scanted in the policies that govern market competition, intellectual property, and life on the Internet.~{ For a nice overview of these policy contests, see Yochai Benkler, /{The Wealth of Networks: How Social Production Transforms Markets and Freedom}/ (New Haven, CT: Yale University Press, 2006), chapter 11, “The Battle Over the Institutional Ecology of the Digital Environment,” pp. 383–459. }~ Faced with the Great Value Shift, mass-media and entertainment corporations are not eager to surrender their historic market franchises to newcomers without a fight; they are resisting competition from open business models and the commons. +={Great Value Shift;value;Centralized Media:competition, and;Internet:attempts to control+3} + +In the United States, cable broadcast operators and telephone carriers are threatening the very future of the Internet as a commons infrastructure. They wish to assert greater control over Web access and traffic, and so are staunchly resisting “net neutrality” rules that would require them to act as nondiscriminatory common carriers. They would like to leverage their roles as oligopolistic gatekeepers to the Internet, and boost their revenues, by choosing whose Web sites will receive superior transmission and whose communications may be censored or put in the “slow lane.” +={Internet:net neutrality rules on|future of} + +At a further extreme, authoritarian countries such as China, Saudi Arabia, Egypt, and Singapore have shown that national governments still retain great powers to censor and control Internet communications.~{ Shanthi Kalathil and Taylor C. Boas, /{Open Networks, Closed Regimes: The Impact of the Internet on Authoritarian Rule}/ (Washington, DC: Carnegie Endowment for International Peace, 2003). 
}~ Even the United States government is reportedly engaged in extensive surveillance of Internet traffic, ostensibly for antiterrorism purposes. Meanwhile, many poor nations, especially in Africa and Asia, are struggling simply to get online and create their own digital commons. + +These battles are all part of a larger struggle over “the institutional ecology of the digital environment,” in Yochai Benkler’s words — a struggle that is likely to continue for many years. What powers and capabilities will the commoners and their institutions have relative to business and government, and how will they be able to protect and enhance the value created within the commons? +={Benkler, Yochai:social movements, on} + +2~ A New Species of Citizenship +={citizenship:new species of+22;democracy:new species of citizenship+22;free culture:new species of citizenship, as+22} + +Perhaps the most enduring contribution of the free software, free culture, and other “open movements” has been their invention of a new species of citizenship. Despite significant differences of philosophy and implementation, these commons share some basic values about access, use, and reuse of creative works and information. No matter their special passions, the commoners tend to be improvisational, resourceful, self-directed, collaborative, and committed to democratic ideals. They celebrate a diversity of aesthetics, viewpoints, and cultures. They are egalitarian in spirit yet respectful of talent and achievement. There is a strong predilection to share because the accrual of digital contributions (code, content, metatags) will lead to a greater good for all and perhaps even democratic change. But there is no hostility to commercial activity — indeed, there is a lively admiration for entrepreneurialism — so long as it does not violate basic creative and civic freedoms or core principles of the Internet (openness, interoperability, sharing). 
The disagreements that do exist center on how best to achieve those goals. +={free culture:sharing ethic of} + +As this book has shown, the Internet is enabling a new species of citizenship in modern life. It is not just a “nice thing.” It is a powerful force for change. The new technologies have been instrumental in helping the commoners imagine and build a digital republic of their own. Over the long term, this citizenship and the culture that it is fostering are likely to be a politically transformative force. They just might help real-world democracies restore a measure of their waning legitimacy and competence.~{ David Bollier, /{The Rise of Netpolitik: How the Internet Is Changing International Politics and Diplomacy}/ (Washington, DC: Aspen Institute Communications and Society Program, 2003). }~ +={Internet:mass participation in} + +David R. Johnson, a lawyer and scholar, describes the citizen of the Internet — the “netizen” — as a significant historical development because he or she can potentially compete with government as a source of binding rule sets. In a brilliant essay, “The Life of the Law Online,” Johnson writes that “we haven’t had a real competition for survival among rule sets. The competition is only between the rule of (our one) law and, presumably, anarchy. So the tendency of all rule sets to become more complicated over time, especially when written by people considering only parts of the system in analytical isolation, has not been checked by evolutionary forces.”~{ David R. Johnson, “The Life of the Law Online,” /{First Monday}/ 11, no. 2 (February 2006), at http://firstmonday.org/issues/issue11_2/johnson/index.html. }~ Government has an unchecked monopoly on lawmaking even though its relationship to the governed, whose consent is vital, is now greatly attenuated. +={Johnson, David R.+1;commoners:“netizens”+1} + +One evolutionary “competitor” to government-made law and to markets is the netizen — or, in my terms, the commoner. 
For the most part, members of a commons generate and maintain the rules that govern their collective. By Johnson’s reckoning, the commons must be considered a new social metabolism for creating law; it is a new type of “legal organism.” It is, in Johnson’s words, “a self-causing legal order composed of systems that adopt goals that serve the values of those they regulate, without excessively imposing those goals on others.” +={commons:new type of legal organism;law:commons as new type of legal organism} + +A commons is a kind of biological entity operating in a complex cultural ecosystem. It has its own internal systems for managing its affairs, interacting with its environment, repairing itself, and defining its own persistent identity. It is a force by which ordinary people can express their deepest interests and passions, directly and without institutional mediation, on a global stage. This is an unprecedented capacity in communications, culture, and, indeed, human history. +={commons:definition of} + +To understand why the commoner represents a great leap forward in citizenship, it helps to consider the history of citizenship in the oldest democracy in the world, the United States. In his book /{The Good Citizen}/, sociologist Michael Schudson describes the evolution of three distinct types of citizenship over the past three centuries: +={Schudson, Michael+2;citizenship:history-making+16;Internet:citizenship, and+16} + +_1 When the nation was founded, being a citizen meant little more than for property-owning white males to delegate authority to a local gentleman — and accept his complimentary glass of rum on election day. This “politics of assent” gave way early in the nineteenth century to a “politics of parties.” Parties conducted elaborate campaigns of torchlight processions and monster meetings; voting day was filled with banter, banners, fighting and drinking. . . . 
The third model of citizenship, ushered in by Progressive reformers, was a “politics of information.” Campaigning became less emotional and more educational. Voting was by secret ballot.~{ Michael Schudson, /{The Good Citizen: A History of American Civic Life}/ (New York: Free Press, 1998), dust jacket. }~ + +We are heirs to the “politics of information,” a model of citizenship that presumes, as economics does, that we are rational actors who, if armed with sufficient quantities of high-quality information, will make educated decisions and optimize civic outcomes. But as Walter Lippmann noted and Schudson echoes, “if democracy requires omnicompetence and omniscience from its citizens, it is a lost cause.”~{ Ibid., p. 310. }~ Life is too busy, fast, and complex. A new type of citizenship is needed. Schudson offers a fairly weak prescription — the “monitorial citizen,” a watchdog who vigilantly monitors the behavior of power. +={Lippmann, Walter} + +But it is precisely here that the Internet is offering up a new, more muscular model of citizenship. I call it /{history-making citizenship}/. The rise of the blogosphere over the past ten years is emblematic of this new paradigm of citizenship. So is citizen-journalism, free software, Wikipedia, the Open Educational Resources movement, open business models like Jamendo and Flickr, and the Creative Commons and iCommons communities. In one sense, the citizenship that these groups practice is “monitorial” in that their members spend a great deal of time watching and discussing. But “monitoring” barely begins to describe their activities. The commoners have the ability — rare in pre-Internet civic life — to publish and incite others to action, and then organize and follow through, using a growing variety of powerful tools. 
With the advent of blogs, meetups, social networking, text messaging, and many other digital systems, citizens are able to communicate, coordinate, organize, and take timely action on a wide range of matters, including matters of public and political concern. +={commoners:influence of+1} + +I call the new sorts of citizen behaviors “history-making” because ordinary people are able to assert moral agency and participate in making change.~{ I am inspired in this choice of terms by Charles Spinosa, Fernando Flores, and Hubert L. Dreyfus in their book, /{Disclosing New Worlds: Entrepreneurship, Democratic Action, and the Cultivation of Solidarity}/ (Cambridge, MA: MIT Press, 1997). }~ This capacity is not reserved chiefly to large, impersonal institutions such as corporations, government agencies, and other bureaucracies. It is not a mere “participatory citizenship” in which people can volunteer their energies to a larger and more influential leader, political party, or institution in order to help out. It is a citizenship in which /{the commoners themselves}/ choose projects that suit their talents and passions. Dispersed, unorganized groups of strangers can build their own platforms and social norms for pursuing their goals; instigate public action that would not otherwise occur (and that may clash with the practices of existing institutions); and push forward their own distinctive agenda. +={commons:political implications of} + +These behaviors exist in some measure in offline realms, of course, but they are a growing norm in the digital republic. A few examples will suffice to make the point. The Web helped create and propel a handful of cause-oriented candidacies — Howard Dean, Ron Paul, Ned Lamont~[* Lamont was an insurgent candidate for U.S. Senate from Connecticut challenging Senator Joseph Lieberman in a campaign that helped culturally validate opposition to the U.S. 
war in Iraq.]~ — who rapidly raised enormous sums of money, galvanized large numbers of passionate supporters, and altered mainstream political discourse. Although none prevailed in their races, Barack Obama made a quantum leap in online organizing in 2008, raising $50 million in a single month from supporters via the Internet. Obama’s candidacy was buoyed by the rise of the “netroots” — Web activists with a progressive political agenda— whose size and credibility enable them to sway votes in Congress, raise significant amounts of campaign funds, and influence local activism. The stories are now legion about blogs affecting political life — from the resignation of Senate majority leader Trent Lott after he praised the racist past of Senator Strom Thurmond at his hundredth birthday party, to the electoral defeat of Senate candidate George Allen after his uttering of an ethnic slur, /{macaca}/, was posted on YouTube. +={Dean, Howard;Lamont, Ned;Obama, Barack;Paul, Ron;Internet:political campaigns on;Allen, George;Lott, Trent;YouTube} + +Citizens are now able to initiate their own policy initiatives without first persuading the mainstream media or political parties to validate them as worthy. For example, a handful of citizens troubled by evidence of “hackable” electronic voting machines exposed the defects of the Diebold machines and the company’s efforts to thwart public scrutiny and reforms.~{ See, e.g.,Yochai Benkler, /{The Wealth of Networks}/, pp. 225–32. }~ (The effort has led to a nationwide citizen effort, www.blackboxvoting.org, to expose security problems with voting machines and vote counting.) An ad hoc group of activists, lawyers, academics, and journalists spontaneously formed around a public wiki dealing with the lethal side effects of a bestselling antipsychotic drug Zyprexa, and the manufacturer’s allegedly illegal conduct in suppressing evidence of the drug’s risks. 
(Prosecutors later sought a $1 billion fine against Eli Lilly.)~{ Jonah Bossewitch, “The Zyprexa Kills Campaign: Peer Production and the Frontiers of Radical Pedagogy,” /{Re-public}/, at http://www.re-public.gr/en/?p=144. }~ + +The Web is giving individuals extra-institutional public platforms for articulating their own facts and interpretations of culture. It is enabling them to go far beyond voting and citizen vigilance, to mount citizen-led interventions in politics and governance. History-making citizens can compete with the mass media as an arbiter of cultural and political reality. They can expose the factual errors and lack of independence of /{New York Times}/ reporters; reveal the editorial biases of the “MSM” — mainstream media — by offering their own videotape snippets on YouTube; they can even be pacesetters for the MSM, as the blog Firedoglake did in its relentless reporting of the “Scooter” Libby trial (Libby, one of Vice President Cheney’s top aides, was convicted of obstruction of justice and perjury in connection with press leaks about CIA agent Valerie Plame.) Citizen-journalists, amateur videographers, genuine experts who have created their own Web platforms, parodists, dirty tricksters, and countless others are challenging elite control of the news agenda. It is no wonder that commercial journalism is suffering an identity crisis. Institutional authority is being trumped by the “social warranting” of online communities, many of which function as a kind of participatory meritocracy. +={Libby, “Scooter”;YouTube} + +History-making citizenship is not without its deficiencies. Rumors, misinformation, and polarized debate are common in this more open, unmediated environment. Its crowning virtue is its potential ability to mobilize the energies and creativity of huge numbers of people. 
GNU/Linux improbably drew upon the talents of tens of thousands of programmers; certainly our contemporary world with its countless problems could use some of this elixir — platforms that can elicit distributed creativity, specialized talent, passionate commitment, and social legitimacy. In 2005 Joi Ito, then chairman of the board of the Creative Commons, wrote: “Traditional forms of representative democracy can barely manage the scale, complexity and speed of the issues in the world today. Representatives of sovereign nations negotiating with each other in global dialog are limited in their ability to solve global issues. The monolithic media and its increasingly simplistic representation of the world cannot provide the competition of ideas necessary to reach informed, viable consensus.”~{ Joichi Ito, “Emergent Democracy,” chapter 1 in John Lebkowsky and Mitch Ratcliffe, eds., /{Extreme Democracy}/ (Durham, NC: Lulu.com, 2005), at http://extremedemocracy.com/chapters/Chapter%20One-Ito.pdf. }~ Ito concluded that a new, not-yet-understood model of “emergent democracy” is likely to materialize as the digital revolution proceeds. A civic order consisting of “intentional blog communities, ad hoc advocacy coalitions and activist networks” could begin to tackle many urgent problems. +={Ito, Joichi;GNU/Linux;democracy:emergent+1|traditional forms of+5} + +Clearly, the first imperative in developing a new framework to host representative democracy is to ensure that the electronic commons be allowed to exist in the first place. Without net neutrality, citizens could very well be stifled in their ability to participate on their own terms, in their own voices. 
If proprietary policies or technologies are allowed to override citizen interests (Verizon Wireless in 2007 prevented the transmission of abortion rights messages on its text-messaging system, for example~{ Adam Liptak, “Verizon Reverses Itself on Abortion Messages,” /{New York Times}/, September 27, 2007, at http://www.nytimes.com/2007/09/27/business/27cnd-verizon.html. }~), then any hope for history-making citizenship will be stillborn. + +Beyond such near-term concerns, however, the emerging digital republic is embroiled in a much larger structural tension with terrestrial “real world” governments. The commoner is likely to regard the rules forged in online commons as more legitimate and appropriate than those mandated by government. Again, David R. Johnson: +={Johnson, David R.} + +_1 The goals of a successful legal organism must be agreed upon by those who live within it, because a legal system is nothing more than a collective conversation about shared values. When it ceases to be that kind of internally entailed organism, the law becomes mere power, social “order” becomes tyranny, and the only option, over the long term at least, is war. + +_1 Organisms can’t be repaired from the outside. But, with reference to interactions that take place primarily online, among willing participants who seek primarily to regulate their own affairs, that’s exactly where existing governments are situated — outside the vibrant, self-regulating online spaces they seek to regulate. Their efforts to engineer the Internet as if it were a mechanism are not only fundamentally illegitimate but doomed by the very nature of the thing they seek to regulate. They are trying to create social order, of course. But they have not recognized . . . that order in complex systems creates itself.~{ Johnson, “The Life of the Law Online.” }~ + +After all, he or she is likely to have had a more meaningful personal role in crafting those rules. 
Now, of course, people live their lives in both online and terrestrial environments; there is no strict division between the two. That said, as people’s lives become more implicated in Internet spaces, citizens are likely to prefer the freedoms and affordances of the open-networked environment to the stunted correlates of offline politics, governance, and law. + +Indeed, this may be why so many activists and idealists are attracted to online venues. There is a richer sense of possibility. Contemporary politics and government have been captured by big money, professionals, and concentrated power. By contrast, in the digital republic, the ethic of transparency deals harshly with institutional manipulations, deceptions, and bad faith. They literally become part of your “permanent record,” forever available via a Google search. More fundamentally, the digital republic has a basic respect for everyone’s ability to contribute. It respects the principle of open access for all. The “consent of the governed” really matters. How sobering it is, then, to return to the “real world” of the American polity — or most other national governments — and realize that “money talks and bullshit walks.” How depressing to realize that the system is highly resistant to ordinary citizen action, such is the mismatch of resources. +={transparency+1;open business models:transparency in} + +The growing dissonance between the American system of governance, as practiced, and the more open, meritocratic online world was surely a factor in Lessig’s decision in 2007 to step down as CEO of Creative Commons, a move that eventually took place in April 2008. Lessig’s crushing responsibilities as the leader of Creative Commons — the international travel, the fund-raising, the strategic planning, the public events and movement obligations — had surely taken its toll. 
Feeling a personal need for new challenges as well as a responsibility to let new leaders emerge within the CC world, Lessig announced an ambitious new agenda for himself — tackling the “systemic corruption” of the democratic process in Congress. He joined with Joe Trippi, the campaign manager for Howard Dean’s 2004 presidential run, to launch a new organization, Change Congress, which seeks to ban special-interest campaign contributions, secure public financing for campaigns, and bring greater transparency to congressional proceedings. In a shuffle of roles, longtime board member James Boyle — who had been especially active on science and education initiatives — became the new chairman of Creative Commons. Board member Joi Ito, who had been chairman for a brief period, became CEO. +={Boyle, James:CC board, on|chairman, as;Change Congress (organization);Dean, Howard;democracy:corruption in|traditional forms of+1;Trippi, Joe;Lessig, Lawrence:political activity of} + +If Lessig is going to succeed in using the tools of the digital republic to reform and rejuvenate the American polity (and perhaps inspire other governments as well), he will have to confront the rather deeply rooted premises of the official constitutional order. The fast-paced, commons-based governance of the digital republic is naturally going to clash with a system of governance that revolves around bureaucratic hierarchies, a slow-moving system of law, archaic types of political intermediaries, and electoral principles designed for eighteenth-century life. Can the two be reconciled? The structural tensions are likely to be a significant and persistent issue for many, many years. + +2~ A Long-Term Power Shift? +={free culture:political nature of+23} + +It is hard to get a fix on this long-term transformation because the struggles to actualize an emergent democracy, as envisioned by Ito, are strangely apolitical and intensely political at the same time. 
They are apolitical in the sense that commoners are chiefly focused on the pragmatic technical challenges of their individual projects; they are not usually involved in official policymaking in legislatures or before courts and government agencies. Yet free software and free culture projects are highly political in the sense that commons projects, taken together over time, represent a profound challenge to the conventional market order and political culture. For example, Wikitravel, Jamendo, and open-access journals arguably provide better value than the commercial alternatives. The success of free software punctures the foundational assumptions of copyright law, making it easier to challenge new expansions of copyright law. Participatory commons are diverting viewer “eyeballs” away from commercial media and its genres of culture, spurring the growth of new hybrid forms of user-generated content. These kinds of effects, which advance project by project, month by month, are likely to have a long-term transformational impact. A new social ethic is taking root. +={Ito, Joichi;free software:FOSS/FLOSS+2;FOSS/FLOSS+2;copyright law:assumptions of;democracy:emergent} + +Free culture, though culturally progressive, is fairly nonjudgmental about ideological politics. When American conservatives decided they wanted to start Conservapedia because they found Wikipedia too liberal, Wikipedia founder Jimmy Wales was happy to bless it: “Free culture knows no bounds . . . We welcome the reuse of our work to build variants. That’s directly in line with our mission.”~{ Robert Mackey, “Conservapedia: The Word Says it All,” /{New York Times}/, March 8, 2007, at http://thelede.blogs.nytimes.com/2007/03/08/conservapedia-the-word-says-it-all/?scp=1&sq=wales+conservapedia. }~ Anthropology professor E. Gabriella Coleman has found a similar ecumenicism in the free software movement, which is agnostic about conventional politics but adamant about its own polity of freedom.~{ E. 
Gabriella Coleman, “The Political Agnosticism of Free and Open Source Software and the Inadvertent Politics of Contrast,” /{Anthropology Quarterly}/ 77, no. 3 (Summer 2004), pp. 507–19. See also her Ph.D. dissertation, “The Social Construction of Freedom in Free and Open Source Software: Hackers, Ethics and the Liberal Tradition,” abstract at http://healthhacker.org/biella/coleman-abstract.pdf. }~ Thus, the FOSS movement has no position with respect to social justice or globalization issues, but it does demand a strict commitment to the “four freedoms” of software development. Johan Söderberg makes much the same case in his book /{Hacking Capitalism}/.~{ Johan Söderberg, /{Hacking Capitalism: The Free and Open Source Software Movement}/ (New York: Routledge, 2007). }~ +={Coleman, E. Gabriella;Wales, Jimmy;Söderberg, Johan} + +As projects like GNU/Linux, Wikipedia, open courseware, open-access journals, open databases, municipal Wi-Fi, collections of CC-licensed content, and other commons begin to cross-link and coalesce, the commons paradigm is migrating from the margins of culture to the center. The viral spiral, after years of building its infrastructure and social networks, may be approaching a Cambrian explosion, an evolutionary leap. +={commons:force for change, as+5} + +History suggests that any new style of politics and polity will arrive through models developed /{from within}/ the edifice of existing law, markets, and culture. A revolutionary coup or showdown with existing institutions will not be necessary. Superior working models — running code and a healthy commons — will trump polemics and exhortation. + +Ideological activists and political professionals are likely to scoff at this scenario. After all, they are suspicious of distributed political power, if not hostile to it. 
They prefer the levers of consolidated power (laws, court rulings, police powers) that are within their sphere of influence to the dispersed, sovereign powers of an online multitude. The latter is highly resistant to capture and control, and in that sense, profoundly threatening to the traditional configurations of political power. We have already seen how the mandarins of journalism, politics, and business are quick to lash out at the noncredentialed masses who dare to put forward their own interpretations of the world. + +However necessary it is to engage in the official governance of a nation, corrupted though it may be, the commoners have shown that building their own functioning commons can be a powerful force for change as well. A commons of technical standards for the Web — how mundane! — can achieve more than most antitrust lawsuits. A common pool of information can prevent a company from reaping easy monopoly rents from the control of a public good. Instead, the company must “move upstream” to provide more specialized forms of value (for example, sophisticated graphing of the information or data analysis). A commons may also be affirmatively helpful to businesses, as Eric von Hippel has shown, by aggregating a body of aficionados into a social community that can articulate customer needs and preferences in highly efficient ways: the commons as a cheap form of R & D and marketing. + +In either case, the rise of a commons can be disruptive not just because it changes how market power is exercised, but because it may disperse power to a broader community of participants. Recall Johnson’s observation that a commons is a “self-causing legal order” that competes with other legal orders. Individuals who affiliate with an online community may acquire the ability to manage their own social relationships and group identity. 
+={Johnson, David R.;commons:new type of legal organism+1;law:commons as new type of legal organism+1|political implications of+1;democracy:power of the commons in+1} + +This is not just a form of marketplace power, it is a form of /{political}/ power. In effect, a group may be able to neutralize the power of corporations to use brands to organize their identities. By developing its own discourse and identity, an online community can reject their treatment as a demographic cohort of consumers. They can assert their broader, nonmarket concerns. As a group of commoners, they are less susceptible to propaganda, ideology, and commercial journalism as tools for organizing their political allegiances. They have greater civic sovereignty. + +“Free cooperation aims at distributing power,” argues Geert Lovink, a Dutch media theorist: +={Lovink, Geert+1} + +_1 I am not saying that power as such disappears, but there is certainly a shift, away from the formal into the informal, from accountable structures towards a voluntary and temporal connection. We have to reconcile with the fact that these structures undermine the establishment, but not through recognizable forms of resistance. The “anti” element often misses. This is what makes traditional, unreconstructed lefties so suspicious, as these networks just do their thing and do not fit into this or that ideology, be it neoliberal or autonomous Marxist. Their vagueness escapes any attempt to deconstruct their intention either as proto-capitalist or subversive.~{ Geert Lovink, “Theses on Wiki Politics,” an exchange with Pavlos Hatzopoulos, /{Re-public}/, at http://www.re-public.gr/en/?p=135. }~ + +This can be disorienting. Energies are not focused on resisting an oppressor, but rather on building innovative, positive alternatives. In Buckminster Fuller’s terms, free culture is mostly about building new models that make the existing models obsolete. 
Instead of forging an identity in relation to an adversary, the movement has built an identity around an affirmative vision and the challenge of /{becoming}/. People feel fairly comfortable with a certain level of ambiguity because the whole environment is so protean, diverse, evolving, and dynamic. +={Fuller, R. Buckminster} + +The GPL and the CC licenses are ingenious hacks because they navigate this indeterminate ideological space with legally enforceable tools, while looking to informal social practice and norms to provide stable governance. (“Order without law,” in law professor Robert Ellickson’s formulation.)~{ Robert Ellickson, Order Without Law: How Neighbors Settle Disputes (Cambridge, MA: Harvard University Press, 2005). }~ The licenses use the existing legal order to achieve their goals (the sharing of tools and content), and so the strategies are not seen as politically provocative. Yet the licenses are nonetheless politically transformative because they help new communities of practice to organize themselves and do work that may question core premises of copyright law, conventional economics, and government policy in general. +={Ellickson, Robert} + +The beauty of this “ideological straddle” is that it enables a diverse array of players into the same tent without inciting sectarian acrimony. (There is some, of course, but mostly at the margins.) Ecumenical tolerance is the norm because orthodoxies cannot take root at the periphery where innovation is constantly being incubated. In any case, there is a widespread realization in the networked world that shared goals are likely to require variable implementations, depending on specific needs and contexts. + +It may appear that the free software hacker, blogger, tech entrepreneur, celebrity musician, college professor, and biological researcher have nothing in common. 
In truth, each is participating in social practices that are incrementally and collectively bringing into being a new sort of democratic polity. French sociologist Bruno Latour calls it the “pixellation of politics,”~{ Bruno Latour, “We Are All Reactionaries Today,” /{Re-public}/, at http://www.re-public.gr/en/?p=129. }~ which conjures up a pointillist painting slowly materializing. The new polity is more open, participatory, dynamically responsive, and morally respected by “the governed” than the nominal democracies of nation-states. The bureaucratic state tends to be too large and remote to be responsive to local circumstances and complex issues; it is ridiculed and endured. But who dares to aspire to transcend it? +={Latour, Bruno} + +Sooner or later, history-making citizenship is likely to take up such a challenge. It already has. What is the digital republic, after all, but a federation of self-organized communities, each seeking to fulfill its members’ dreams by developing its own indigenous set of tools, rules, and ethics? The power of the commons stems from its role as an organizing template, and not an ideology. Because it is able to host a diverse and robust ecosystem of talent without squeezing it into an ideological straitjacket, the commons is flexible and resilient. It is based on people’s sincerest passions, not on remote institutional imperatives or ideological shibboleths. It therefore has a foundational support and energy that can outperform “mainstream” institutions. +={citizenship:history-making+1;commons:political implications of+1} + +This, truly, is the animating force of the viral spiral: the capacity to build one’s own world and participate on a public stage. (Cicero: “Freedom is participation in power.”) When such energies are let loose in an open, networked environment, all sorts of new and interesting innovations emerge. 
Since an online commons does not have the burden of turning a profit or supporting huge overhead, it can wait for serendipity, passion, and idiosyncratic brilliance to surface, and then rely on the Internet to propagate the fruits virally. +={Cicero:on freedom} + +Oddly enough, entrenched commercial interests do not seem to be alarmed by the disruptive long-term implications of free culture. If the users of CC licenses genuflect before the altar of copyright law, it would appear, that is sufficient. Due respect is being shown. Meanwhile, at the level of social practice, the commoners are gradually building a very different moral economy that converges, from different paths, on a new type of civic order. In /{Code}/, Lessig called it “freedom without anarchy, control without government, consensus without power.” +={Lessig, Lawrence:Code and Other Laws of Cyberspace|freedom, and} + +It is not entirely clear how the special capacities of bottom-up networks — a “non-totalizing system of structure that nonetheless acts as a whole,” in Mark Taylor’s words — can be integrated with conventional government and institutions of power. It is easy to imagine a future confrontation in the political culture, however, as the citizens of the digital republic confront the stodgy bureaucratic state (corporate and governmental). The latter will have the advantages of constitutional authority and state and economic power, but the former are likely to have the advantages of social legitimacy, superior on-the-ground information, and creative energy. How the digital republic will confront the old regime, or supplant it gradually as archaic institutions collapse over time, is the stuff of future history. +={Taylor, Mark;citizenship:history-making+1} + +Theory has its limits. The building of the digital republic was in many ways animated by theory, of course, chiefly the rejection of certain theories of copyright law and the invention of new narratives about creativity and the commons. 
But this project has not been an intellectual, theory-driven enterprise so much as a vast, collective enterprise of history-making citizenship. Using the affordances of digital technologies, individuals have stepped out of their customary or assigned roles to invent entirely new vehicles for creativity, social life, business, politics, science, and education. Individuals have come together to make some remarkable new tools and institutions to serve their needs and preferences. +={commons:sources of new ideas, as+3} + +The story of the commons is, in this sense, the story of a series of public-spirited individuals who are determined to build new vehicles for protecting shared wealth and social energies. It is the story of Richard Stallman fighting the privatization of software and the disenfranchisement of the hacker community. It is the story of Eric Eldred’s determination to go to jail if necessary to defend his ability to build a Web site for great American literature. The viral spiral, as I have called it, truly gained momentum when Lawrence Lessig, as a boundary-breaking law professor, decided to mount a constitutional test case and then to assemble a larger effort to imagine and build a new licensing scheme for sharing. +={Stallman, Richard;Eldred, Eric:public domain, and;Lessig, Lawrence:law in contemporary context, and;software:proprietary} + +The viral spiral then spins off in dozens of directions as newly empowered people discover the freedoms and satisfactions that can accrue to them through this ancient yet now rediscovered and refurbished social vessel. Taken together, countless commons projects are validating some new models of human aspiration. Instead of presuming that a society must revolve around competitive individuals seeking private, material gain (the height of “rationality,” economists tell us), the commons affirms a broader, more complex, and more enlightened paradigm of human self-interest. 
If the Invisible Hand presumes to align private interest and the public good, the commons has shown that cooperation and sharing can also serve this goal with great versatility and sophistication. +={commoners:sharing by} + +Over the long term, the real meaning of the viral spiral may lie in our discovery that the new platforms that we use to create and organize knowledge, and relate to one another, is changing how we think and how we conceptualize our place in the world. John Seely Brown, the former director of Xerox PARC, has said, “From my perspective, a key property of participatory cultures is that they help to create both a culture of learning and a culture of doing. The social basis of doing (e.g. networked communities of interest/ practice) that you see emerging here actually form reflective practicum(s). This, in turn, ends up grounding epistemology — ways of knowing — and provides a pathway back to a kind of pragmatism that Dewey first talked about that is situated between realism and idealism. This is the pathway to creating a learning society and a culture that can embrace change by unleashing and affording productive inquiry in powerful and exciting ways.”~{ John Seely Brown, personal communication, January 26, 2008. }~ +={Brown, John Seely;Dewey, John} + +By empowering us to “step into history” and take greater responsibility for more aspects of our lives, it is no exaggeration to say that the commons encourages us to become more integrated human beings. We learn to integrate our production with our consumption, our learning with our doing, and our ideals with practical realities. This is surely why the viral spiral has been so powerfully transformative. It has helped bring our personal needs and interests into a closer, more congenial alignment with the institutions that serve us. We may be caught in a messy transition, and there remains much to negotiate and debate, but we should count our blessings. 
Few generations are as fortunate in being able to imagine and build a new commons sector of such liberating potential. +={citizenship:history-making} + diff --git a/data/v2/samples/_sisu/image/vs_db_1.png b/data/v2/samples/_sisu/image/vs_db_1.png new file mode 100644 index 0000000..4a9bfd1 Binary files /dev/null and b/data/v2/samples/_sisu/image/vs_db_1.png differ diff --git a/data/v2/samples/_sisu/skin/doc/skin_vs_david_bollier.rb b/data/v2/samples/_sisu/skin/doc/skin_vs_david_bollier.rb new file mode 100644 index 0000000..d357797 --- /dev/null +++ b/data/v2/samples/_sisu/skin/doc/skin_vs_david_bollier.rb @@ -0,0 +1,77 @@ +# coding: utf-8 +=begin + * Name: SiSU - Simple information Structuring Universe - Structured information, Serialized Units + * Author: Ralph Amissah + * http://www.jus.uio.no/sisu + * http://www.jus.uio.no/sisu/SiSU/download + * Description: Skin prepared for Viral Spiral, David Bollier + * License: Same as SiSU see http://www.jus.uio.no/sisu + * Notes: Site default appearance variables set in defaults.rb + Generic site wide modifications set here scribe_skin.rb, and this file required by other "scribes" instead of defaults.rb +=end +module SiSU_Viz + require SiSU_lib + '/defaults' + class Skin + def url_home + 'http://viralspiral.cc/' + end + def url_txt # text to go with url usually stripped url + 'viralspiral.cc' + end + def url_author + 'http://www.bollier.org/' + end + def color_band1 + '"#ffffff"' + end + def txt_hp + 'viralspiral.cc' + end + def txt_home + 'David Bollier' + end + def icon_home_button + '' + end + def icon_home_banner + icon_home_button + end + def banner_home_button + %{
#{png_home}
\n} + end + def banner_home_and_index_buttons + %{
#{png_home}
 This text sub- 
 Table of Contents 
#{table_close}
 #{table_close}} + end + def banner_band + %{
+

Viral Spiral

+

David Bollier

+ #{table_close}} + end + def credits_splash + %{
+Viral Spiral, David Bollier
+The original pdf is available online at
#{url_txt}
+available at
Amazon.com and
+Barnes & Noble
+This book is Copyright David Bollier © 2008
+Under a Creative Commons License, License: Attribution-Noncommercial Works (CC-BY-NC) 3.0 +http://creativecommons.org/licenses/by-nc/3.0/
} + end + end + class TeX + def header_center + "\\chead{\\href{#{@vz.url_home}}{#{@vz.url_txt}}}" + end + def home_url + "\\href{#{@vz.url_home}}{#{@vz.url_txt}}" + end + def home + "\\href{#{@vz.url_home}}{David Bollier}" + end + def owner_chapter + "Document owner details" + end + end +end +__END__ diff --git a/data/v2/samples/viral_spiral.david_bollier.sst b/data/v2/samples/viral_spiral.david_bollier.sst new file mode 100644 index 0000000..e5c9ace --- /dev/null +++ b/data/v2/samples/viral_spiral.david_bollier.sst @@ -0,0 +1,2930 @@ +% SiSU 2.0 + +@title: Viral Spiral + :subtitle: How the Commoners Built a Digital Republic of Their Own + :language: US + +@creator: + :author: Bollier, David + +@classify: + :type: Book + :oclc: 227016731 + :topic_register: SiSU:markup sample:book;networks;Internet:social aspects|copyright|intellectual property;intellectual property:copyright|creative commons|patents|public domain;society:information society;copyright:creative commons|public domain|licenses;patents;book:subject:information society|information networks|society|copyright|creative commons|patents|culture;open source software:social aspects;software:free software|GPL|open source;license:GPL;programming;democracy;democratization;creative commons:organization;public domain:copyright law (U.S.);free culture;culture + +@rights: + :copyright: © 2008 by David Bollier All rights reserved. No part of this book may be reproduced, in any form, without written permission from the publisher. The author has made an online version of the book available under a Creative Commons Attribution-NonCommercial license. It can be accessed at http://www.viralspiral.cc and http://www.onthecommons.org. Requests for permission to reproduce selections from this book should be mailed to: Permissions Department, The New Press, 38 Greene Street, New York, NY 10013. Published in the United States by The New Press, New York, 2008 Distributed by W. W. 
Norton & Company, Inc., New York ISBN 978-1-59558-396-3 (hc.) CIP data available The New Press was established in 1990 as a not-for-profit alternative to the large, commercial publishing houses currently dominating the book publishing industry. The New Press operates in the public interest rather than for private gain, and is committed to publishing, in innovative ways, works of educational, cultural, and community value that are often deemed insufficiently profitable. www.thenewpress.com A Caravan book. For more information, visit www.caravanbooks.org. + :license: Creative Commons Attribution-NonCommercial license. + +@make: + :breaks: new=:B,C; break=1 + :texpdf_font: Liberation Sans + :skin: skin_vs_david_bollier + +@links: + {Viral Spiral}http://viralspiral.cc/ + {David Bollier}http://www.bollier.org/ + {David Bollier @ Wikipedia}http://en.wikipedia.org/wiki/David_Bollier + {Viral Spiral, David Bollier@ SiSU}http://www.jus.uio.no/sisu/viral_spiral.david_bollier + {The Wealth of Networks, Yochai Benkler @ SiSU}http://www.jus.uio.no/sisu/the_wealth_of_networks.yochai_benkler + {Democratizing Innovation, Eric von Hippel @ SiSU}http://www.jus.uio.no/sisu/democratizing_innovation.eric_von_hippel + {Two Bits, Christopher Kelty @ SiSU}http://www.jus.uio.no/sisu/two_bits.christopher_kelty + {Free Culture, Lawrence Lessig @ SiSU}http://www.jus.uio.no/sisu/free_culture.lawrence_lessig + {CONTENT, Cory Doctorow @ SiSU}http://www.jus.uio.no/sisu/content.cory_doctorow + {Free as in Freedom (on Richard M. Stallman), Sam Williams @ SiSU}http://www.jus.uio.no/sisu/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams + {Free For All, Peter Wayner @ SiSU}http://www.jus.uio.no/sisu/free_for_all.peter_wayner + {The Cathedral and the Bazaar, Eric S. 
Raymond @ SiSU }http://www.jus.uio.no/sisu/the_cathedral_and_the_bazaar.eric_s_raymond + {Little Brother, Cory Doctorow @ SiSU}http://www.jus.uio.no/sisu/little_brother.cory_doctorow + {Viral Spiral @ Amazon.com}http://www.amazon.com/Viral-Spiral-Commoners-Digital-Republic/dp/1595583963 + {Viral Spiral @ Barnes & Noble}http://search.barnesandnoble.com/booksearch/isbnInquiry.asp?isbn=1595583963 + +:A~ @title @author + +1~attribution Attribution~# + +To Norman Lear, dear friend and intrepid explorer of the frontiers of democratic practice -# + +1~acknowledgments ACKNOWLEDGMENTS + +In this book, as with any book, dozens of barely visible means of support conspired to help me. It has been hard work, but any author with sufficient honesty and self-awareness realizes the extent to which he or she is a lens that refracts the experiences, insights, and writings of others. It is a pleasure to pay tribute to those who have been helpful to me. + +I am grateful to Larry Lessig, a singular visionary in developing the commons as a new paradigm, for helping to make this book possible. He submitted to several interviews, facilitated my research within the Creative Commons community, and, despite our shared involvements in various projects over the years, scrupulously respected my independence. It is also a pleasure to thank the Rockefeller Foundation for generously helping to cover my research, reporting, and travel expenses. + +I interviewed or consulted with more than one hundred people in the course of writing this book. I want to thank each of them for carving out some time to speak with me and openly sharing their thoughts. The Creative Commons and iCommons staff were particularly helpful in making time for me, pointing me toward useful documents and Web sites and sharing their expertise. I must single out Glenn Otis Brown, Mia Garlick, Joichi Ito, Heather Ford, Tomislav Medak, Ronaldo Lemos, and Hal Abelson for their special assistance. 
+ +Since writing a book resembles parachuting into a forest and then trying to find one’s way out, I was pleased to have many friends who recommended some useful paths to follow. After reading some or all of my manuscript, the following friends and colleagues offered many invaluable suggestions and criticisms: Charles Schweik, Elliot E. Maxwell, John Seely Brown, Emily Levine, Peter Suber, Julie Ristau, Jay Walljasper, Jonathan Rowe, Kathryn Milun, Laurie Racine, and Gigi Sohn. It hardly requires saying that none of these astute readers bears any responsibility for the choices that I ultimately made. + +For the past seven years, the Tomales Bay Institute, recently renamed On the Commons, has nurtured my thinking and commitment to the commons. (On the Commons has no formal affiliation to the Creative Commons world, but it enthusiastically shares its commitments to the commons.) I am grateful to my colleagues Peter Barnes, Harriet Barlow, and Julie Ristau for their unflagging support of my book over the past three years, even when it impinged on my other responsibilities. + +In the early stages of this book, Elaine Pagels was unusually generous in offering her help, and my conversations with Nick Bromell helped pry loose some important insights used in my conclusion. Cherry Alvarado was of extraordinary help to me as she transcribed scores of interviews with unfailing good humor and precision. I also wish to thank Andrew Ryder for resourceful assistance in the early stages of my research. + +I have dedicated this book to my dear friend and mentor Norman Lear. The zeal, imagination, and grace that he brings to the simple imperatives of citizenship have been more instructive and inspirational than he perhaps realizes. He has also been of incalculable support to me in my headstrong explorations of the commons. 
+ +Finally, at the end of the day, when I emerge from my writer’s lair or return from yet another research and reporting trip, it is Ellen and my sons Sam and Tom who indulge my absences, mental and physical, and reacquaint me with the things that matter most. I could not wish for more. David Bollier Amherst, Massachusetts May 1, 2008 + +1~introduction INTRODUCTION + +It started with that great leap forward in human history the Internet, which gave rise to free software in the 1980s and then the World Wide Web in the early 1990s. The shockingly open Internet, fortified by these tools, began empowering a brash new culture of rank amateurs — you and me. And this began to reverse the fierce tide of twentieth-century media. Ordinary people went online, if only to escape the incessant blare of television and radio, the intrusive ads and the narrow spectrum of expression. People started to discover their own voices . . . and their own capabilities . . . and one another. +={free software+2} + +As the commoners began to take charge of their lives, they discovered anew that traditional markets, governments, and laws were often not serving their needs very well. And so some pioneers had the audacity to invent an infrastructure to host new alternatives: free and open-source software. Private licenses to enable sharing and bypass the oppressive complications of copyright law. A crazy quilt of Web applications. And new types of companies that thrive on servicing social communities on open platforms. + +At the dawn of the twenty-first century, the commoners began to make some headway. More people were shifting their attention away from commercial media to homegrown genres — listservs, Web sites, chat rooms, instant messaging, and later, blogs, podcasts, and wikis. A swirling mass of artists, legal scholars, techies, activists, and even scientists and businesses began to create their own online commons. 
They self-organized themselves into a loosely coordinated movement dedicated to “free culture.” + +The viral spiral was under way. + +Viral spiral? /{Viral}/, a term borrowed from medical science, refers to the way in which new ideas and innovations on the Internet can proliferate with astonishing speed. A video clip, a blog post, an advertisement released on the Internet tumbles into other people’s consciousness in unexpected ways and becomes the raw feedstock for new creativity and culture. This is one reason the Internet is so powerful — it virally propagates creativity. A novel idea that is openly released in the networked environment can often find its way to a distant person or improbable project that can really benefit from it. This recombinative capacity — efficiently coordinated through search engines, Web logs, informal social networks, and other means — radically accelerates the process of innovation. It enlivens democratic culture by hosting egalitarian encounters among strangers and voluntary associations of citizens. Alexis de Tocqueville would be proud. + +The /{spiral}/ of /{viral spiral}/ refers to the way in which the innovation of one Internet cohort rapidly becomes a platform used by later generations to build their own follow-on innovations. It is a corkscrew paradigm of change: /{viral}/ networking feeds an upward /{spiral}/ of innovation. The cutting-edge thread achieves one twist of change, positioning a later thread to leverage another twist, which leverages yet another. Place these spirals in the context of an open Internet, where they can sweep across vast domains of life and catalyze new principles of order and social practice, and you begin to get a sense of the transformative power of viral spirals. + +The term /{viral spiral}/ is apt, additionally, because it suggests a process of change that is anything but clean, direct, and mechanical. In the networked environment, there is rarely a direct cause-and-effect.
Things happen in messy, irregular, indeterminate, serendipitous ways. Life on the Internet does not take place on a stable Cartesian grid — orderly, timeless, universal — but on a constantly pulsating, dynamic, and labyrinthine /{web}/ of finely interconnected threads radiating through countless nodes. Here the context is as rich and generative as any individual, /{Viral spiral}/ calls attention to the holistic and historical dynamics of life on the Web, which has a very different metaphysical feel than the world of twentieth-century media. + +The viral spiral began with free software (code that is free to use, not code at no cost) and later produced the Web. Once these open platforms had sufficiently matured, tech wizards realized that software’s great promise is not as a stand-alone tool on PCs, but as a social platform for Web-based sharing and collaboration. The commoners could then begin to imagine: How might these tools be used to overcome the arbitrary and confusing limitations of copyright law? One answer, the Creative Commons (CC) licenses, a free set of public licenses for sharing content, helped mitigate the legal risks of sharing of works under copyright law. This innovation, in turn, helped unleash a massive wave of follow-on innovations. +={free software;Creative Commons (CC) licenses} + +Web 2.0 applications flourished, many of them relying upon sharing made legal through CC licenses. By avoiding the costly overhead of centralized production and marketing, and tapping into the social vitality of a commons, Web 2.0 platforms have enabled ordinary people to share photos (Flickr), favorite browser bookmarks (del.icio.us), favorite news stories (Digg, Reddit), and homemade videos (YouTube). They let people access user-created archives (Wikipedia, Internet Archive, Ourmedia.org), collaborate in news gathering (OhmyNews, Assignment Zero), participate in immersive communities (Second Life), and build open-business models (Magnatune, Revver, Jamendo). 
+={web 2.0:applications} + +This book seeks to trace the long arc of change wrought by a kaleidoscopic swarm of commoners besieged by oppressive copyright laws, empowered by digital technologies, and possessed of a vision for a more open, democratic society. Their movement has been fired by the rhetoric of freedom and actualized by digital technologies connected by the Internet. These systems have made it extremely cheap and easy for ordinary people to copy and share things, and to collaborate and organize. They have democratized creativity on a global scale, challenging the legitimacy and power of all sorts of centralized, hierarchical institutions. + +This larger story has rarely been told in its larger scope. It is at base a story of visionary individuals determined to protect the shared code, content, and social community that they have collectively generated. Richard Stallman pioneered the development of free software; Lawrence Lessig waged challenges against excessive copyright protection and led the development of the Creative Commons licenses; citizen-archivist Eric Eldred fought to preserve his online body of public-domain literature and the community that grew up around it. These are simply the better-known leaders of a movement that has attracted thousands of commoners who are building legally defensible commons into which to pour their creative energies and live their lives. +={free software;commoners:concept of+5;commons:concept of+1} + +The commons — a hazy concept to many people — is a new paradigm for creating value and organizing a community of shared interest. It is a vehicle by which new sorts of self-organized publics can gather together and exercise new types of citizenship. The commons can even serve as a viable alternative to markets that have grown stodgy, manipulative, and coercive. 
A commons arises whenever a given community decides that it wishes to manage a resource in a collective manner, with special regard for equitable access, use, and sustainability. The commons is a means by which individuals can band together with like-minded souls and express a sovereignty of their own. + +Self-styled commoners can now be found in dozens of nations around the world. They are locally rooted but internationally aware citizens of the Internet. They don’t just tolerate diversity (ethnic, cultural, aesthetic, intellectual), they celebrate it. Although commoners may have their personal affinities — free software, open-access publishing, remix music, or countless others — they tend to see themselves as part of a larger movement. They share an enthusiasm for innovation and change that burbles up from the bottom, and are known to roll their eyes at the thick-headedness of the mainstream media, which always seem to be a few steps behind. +={free software} + +If there is an element of self-congratulatory elitism at times, it stems from the freedom of commoners to negotiate their own rules and the pleasure of outmaneuvering conventional institutions. The commoners know how to plug into the specialized Web sites and practitioner communities that can provide just-in-time, highly specialized expertise. As Herbert Simon, the computer-oriented social scientist, once put it, “The meaning of ‘knowing’ today has shifted from being able to remember and repeat information to being able to find and use it.” ~{Cited by John Seely Brown, former chief scientist, Xerox Palo Alto Research Center, at Open Educational Resources conference, Houston, Texas, March 29, 2007.}~ Commoners realize that this other way of being, outside hierarchical institutions, in the open space where viral spirals of innovation are free to materialize, is an important source of their insurgent power. 
+={Simon, Herbert} + +It is perilous to generalize about a movement that has so many disparate parts pushing and pulling and innovating in so many different directions at once. Yet it is safe to say that the commoners — a digital embodiment of /{e pluribus unum}/ — share a common goal. They wish to transcend the limitations of copyright law in order to build their own online communities. It’s not as if the commoners are necessarily hostile to copyright law, markets, or centralized institutions. Indeed, many of them work for large corporations and universities; many rely on copyright to earn a livelihood; many are entrepreneurs. +={commoners:goal} + +Yet the people who are inventing new commons have some deeper aspirations and allegiances. They glimpse the liberating potential of the Internet, and they worry about the totalizing inclinations of large corporations and the state, especially their tendency to standardize and coerce behavior. They object as well to processes that are not transparent. They dislike the impediments to direct access and participation, the limitations of credentialed expertise and arbitrary curbs on people’s freedom. + +One of the first major gatherings of international commoners occurred in June 2006, when several hundred people from fifty nations converged on Rio de Janeiro, Brazil, for the iCommons Summit. The people of this multinational, eclectic vanguard blend the sophistication of the establishment in matters of power and politics with the bravado and playfulness of Beat poets. There were indie musicians who can deconstruct the terms of a record company licensing agreement with Talmudic precision. There were Web designers who understand the political implications of arcane rules made by the World Wide Web Consortium, a technical standards body.
The lawyers and law professors who discourse about Section 114 of the Copyright Act are likely to groove on the remix career of Danger Mouse and the appropriationist antics of Negativland, a sound-collage band. James Boyle and Jennifer Jenkins, two law scholars at Duke Law School, even published a superhero comic book, /{Down by Law!}/, which demystifies the vagaries of the “fair use doctrine” through a filmmaker character resembling video game heroine Lara Croft.~{Keith Aoki, James Boyle, Jennifer Jenkins, /{Down by Law!}/ at http://www.duke.edu/cspd/comics.}~ (Fair use is a provision of copyright law that makes it legal to excerpt portions of a copyrighted work for noncommercial, educational, and personal purposes.) +={commoners:gatherings of} + +2~ The Rise of Socially Created Value +={socially created value+5} + +The salience of electronic commerce has, at times, obscured an important fact — that the commons is one of the most potent forces driving innovation in our time. Individuals working with one another via social networks are a growing force in our economy and society. This phenomenon has many manifestations, and goes by many names — “peer production,” “social production,” “smart mobs,” the “wisdom of crowds,” “crowdsourcing,” and “the commons.”~{“Social production” and “peer production” are associated with the work of Yale law professor Yochai Benkler, especially in his 2006 book, /{The Wealth of Networks}/. “Smart mobs” is a coinage of Howard Rheingold, author of a 2003 book by the same name. “Crowdsourcing” is the name of a blog run by Jeff Howe and the title of a June 2006 /{Wired}/ article on the topic. “Wisdom of crowds” is a term coined by James Surowiecki and used as the title of his 2004 book.}~ The basic point is that /{socially created value}/ is increasingly competing with conventional markets, as GNU/Linux has famously shown.
Through an open, accessible commons, one can efficiently tap into the “wisdom of the crowd,” nurture experimentation, accelerate innovation, and foster new forms of democratic practice. +={commons:sources of new ideas, as+1} + +This is why so many ordinary people — without necessarily having degrees, institutional affiliations, or wealth — are embarking upon projects that, in big and small ways, are building a new order of culture and commerce. It is an emerging universe of economic, social, and cultural activity animated by self-directed amateurs, citizens, artists, entrepreneurs, and irregulars. + +Hugh McGuire, a Montreal-based writer and Web designer, is one. In 2005, he started LibriVox, a digital library of free public-domain audio books that are read and recorded by volunteers. More than ten thousand people a day visit the Web site to download audio files of Twain, Kafka, Shakespeare, Dostoyevsky, and others, in nearly a dozen languages.~{ http://www.librivox.org. }~ The Faulkes Telescope Project in Australia lets high school students connect with other students, and with professional astronomers, to scan the skies with robotic, online telescopes.~{ http://faulkes-telescope.com. }~ In a similar type of learning commons, the Bugscope project in the United States enables students to operate a scanning electronic microscope in real time, using a simple Web browser on a classroom computer connected to the Internet.~{ http://bugscope.beckman.uiuc.edu. }~ +={Bugscope;LibriVox;McGuire, Hugh;Faulkes Telescope Project} + +Thousands of individual authors, musicians, and filmmakers are using Web tools and Creative Commons licenses to transform markets for creative works — or, more accurately, to blend the market and commons into integrated hybrids. A nonprofit humanitarian group dedicated to doing reconstructive surgery for children in poor countries, Interplast, produced an Oscar-winning film, /{A Story of Healing}/, in 1997. 
Ten years later, it released the film under a Creative Commons license as a way to publicize Interplast’s work while retaining ownership of the film: a benefit for both film buffs and Interplast.~{ http://www.interplast.org and http://creativecommons.org/press-releases/ 2007/04/%E2%80%9Ca-story-of-healing%E2%80%9D-becomes-first-acad emy-award%C2%AE-winning-film-released-under-a-creative-commons-li cense. }~ +={Interplast} + +Scoopt, a Glasgow, Scotland–based photography agency, acts as a broker to help bloggers and amateurs sell newsworthy photos and videos to the commercial media.~{ http://www.scoopt.com. }~ The Boston band Two Ton Shoe released its music on the Web for free to market its concerts. Out of the blue, a South Korean record label called one day to say it loved the band and could it come over to Seoul, all expenses paid, to perform four concerts? Each one sold out.~{ http://www.twotonshoe.com/news.html. }~ Boing Boing blogger and cyber-activist Cory Doctorow released his 2003 science-fiction novel, /{Down and Out in the Magic Kingdom}/, under a CC license, reaping a whirlwind of worldwide exposure.~{ See Doctorow’s preface to the second release of the book, February 12, 2004, Tor Books. See also his blog Craphound.com, September 9, 2006, at http:// www.craphound.com/?=p=1681. }~ +={Doctorow, Cory;Scoopt} + +2~ The Commoners Build a Digital Republic of Their Own +={commons: achievement of+11} + +The profusion of commons on the Internet may appear to be a spontaneous and natural development. In fact, it is a hard-won achievement. An infrastructure of software, legal rights, practical expertise, and social ethics had to be imagined, built, and defended. In a sense, the commoners had to invent themselves as commoners. They had to learn to recognize their own distinct interests — in how to control their creative works, how to organize their communities, and how to engage with market players without being co-opted. 
They have, in fact, invented a new sort of democratic polity within the edifice of the conventional nation-state. + +The commoners differ from most of their corporate brethren in their enthusiasm for sharing. They prefer to freely distribute their writing, music, and videos. As a general rule, they don’t like to encase their work in airtight bubbles of property rights reinforced by technological locks. They envision cyberspace more as a peaceable, sociable kingdom than as a take-no-prisoners market. They honor the individual while respecting community norms. They are enthusiastic about sharing while respecting the utility of markets. Idealistic yet pragmatic, they share a commitment to open platforms, social cooperation, and elemental human freedoms. +={commoners:sharing by+1} + +It is all very well to spout such lofty goals. But how to actualize them? That is the story that the following pages recount. It has been the work of a generation, some visionary leaders, and countless individuals to articulate a loosely shared vision, build the infrastructure, and develop the social practices and norms. This project has not been animated by a grand political ideology, but rather is the result of countless initiatives, grand and incremental, of an extended global family of hackers, lawyers, bloggers, artists, and other supporters of free culture. +={commons:political implications of+3} + +And yet, despite its focus on culture and its aversion to conventional politics, the growth of this movement is starting to have political implications. In an influential 2003 essay, James F. Moore announced the arrival of “an emerging second superpower.”~{ James F. Moore, “The Second Superpower Rears its Beautiful Head,” March 31, 2003, available at http://cyber.law.harvard.edu/people/jmoore/secondsuperpower.html. }~ It was not a nation, but the coalescence of people from around the world who were asserting common values, and forming new public identities, via online networks.
The people of this emerging “superpower,” Moore said, are concerned with improving the environment, public health, human rights, and social development. He cited as early examples the international campaign to ban land mines and the Seattle protests against the World Trade Organization in 1999. The power and legitimacy of this “second superpower” do not derive from the constitutional framework of a nation-state, but from its ability to capture and project people’s everyday feelings, social values, and creativity onto the world stage. Never in history has the individual had such cheap, unfettered access to global audiences, big and small. +={Moore, James} + +The awakening superpower described in /{Viral Spiral}/ is not a conventional political or ideological movement that focuses on legislation and a clutch of “issues.” While commoners do not dismiss these activities as unimportant, most are focused on the freedom of their peer communities to create, communicate, and share. When defending these freedoms requires wading into conventional politics and law, they are prepared to go there. But otherwise, the commoners are more intent on building a kind of parallel social order, inscribed within the regnant political economy but animated by their own values. Even now, the political/cultural sensibilities of this order are only vaguely understood by governments, politicians, and corporate leaders. The idea of “freedom without anarchy, control without government, consensus without power” — as Lawrence Lessig put it in 1999~{ Lawrence Lessig, /{Code and Other Laws of Cyberspace}/ (New York: Basic Books, 1999), p. 4. }~ —is just too counterintuitive for the conventionally minded to take seriously. 
+={commoners:sharing by+1;Lessig, Lawrence+2} + +Very early on, the commoners identified copyright law as a major impediment to their vision of a “sharing economy.” It is not that they revile copyright law as such; indeed, many commoners defend the importance of copyright law to creative endeavor. The problem, they insist, is that large corporations with vast inventories of copyrighted works — film studios, record labels, book publishers, software companies — have used their political power unfairly to extend the scope and term of copyright privileges. A limited monopoly granted by the U.S. Constitution has morphed into an expansive, near-perpetual monopoly, enforced by intrusive technologies and draconian penalties. +={copyright law:sharing economy vs.+3} + +The resulting curbs on citizen freedom, as large entertainment and media corporations gain legal privileges at the expense of the public, is a complicated issue that I return to in chapter 2. But it is worth noting briefly why copyright law has been particularly harmful to the commons in the digital age. When Congress enacted a major revision of U.S. copyright law in 1976, it eliminated a longstanding requirement that works had to be formally registered in order to receive copyright protection.~{ The effect of the elimination of formal registration in copyright law is cogently discussed by Lessig in /{Free Culture}/ (New York: Penguin, 2004), pp. 170–73, and pp. 248–53. }~ Under the new law, /{everything}/ became automatically copyrighted upon creation. This meant that all information and artistic work created after 1978 (when the law took effect) has been born into an invisible envelope of property rights. It sounds appealing to eliminate bureaucratic formalities like registration. But the shift to automatic copyright has meant that every digital scribble is born with a © branded on its side. /{Culture = private property}/. 
+={Copyright Act (1976);copyright law:automatic+1|revision of (1976)+1|and property rights+1;property rights, and copyright law} + +The various industries that rely on copyrights have welcomed this development because it helps them portray their ownership rights as all-encompassing. They can cast the public’s right to use works without permission or payment — traditionally guaranteed under the fair use doctrine and the public domain — as exceptions to the general rule of absolute property rights. “What could be wrong with enclosing works in ever-stronger packages of property rights?” the music and film industries argue. “That’s how new economic wealth is created.” The media oligopolies that control most of television, film, music, and news gathering naturally want to protect their commercial content. It is the fruit of a vast system of fixed investment — equipment, high-priced stars, lawyers, distribution channels, advertising, etc. — and copyright law is an important tool for protecting that value. + +The Internet has profoundly disrupted this model of market production, however. The Internet is a distributed media system of low-cost capital (your personal computer) strung together with inexpensive transmission and software. Instead of being run by a centralized corporation that relies upon professionals and experts above all else, the Internet is a noncommercial infrastructure that empowers amateurs, citizens, and ordinary individuals in all their quirky, authentic variety. The mass media have long regarded people as a commodifiable audience to be sold to advertisers in tidy demographic units. +={Internet:empowerment by+2} + +Now, thanks to the Internet, “the people formerly known as the audience” (in Jay Rosen’s wonderful phrase) are morphing into a differentiated organism of flesh-and-blood, idiosyncratic individuals, as if awakening from a spell. 
Newly empowered to speak as they wish, in their own distinctive, personal voices to a global public of whoever cares to listen, people are creating their own transnational tribes. They are reclaiming culture from the tyranny of mass-media economics and national boundaries. In Lessig’s words, Internet users are overthrowing the “read only” culture that characterized the “weirdly totalitarian” communications of the twentieth century. In its place they are installing the “read/write” culture that invites everyone to be a creator, as well as a consumer and sharer, of culture.~{ Lawrence Lessig, “The Read-Write Society,” delivered at the Wizards of OS4 conference in Berlin, Germany, on September 5, 2006. Available at http://www.wizards-of-os.org/programm/panels/authorship_amp_culture/keynote_the_read_write_society/the_read_write_society.html. }~ A new online citizenry is arising, one that regards its socially negotiated rules and norms as at least as legitimate as those established by conventional law. +={Rosen, Jay} + +Two profoundly incommensurate media systems are locked in a struggle for survival or supremacy, depending upon your perspective or, perhaps, mutual accommodation. For the moment, we live in a confusing interregnum — a transition that pits the dwindling power and often desperate strategies of Centralized Media against the callow, experimental vigor of Internet-based media. This much is clear, however: a world organized around centralized control, strict intellectual property rights, and hierarchies of credentialed experts is under siege. A radically different order of society based on open access, decentralized creativity, collaborative intelligence, and cheap and easy sharing is ascendant. Or to put it more precisely, we are stumbling into a strange hybrid order that combines both worlds — mass media and online networks — on terms that have yet to be negotiated.
+ +2~ The Rise of the Commoners +={commoners:rise of+21} + +But who shall do the negotiating? Who will set forth a compelling alternative to centralized media, and build it? That task has fallen to a loosely coordinated global federation of digital tribes — the free software and open-source hackers, the Wikipedians, the bloggers and citizen-journalists, the remix musicians and filmmakers, the avant-garde artists and political dissidents, the educators and scientists, and many others. It is a spontaneous folk-tech conspiracy that belongs to everyone and no one. + +As we will see in chapter 1, Richard Stallman, the legendary hacker, played an indispensable first-mover role by creating a sovereign domain from which to negotiate with commercial players: free software. The software commons and later digital commons inspired by it owe an incalculable debt to Stallman’s ingenious legal innovation, the General Public License, or GPL, launched in 1989. The GPL is a license for authorizing anyone to use a copyrighted software program so long as any copies or derivative versions are also made available on the same terms. This fairly simple license enables programmers to contribute code to a common pool without fear that someone might privatize and destroy the commons. +={General Public License (GPL)} + +As the computer revolution continued through the 1980s and the Internet went wide in the 1990s, the antisocial, antidemocratic implications of copyright law in networked spaces became more evident. As we will see in chapter 2, a growing community of progressive legal scholars blew the whistle on some nasty developments in copyright law that were shrinking the public’s fair use rights and the public domain. Scholars such as James Boyle, Pamela Samuelson, Jessica Litman, Yochai Benkler, Lawrence Lessig, Jonathan Zittrain, and Peter Jaszi provided invaluable legal analyses about the imperiled democratic polity of cyberspace. 
+={Lessig, Lawrence+2} + +By the late 1990s, this legal scholarship was in full flower, Internet usage was soaring, and the free software movement produced its first significant free operating system, GNU/Linux. The commoners were ready to take practical action. Lessig, then a professor at Harvard Law School, engineered a major constitutional test case, /{Eldred v. Reno}/ (later /{Eldred v. Ashcroft}/), to try to strike down a twenty-year extension of copyright terms — a case that reached the U.S. Supreme Court in 2002. At the same time, Lessig and a number of his colleagues, including MIT computer scientist Hal Abelson, Duke law professor James Boyle, and Villanova law professor Michael W. Carroll, came together to explore innovative ways to protect the public domain. It was a rare moment in history in which an ad hoc salon of brilliant, civic-minded thinkers from diverse fields of endeavor found one another, gave themselves the freedom to dream big thoughts, and embarked upon practical plans to make them real. +={GNU/Linux;Linux:see also GNU/Linux;Eldred v. Reno/Eldred v. Ashcroft} + +The immediate upshot of their legal and techno ingenuity, as we will see in chapters 3 and 4, was the drafting of the Creative Commons licenses and the organization that would promote them. The purpose of these free, standardized public licenses was, and is, to get beyond the binary choice imposed by copyright law. Why must a work be considered either a chunk of privately owned property or a kind of nonproperty completely open to anyone without constraint (“in the public domain”)? The CC licenses overcome this stifling either/or logic by articulating a new middle ground of ownership that sanctions sharing and collaboration under specified terms.
To stress its difference from copyright law, which declares “All Rights Reserved,” the Creative Commons licenses bear the tagline “Some Rights Reserved.” +={Creative Commons (CC) licenses+2|copyright law, and+2;copyright law:CC licenses+2} + +Like free software, the CC licenses paradoxically rely upon copyright law to legally protect the commons. The licenses use the rights of ownership granted by copyright law not to exclude others, but to invite them to share. The licenses recognize authors’ interests in owning and controlling their work — but they also recognize that new creativity owes many social and intergenerational debts. Creativity is not something that emanates solely from the mind of the “romantic author,” as copyright mythology has it; it also derives from artistic communities and previous generations of authors and artists. The CC licenses provide a legal means to allow works to circulate so that people can create something new. /{Share, reuse, and remix, legally}/, as Creative Commons puts it. + +After the licenses were introduced in December 2002, they proliferated throughout the Internet and dozens of nations as if by spontaneous combustion. It turns out that the licenses have been more than a legal fix for the limitations of copyright law. They are a powerful form of social signaling. The licenses have proven to be a flag for commoners to advertise their identities as members of a culturally insurgent sharing economy — an aesthetic/political underground, one might say. Attaching the CC logo to one’s blog, video, MP3 file, or laptop case became a way to proclaim one’s support for free culture. Suddenly, all sorts of participatory projects could be seen as elements of a larger movement. By 2007, authors had applied one or more of six CC licenses to 90 million works, by one conservative estimate, or more than 220 million works by another estimate.
Collectively, CC-licensed works constitute a class of cultural works that are “born free” to be legally shared and reused with few impediments. + +A great deal of the Creative Commons story revolves around its founder, the cerebral yet passionate Larry Lessig, a constitutional law professor at Harvard in the mid-1990s until a move to Stanford Law School in 2000. As a scholar with a sophisticated grasp of digital technologies, Lessig was one of the first to recognize that as computers became the infrastructure for society, software code was acquiring the force of law. His 1999 classic, /{Code and Other Laws of Cyberspace}/, is renowned for offering a deep theoretical framework for understanding how politics, law, technology, and social norms shape the character of cyberspace — and in turn, any society. +={Lessig, Lawrence:Code and Other Laws of Cyberspace} + +In popularizing this message, it didn’t hurt that Lessig, an experienced classroom lecturer, is a poised and spellbinding performer. On the tech and copyright circuit, in fact, he has become something of a rock star. With his expansive forehead and wire glasses, Lessig looks every bit the professor he is. Yet in his signature black jeans and sport jacket, delivering punchy one-liners punctuated by arresting visuals projected on a big screen behind him, Lessig makes a powerful impression. He’s a geek-chic techie, intellectual, legal activist, and showman all rolled into one. + +From the beginning, Lessig and his colleagues wondered, How far can the sharing ethic be engineered? Just how far can the idea of free culture extend? As it turns out, quite far. At first, of course, the free culture project was applied mostly to Web-based text and music. But as we see in chapters 5 through 12, the technologies and ethic of free culture have rapidly taken root in many creative sectors of society — video, music, books, science, education — and even business and international arts and culture. + +!{/{Remix culture.}/}! 
Thanks to digital technologies, musicians can sample verbatim snippets of other musicians’ work in their own works, producing “remixes” that blend sounds from a number of copyrighted songs. It’s all patently illegal, of course, unless you’re wealthy enough to pay for the rights to use a sample. But that hasn’t stopped artists. +={music:remixes+2;remix works+2} + +In fact, the underground remix scene has become so robust that even established artists feel obliged to engage with it to bolster their street cred. With a wink and a nudge from record labels, major rap stars like Jay-Z and Eminem have released instrumental tracks of their records in the hope and expectation that remix /{auteurs}/ will recycle the tracks. Record labels have quietly relied on mixtapes — personalized compilations of tracks — to gain exposure and credibility.~{ See, e.g., Joanna Demers, /{Steal This Music: How Intellectual Property Law Affects Musical Creativity}/ (Athens: University of Georgia Press, 2006); Kelefa Sanneh, “Mixtapes Mix in Marketing,” New York Times, July 20, 2006. }~ To help an illegal social art go legit, many artists are using Creative Commons licenses and public-domain sound clips to build a legal body of remix works. + +In the video world, too, the remix impulse has found expression in its own form of derivative creativity, the mashup. From underground remakes of /{Star Wars}/ films to parodies of celebrities, citizen-amateurs are taking original video clips and mixing them with other images, pop music tracks, and their own narrations. When Alaska senator Ted Stevens compared the Internet to a “series of tubes,” video clips of his rambling speech were mashed up and set to a techno dance beat. Beyond this playful subculture, serious filmmakers are using CC licenses on their works to develop innovative distribution systems that attract large audiences and earn money.
Machinima animations — a filmmaking technique that uses computer game action sequences, shot with in-game cameras and then edited together — are pioneering a new market niche, in part through their free distribution under a CC license. +={Machinima animations;Stevens, Ted} + +!{/{Open business.}/}! One of the most surprising recent developments has been the rise of “open business” models. Unlike traditional businesses that depend upon proprietary technology or content, a new breed of businesses see lucrative opportunities in exploiting open, participatory networks. The pioneer in this strategy was IBM, which in 2000 embraced GNU/Linux, the open-source computer operating system, as the centerpiece of its service and consulting business.~{ Steve Lohr, “IBM to Give Free Access to 500 Patents,” /{New York Times}/, July 11, 2005. See also Steven Weber, /{The Success of Open Source Software}/ (Cambridge, Mass.: Harvard University Press, 2004), pp. 202–3. See also Pamela Samuelson, “IBM’s Pragmatic Embrace of Open Source,” /{Communications of the ACM}/ 49, no. 21 (October 2006). }~ Dozens of small, Internet-based companies are now exploiting open networks to build more flexible, sustainable enterprises. +={GNU/Linux:IBM, and;IBM:GNU/Linux, and;open business models+1} + +The key insight about many open-platform businesses is that they no longer look to copyright or patent law as tools to assert market control. Their goal is not to exclude others, but to amass large communities. Open businesses understand that exclusive property rights can stifle the value creation that comes with mass participation, and so they strive to find ways to “honor the commons” while making money in socially acceptable forms of advertising, subscriptions, or consulting services.
The brave new economics of “peer production” is enabling forward-thinking businesses to use social collaboration among thousands, or even millions, of people to create social communities that are the foundation for significant profits. /{BusinessWeek}/ heralded this development in a major cover story in 2005, “The Power of Us,” and called sharing “the net’s next disruption.”~{ Robert D. Hof, “The Power of Us: Mass Collaboration on the Internet Is Shaking Up Business,” /{BusinessWeek}/, June 20, 2005, pp. 73–82. }~ + +!{/{Science as a commons.}/}! The world of scientific research has long depended on open sharing and collaboration. But increasingly, copyrights, patents, and university rules are limiting the flow of scientific knowledge. The resulting gridlock of rights in knowledge is impeding new discoveries and innovation. Because of copyright restrictions and software incompatibilities, scientists studying genetics, proteins, and marine biology often cannot access databases containing vital research. Or they cannot easily share physical lab samples. When the maker of Golden Rice, a vitamin-enhanced bioengineered rice, tried to distribute its seeds to millions of people in poor countries, it first had to get permissions from seventy patent holders and obtain six Material Transfer Agreements (which govern the sharing of biomedical research substances).~{ Interview with John Wilbanks, “Science Commons Makes Sharing Easier,” /{Open Access Now}/, December 20, 2004, available at http://www.biomedcentral.com/openaccess/archive/?page=features&issue=23.}~ +={Wilbanks, John+1;Science Commons:CC Commons spinoff, and+1} + +The problem of acquiring, organizing, and sharing scientific knowledge is becoming more acute, paradoxically enough, as more scientific disciplines become dependent on computers and the networked sharing of data.
To help deal with some of these issues, the Creative Commons in 2005 launched a new project known as the Science Commons to try to redesign the information infrastructure for scientific research. The basic idea is to “break down barriers to sharing that are hindering innovation in the sciences,” says John Wilbanks, executive director of Science Commons. Working with the National Academy of Sciences and other research bodies, Wilbanks is collaborating with astronomers, archaeologists, microbiologists, and medical researchers to develop better ways to make vast scientific literatures more computer-friendly, and databases technically compatible, so that they can be searched, organized, and used more effectively. + +!{/{Open education and learning.}/}! A new class of knowledge commons is poised to join free and open-source software, the Creative Commons and Wikipedia as a coherent social movement. The new groundswell goes by the awkward name “Open Educational Resources,” or OER.~{ See, e.g., Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, “A Review of the Open Educational Resources (OER) Movement: Achievements, Challenges and New Opportunities,” February 2007, available at http://www.oerderves.org/?p=23.}~ One of the earlier pioneers of the movement was the Massachusetts Institute of Technology, which has put virtually all of its course materials on the Web, for free, through its OpenCourseWare initiative. The practice has now spread to scores of colleges and universities around the world, and inspired a broader set of OER initiatives: digital repositories for articles, reports, and data; open-access scholarly journals that bypass expensive commercial publishers; and collaborative Web sites for developing teaching materials. There are wikis for students and scholars working together, sites to share multimedia presentations, and much more.
+={education:OER movement+1;Open Educational Resources (OER) movement+1;Wikipedia:social movement, as+1;Creative Commons (CC):social movement, as+1} + +The OER movement has particular importance for people who want to learn but don’t have the money or resources — scholars in developing countries, students struggling to pay for their educations, people in remote or rural locations, people with specialized learning needs. OER is based on the proposition that it will not only be cheaper or perhaps free if teachers and students can share their materials through the Web, it will also enable more effective types of learning. So the OER movement is dedicated to making learning tools cheaper and more accessible. The revolutionary idea behind OER is to transform traditional education — teachers imparting information to passive students — into a more learner-driven process facilitated by teachers. Self-directed, socially driven learning supplants formal, hierarchical modes of teaching. + +!{/{The international sharing economy.}/}! Shortly after the first CC licenses were released in 2002, dozens of exceptionally capable volunteers — from Japan, Finland, Brazil, South Africa, and other countries — came knocking on the door of CC. How can we adapt the American CC licenses to our respective national legal systems? they asked. This unexpected turn prompted the Creative Commons to inaugurate Creative Commons International, based in Berlin, Germany, to supervise the complicated task of “porting” the U.S. licenses to other legal jurisdictions. To date, CC affiliates in forty-seven nations have adapted the U.S. licenses to their legal systems, and another seventeen have porting projects under way. +={Creative Commons International+1} + +The volunteers include avant-garde artists in Croatia, free software programmers in the Netherlands, South Korean judges, Italian law professors, South African musicians, Malaysian citizen-journalists, Bulgarian filmmakers, and Taiwanese songwriters.
The passionate international licensing movement has even been embraced by the Brazilian government, which has proclaimed itself the first Free Culture Nation. As usage of the licenses spreads, they are effectively becoming the default international legal structure of the sharing economy. + +2~ A New Type of Emergent Democracy? + +Peter Suber, a leading champion of open-access scholarly publishing, once explained to me why a disparate, rambunctious crowd of commoners spread around the globe might wish to work together to do something about their plight. “People are taking back their culture,” Peter said. “People who have not been served by the current law have quietly endured it until they saw that they didn’t have to.”~{ Interview with Peter Suber, June 28, 2006. }~ The Creative Commons has become both a symbol and a tool for people to reclaim creativity and culture from the mass-media leviathans. The licenses and the organization have become instruments to advance a participatory, sharing economy and culture. +={Suber, Peter} + +How far can it go? Will it significantly affect conventional politics and government? Can it bring market forces and social needs into a more positive alignment? + +This book is about the struggle to imagine this new world and push it as far as it can go. It is, in one sense, a history, but “history” suggests that the story is over and done. The truth is that the commons movement is tremendously robust and expansive right now. The early history about free software, the public domain, and the Creative Commons is simply a necessary foundation for understanding the propulsive logic of what is happening. + +The story told in these pages is not entirely new; it has been told in fragments and through the restless lens of journalism. But it has not been told in its larger conceptual and historical sweep. 
That’s partly because most of its players are usually seen in isolation from one another, and not put in the context of the larger open-platform revolution. It’s also because the free culture movement, notwithstanding its vigor, is generally eclipsed by the big-money corporate developments that are ostensibly more important. But that is precisely the problem: conventional economics does not understand the actual significance of open platforms and the commons. We need to understand what the online commons represent: a powerful sociotechnological paradigm that is reordering some basic dynamics of creative practice, culture, politics, and everyday life. + +I am no bystander in this story, it must be said, but a commoner who has grappled with the quandaries of copyright law and the public domain for nearly twenty years. In 2001, after co-founding Public Knowledge, a Washington advocacy group to defend the public’s stake in copyright and Internet policies, I went on to write books on the market enclosure of myriad commons and on the absurd expansions of copyright and trademark law. Over the course of this work, I discovered how a commons analysis can help us understand the digital revolution. It can help us see that it is not just about technological innovation, but about social and legal innovations. Reading Elinor Ostrom and Yochai Benkler, in particular — two leading theorists of the commons — I came to realize that social communities, and not just markets, must be recognized as powerful vehicles for creating value. I realized that many basic assumptions about property rights, as embedded in copyright law and neoclassical economics, fail to take account of the generative power of online communities. +={Public Knowledge} + +How then shall we create the commons and protect it? That question lies at the core of this book and the history of the commoners in cyberspace.
I am mostly interested in exploring how the Creative Commons has galvanized a variety of interrelated crusades to build a digital republic of, by, and for the commoners. One reason why a small licensing project has grown into a powerful global brand is that, at a time of mass-media dominance and political stalemate, free culture offers an idealistic alternative vision. Something you can do. A movement in which everyone can play some useful role. The free culture movement stands for reclaiming culture by making it yourself and for reviving democracy by starting in your own digital backyard. CC stands for personal authenticity and diversity in a world of stale, mass-marketed product. It stands for good fun and the joys of sharing. + +Put the CC logo on your blog or music CD or video, and you too can belong to a movement that slyly sticks it to Big Media without getting into an ugly brawl. Don’t get mad, the CC community seems to whisper. Just affiliate with a growing virtual nation of creative renegades. Transcend a rigged game by migrating to a commons of your own making. Build therefore your own world, in the manner of Henry David Thoreau — then imagine its embrace by many others. Imagine it radiating into conventional politics with a refreshing ethic of open accountability and earned rewards, a contempt for coercive business practices and governmental abuses, and an insistence upon transparency, participation, and the consent of the governed. You may be an entrepreneur who just wants to build a profitable business, or a scientist who just wants to find better ways to research Huntington’s disease. The commons has some solutions in these areas, too. This big-tent movement is unabashedly ecumenical. + +This is the vision now exploding around the world anyway. The recurring question in its earliest days, and now, remains — How can we build it out? /{Can}/ it be built out? And how far? For the commoners, just asking the question is halfway to answering it. 
+ +:B~ PART I + +:C~ Harbingers of the Sharing Economy + +1~intro_i [Intro] -# + +The rise of the sharing economy had its roots among the renegades living on the periphery of mainstream culture. At the time, they were largely invisible to one another. They had few ways of making common cause and no shared language for even naming the forces that troubled them. It was the 1990s, after all, a time of alluring mercantile fantasies about the limitless possibilities of the laissez-faire “information superhighway.” Even for those who could pierce the mystifications, the new technologies were so new, powerful, and perplexing that it was difficult to understand their full implications. + +The renegades, while sharing a vision of technological progress, were disturbed by many on-the-ground realities. A small network of hackers, for example, was enraged to learn that software was becoming a closed, proprietary product. Companies could prohibit interested individuals from tinkering with their own, legally purchased software. On both creative and political grounds, this development was odious to Richard Stallman, a brilliant programmer who soon hatched a dream of building a protected kingdom of “free software,” the subject of chapter 1. +={software:proprietary;Stallman, Richard} + +Meanwhile, a loose community of legal scholars and tech activists was becoming alarmed by the antisocial, anti-democratic tendencies of copyright law and digital technology. Scholars such as Lawrence Lessig, James Boyle, and Hal Abelson began to realize that copyright law and software code were acquiring unsuspected powers to redesign our political and social order. They also began to understand the ways in which the public domain is not a wasteland, as conventional minds had long supposed, but a highly generative zone of culture. This intellectual journey is described in chapter 2. 
+={Abelson, Hal;Boyle, James;Lessig, Lawrence+1} + +Finally, it was becoming painfully apparent to yet another amorphous band of renegades — artists, musicians, writers, scientists, educators, citizens — that copyright law and technological controls were artificially restricting their creative freedoms. With scant public attention, the music, film, and publishing industries were using their clout to protect their archaic business models at the expense of innovation and the commons. This onslaught ultimately provoked one exemplary commoner, Eric Eldred, to team up with legal scholar Lawrence Lessig to mount an unprecedented constitutional challenge to copyright law, the focus of chapter 3. + +None of these surges of innovative dissent was well funded or particularly promising. For the most part, they were improvisational experiments undertaken by public-spirited individuals determined to vindicate their visions for a better society. With the benefit of hindsight, we can now see that while many of these initiatives were only partially successful, each was indispensable to the larger, later task of imagining and building a digital republic to secure basic human freedoms, the subject of Part II. + +1~ 1 IN THE BEGINNING WAS FREE SOFTWARE +={Stallman, Richard+34;General Public License (GPL)+1} + +/{Richard Stallman's mythic struggle to protect the commons of code set the viral spiral in motion.}/ + +The struggle to imagine and invent the software commons, which later set in motion a viral spiral now known as free culture, began with Richard Stallman, a brilliant, eccentric MIT computer programmer. Stallman’s history as a hacker and legal innovator has by now become the stuff of legend. As one of the first people to confront the deep tensions between proprietary control and the public domain in software development, Stallman has achieved that rare pinnacle in the high-tech world, the status of celebrity geek. 
Besides his programming prowess, he is renowned for devising the GNU General Public License, more commonly known as the GPL, an ingenious legal mechanism to protect shared software code. +={free culture+33} + +Stallman — or RMS, as he likes to be called — has become an iconic figure in the history of free culture in part because he showed courageous leadership in protecting the commons well before anyone else realized that there was even a serious problem. He was a lone voice in the wilderness for at least ten years before the Internet became a mass medium, and so has earned enormous credibility as a leader on matters of free culture. He has also been reviled by some as an autocratic zealot with bad manners and strident rhetoric. + +It is perhaps fitting that Stallman could be mistaken for an Old Testament prophet. He is a shaggy, intense, and fiercely stubborn guy. On his Web site, visitors can find a gag photo of him posed as Saint IGNUcius, with his hand raised in mock genuflection and his head encircled by a gold aureole (held in place by two admiring acolytes). He has been known to deliver lectures barefoot, sleep on the couch in a borrowed office for weeks at a time, and excoriate admirers for using taboo phrases like “intellectual property” and “copyright protection.” Stallman explains that “intellectual property” incorrectly conflates three distinct bodies of law — copyright, patent, and trademark — and emphasizes individual property rights over public rights. “Copyright protection” is misleading, he says, because it implies a positive, necessary act of /{defending}/ something rather than an acquisitive, aggressive act of a monopolist. Stallman considers /{content}/ to be a disparaging word, better replaced by “works of authorship.” He has even made a list of fourteen words that he urges people to avoid because of their politically misleading valences.~{ Joshua Gay, editor, /{Free Software, Free Society: Selected Essays of Richard M. 
Stallman}/ (Boston: GNU Press, 2002), pp. 190–91. }~ +={authorship;intellectual property} + +Even though Stallman frequently speaks to august academic and scientific gatherings, and meets with the heads of state in developing countries, he resembles a defiant hippie. Yet for his visionary role in developing free software and the free software philosophy, Stallman is treated as if he were a head of state . . . which, in a way, he is. His story has irresistible mythological resonances — the hero’s journey through hardship and scorn, later vindicated by triumph and acclaim. But for many, including his most ardent admirers, Stallman’s stubborn idealism can also be supremely maddening. +={free software} + +His first encounter with the creeping ethic of proprietary control, in the late 1970s, is an oft-told part of his story. The Xerox Corporation had donated an experimental laser printer to the MIT Artificial Intelligence Lab, where Stallman was then a graduate student. The printer was constantly jamming, causing frustration and wasting everyone’s time. Stallman wanted to devise a software fix but he discovered that the source code was proprietary. Determined to find out who was responsible and force them to fix it, he tracked down a computer scientist at Carnegie Mellon University who had supposedly written the code — but the professor refused to help him; he had signed a nondisclosure agreement with Xerox prohibiting him from sharing the code. +={Xerox Corporation+1;software:proprietary+1} + +Stallman considered Xerox’s lockup of code a profound moral offense that violated the integrity of the hacker community. (Among practitioners, /{hacker}/ is a term of respect for an ingenious, resourceful programmer, not an accusation of criminality.) Not only did it prevent people from fixing their own equipment and software, the nondisclosure agreement flouted the Golden Rule. It prohibited sharing with one’s neighbor.
The proprietary ethic was not just immoral, by Stallman’s lights, but a barrier to developing great software. +={hackers:use of term} + +By the late 1970s, he had developed a breakthrough text editor, Emacs, in collaboration with a large community of programmers. “Everybody and his brother was writing his own collection of redefined screen-editor commands, a command for everything he typically liked to do,” Stallman wrote. “People would pass them around and improve them, making them more powerful and more general. The collections of redefinitions gradually became system programs in their own right.”~{ Sam Williams, /{Free as in Freedom: Richard Stallman’s Crusade for Free Software}/ (Sebastopol, CA: O’Reilly & Associates 2002), pp. 76–88. }~ Emacs was one of the first software projects to demonstrate the feasibility of large-scale software collaboration and the deep well of innovative ideas that it could yield. Emacs enabled programmers to add new features with great ease, and to constantly upgrade and customize the program with the latest improvements. The Emacs experiment demonstrated that /{sharing}/ and /{interoperability}/ are vital principles for a flourishing online commons. +={Emacs+2;Stallman, Richard:Emacs, and+2} + +Two problems quickly emerged, however. If people did not communicate their innovations back to the group, divergent streams of incompatible code would produce a Tower of Babel effect. Second, if the code and its derivations were not shared with everyone, the usefulness of the program would slowly decline. The flow of innovation would dissipate. + +To solve these problems, Stallman invented a user contract that he called the “Emacs Commune.” It declared to all users that Emacs was “distributed on a basis of communal sharing, which means that all improvements must be given back to me to be incorporated and distributed.” He enforced the provisions of the contract with an iron hand. 
As Stallman biographer Sam Williams writes, when the administrators for the MIT Laboratory for Computer Science instituted a new password system — which Stallman considered an antisocial power grab — he “initiated a software ‘strike,’ refusing to send lab members the latest version of Emacs until they rejected the security system on the lab’s computers. The move did little to improve Stallman’s growing reputation as an extremist, but it got the point across: commune members were expected to speak up for basic hacker values.” +={Williams, Sam} + +Stallman was groping for a way to sustain the hacker ethic of community and sharing in the face of new types of top-down control. Some programmers were beginning to install code that would turn off access to a program unless money was paid. Others were copyrighting programs that had been developed by the community of programmers. Bill Gates, as an undergraduate at Harvard in the late 1970s, was nearly expelled for using publicly funded labs to create commercial software. He was forced to put his code into the public domain, whereupon he left the university to found an obscure Albuquerque company called Micro-Soft. +={authorship:community access;Gates, Bill;hackers:use of term+5} + +Software was simply becoming too lucrative for it to remain a shared resource — an attitude that enraged Stallman. He was determined to preserve the integrity of what we would now call the software commons. It was an immense challenge because copyright law makes no provisions for community ownership of creative work beyond “joint authorship” among named individuals. Stallman wanted to devise a way to ensure that all the talent and innovation created by commoners would /{stay}/ in the commons. The idea that an outsider — a university administrator, software entrepreneur, or large company — could intrude upon a hacker community and take its work was an appalling injustice to Stallman. 
+={authorship:joint;copyright law:community authorship vs.;commons:copyright law vs.} + +Yet this was precisely what was happening to the hacker community at MIT’s AI Lab in the early 1980s. It was slowly disintegrating as one programmer after another trooped off to join commercial software ventures; the software itself was becoming annexed into the marketplace. Software for personal computers, which was just then appearing on the market, was sold as a proprietary product. This meant that the source code — the deep design architecture of the program that operated everything — was inaccessible.~{ Steven Levy, /{Hackers: Heroes of the Computer Revolution}/ (New York: Delta, 1993), pp. 425, 427. }~ Perhaps most disturbing to Stallman at the time was that the leading mainframe operating system, Unix, was locking up its source code. Unix had been developed by AT&T with generous federal funding, and had been generally available for free within academic computing circles. At the time, most mainframe software was given away to encourage buyers to purchase the computer hardware. But when the Department of Justice broke up AT&T in 1984 to spur competition, it also enabled AT&T to enter other lines of business. Naturally, the company was eager to maximize its profits, so in 1985 it began to charge a licensing fee for Unix. +={AT&T;Unix;software:source code for} + +Stallman grieved at the disintegration of the hacker community at the AI Lab as closed software programs inexorably became the norm. As he wrote at the time: + +_1 The people remaining at the lab were the professors, students, and non-hacker researchers, who did not know how to maintain the system, or the hardware, or want to know. Machines began to break and never be fixed; sometimes they just got thrown out. Needed changes in software could not be made. The non-hackers reacted to this by turning to commercial systems, bringing with them fascism and license agreements. 
I used to wander through the lab, through the rooms so empty at night where they used to be full, and think, “Oh my poor AI lab! You are dying and I can’t save you.” + +Stallman compared himself to Ishi, “the last survivor of a dead [Native American] culture. And I don’t really belong in the world anymore. And in some ways I feel I ought to be dead.” + +Stallman decided to leave MIT — why stay? — but with a brash plan: to develop a free software operating system that would be compatible with Unix. It would be his brave, determined effort to preserve the hacker ethic. He dubbed his initiative the GNU Project, with “GNU” standing for “GNU’s Not Unix” — a recursive hacker’s pun. He also started, in 1985, the Free Software Foundation to help develop GNU software projects and distribute them for free to anyone. (The foundation now occupies a fifth-floor office on a narrow commercial street in downtown Boston.) +={Unix;free software;Free Software Foundation;GNU Project+3;Stallman, Richard:free software, and|GNU Project, and+3} + +The Emacs Commune experience had taught Stallman about the limits of informal social norms in protecting the software commons. It also revealed the difficulties of being the central coordinator of all code changes. This time, in developing a set of software programs for his GNU Project, Stallman came up with a better idea — a legally enforceable license. The goal was to ensure that people could have free access to all derivative works and share and reuse software. The licensing rights were based on the rights of ownership conferred by copyright law. +={Emacs;Stallman, Richard:Emacs, and;authorship:community access;copyright law:community authorship vs.+2|licensing rights+2} + +Stallman called his license the GNU General Public License, or GPL. He puckishly referred to it as “copyleft,” and illustrated it with a reverse copyright symbol (a backward c in a circle). 
Just as programmers pride themselves on coming up with ingenious hacks to solve a software problem, so the GPL is regarded as a world-class hack around copyright law. Copyright law has no provisions for protecting works developed by a large community of creators. Nor does it offer a way to prevent works from being made proprietary. Indeed, that’s the point of copyright law — to create private property rights. +={software:proprietary;copyright law:property rights, and;General Public License (GPL)+14;property rights, and copyright law;Stallman, Richard:GPL, and+14} + +The GPL bypasses these structural limitations of copyright law by carving out a new zone of collective ownership. A work licensed under the GPL permits users to run any program, copy it, modify it, and distribute it in any modified form. The only limitation is that any derivative work must also be licensed under the GPL. This provision of the GPL means that the license is /{automatically}/ applied to any derivative work, and to any derivative of a derivative, and so on — hence its viral nature.~[* Stallman told me he considers it “a common calumny to compare the GNU GPL to a virus. That is not only insulting (I have a virus infection in my throat right now and it is no fun), it is also inaccurate, because the GPL does not spread like a virus. It spreads like a spider plant: if you cut off a piece and plant it over here, it grows over here.”]~ The GPL ensures that the value created by a given group of commoners shall stay within the commons. To guarantee the viral power of the license, users of GPL’d works cannot modify the licensing terms. No one has to pay to use a GPL’d work — but as a condition for using it, people are legally obliged to license any derivative versions under the GPL.
In this way, a GPL’d work is born and forever protected as “shareable.” +={collective ownership;General Public License (GPL):viral nature of;software:user freedoms;Stallman, Richard:freedom, and} + +Version 1.0 of the GPL was first published in 1989. It was significant, writes Sam Williams, because it “demonstrated the intellectual similarity between legal code and software code. Implicit within the GPL’s preamble was a profound message: instead of viewing copyright law with suspicion, hackers should view it as yet another system begging to be hacked.”~{ Williams, /{Free as in Freedom}/, p. 127. }~ The GPL also served to articulate, as a matter of law, the value of collaborative work. A universe of code that might previously have been regarded as part of the “public domain” — subject to free and unrestricted access — could now be seen in a subtly different light. +={authorship:community access;Williams, Sam;public domain: GPL, and} + + +A GPL’d work is not part of the public domain, because the public domain has no rules constraining how a work may be used. Works in the public domain are open to anyone. The GPL is similar, but with one very important restriction: no private appropriation is allowed. Any follow-on uses must remain free for others to use (a provision that some property rights libertarians regard as “coercive”). Works in the public domain, by contrast, are vulnerable to privatization because someone need only add a smidgen of “originality” to the work and she would own a copyright in the resulting work. A GPL’d work and its derivatives stay free forever — because anyone who tries to privatize a GPL’d work is infringing on the license. 
+ +For Stallman, the GPL became the symbol and tool for enacting his distinct political vision of “freedom.” The license rests on four kinds of freedoms for users of software (which he lists using computer protocols): +={software:user freedoms+4;Stallman, Richard:freedom, and+5} + +_1 Freedom 0: The freedom to run the program for any purpose; + +_1 Freedom 1: The freedom to study how the program works, and to adapt it to your needs. (Access to the source code is a precondition for this); +={authorship:community access} + +_1 Freedom 2: The freedom to redistribute copies so you can help your neighbor; and + +_1 Freedom 3: The freedom to improve the program, and release your improvements to the public, so that the whole community benefits. (Access to the source code is a precondition for this.) +={authorship:community access} + +Stallman has become an evangelist for the idea of freedom embodied in all the GNU programs. He refuses to use any software programs that are not “free,” and he has refused to allow his appearances to be Webcast if the software being used was not “free.” “If I am to be an honest advocate for free software,” said Stallman, “I can hardly go around giving speeches, then put pressure on people to use nonfree software. I’d be undermining my own cause. And if I don’t show that I take my principles seriously, I can’t expect anybody else to take them seriously.”~{ Stallman at MIT forum, “Copyright and Globalization in the Age of Computer Networks,” April 19, 2001, available at http://media-in-transition.mit.edu/forums/copyright/transcript.html. }~ +={Stallman, Richard:free software, and+2} + +Stallman has no problems with people making money off software. He just wants to guarantee that a person can legally use, copy, modify, and distribute the source code. There is thus an important distinction between software that is commercial (possibly free) and software that is proprietary (never free).
Stallman tries to explain the distinction in a catchphrase that has become something of a mantra in free software circles: /{“free as in ‘free speech,’ not as in ‘free beer.’”}/ The point is that code must be freely accessible, not that it should be free of charge. (This is why “freeware” is not the same as free software. Freeware may be free of charge, but it does not necessarily make its source code accessible.) +={freeware vs. free software;software:proprietary|source code for} + +Eben Moglen, a professor of law at Columbia University and general counsel for the Free Software Foundation since 1994, calls the provisions of the GPL “elegant and simple. They respond to the proposition that when the marginal cost of goods is zero, any nonzero cost of barbed wire is too high. That’s a fact about the twenty-first century, and everybody had better get used to it. Yet as you know, there are enormous cultural enterprises profoundly committed to the proposition that more and more barbed wire is necessary. And their basic strategy is to get that barbed wire paid for by the public everywhere.”~{ Eben Moglen, “Freeing the Mind: Free Software and the Death of Proprietary Culture,” June 29, 2003, available at http://emoglen.law.columbia.edu/publications/maine-speech.html. }~ +={Moglen, Eben;Free Software Foundation} + +The GPL truly was something new under the sun: a legally enforceable tool to vouchsafe a commons of software code. The license is based on copyright law yet it cleverly turns copyright law against itself, limiting its reach and carving out a legally protected zone to build and protect the public domain. In the larger scheme of things, the GPL was an outgrowth of the “gift economy” ethic that has governed academic life for centuries and computer science for decades. What made the GPL different from these (abridgeable) social norms was its legal enforceability.
+={gift economy;Internet:gift economy of+1;General Public License (GPL):legal enforceability of} + +The GPL might well have remained an interesting but arcane curiosity of the software world but for two related developments: the rise of the Internet in the 1990s and software’s growing role as core infrastructure in modern society. As the computer and Internet revolutions have transformed countless aspects of daily life, it has become evident that software is not just another product. Its design architecture is seminally important to our civic freedoms and democratic culture. Or as Lawrence Lessig famously put it in his 1999 book /{Code}/, “code is law.” Software can affect how a business can function, how information is organized and presented, and how individuals can think, connect with one another, and collaborate. Code invisibly structures people’s relationships, and thus serves as a kind of digital constitutional order. As an economic force, software has become as critical as steel or transportation in previous eras: a building block for the basic activities of the economy, businesses, households, and personal life. +={Lessig, Lawrence:Code and Other Laws of Cyberspace;code:as law;free culture:Internet, of the+2;law:code as;Internet:rise of;software:core infrastructure, as} + +Stallman’s atavistic zeal to preserve the hacker community, embodied in the GPL, did not immediately inspire others. In fact, most of the tech world was focused on how to convert software into a marketable product. Initially, the GPL functioned like a spore lying dormant, waiting until a more hospitable climate could activate its full potential. Outside of the tech world, few people knew about the GPL, or cared.~[* The GPL is not the only software license around, of course, although it was, and remains, the most demanding in terms of protecting the commons of code. 
Other popular open-source licenses include the MIT, BSD, and Apache licenses, but each of these permit, but do not require, that the source code of derivative works also be freely available. The GPL, however, became the license used for Linux, a quirk of history that has had far-reaching implications.]~ And even most techies were oblivious to the political implications of free software. +={hackers:community of} + +Working under the banner of the Free Software Foundation, Stallman continued through the 1980s and 1990s to write a wide number of programs needed to build a completely free operating system. But just as Lennon’s music was better after finding McCartney, Stallman’s free software needed to find Linus Torvalds’s kernel for a Unix-like operating system. (A kernel is the core element of an operating system that controls how the various applications and utilities that comprise the system will run.) +={Free Software Foundation;Torvalds, Linus+1;Unix+1} + +In 1991, Torvalds was a twenty-one-year-old computer science student at the University of Helsinki, in Finland. Frustrated by the expense and complexity of Unix, and its inability to work on personal computers, Torvalds set out to build a Unix-like operating system on his IBM AT, which had a 33-megahertz processor and four megabytes of memory. Torvalds released a primitive version of his program to an online newsgroup and was astonished when a hundred hackers responded within a few months to offer suggestions and additions. Over the next few years, hundreds of additional programmers joined the project, which he named “Linux” by combining his first name, “Linus,” with “Unix.” The first official release of his program came in 1994.~{ One useful history of Torvalds and Linux is Glyn Moody, /{Rebel Code: Inside Linux and the Open Source Revolution}/ (Cambridge, MA: Perseus, 2001). 
}~ +={hackers:community of+3;Linux:development of+7} + +The Linux kernel, when combined with the GNU programs developed by Stallman and his free software colleagues, constituted a complete computer operating system — an astonishing and unexpected achievement. Even wizened computer scientists could hardly believe that something as complex as an operating system could be developed by thousands of strangers dispersed around the globe, cooperating via the Internet. Everyone assumed that a software program had to be organized by a fairly small group of leaders actively supervising the work of subordinates through a hierarchical authority system — that is, by a single corporation. Yet here was a virtual community of hackers, with no payroll or corporate structure, coming together in a loose, voluntary, quasi-egalitarian way, led by leaders who had earned the trust and respect of some highly talented programmers. + +The real innovation of Linux, writes Eric S. Raymond, a leading analyst of the technology, was “not technical, but sociological”: +={Linux:sociological effect of+1} + +_1 Linux was rather casually hacked on by huge numbers of volunteers coordinating only through the Internet. Quality was maintained not by rigid standards or autocracy but by the naively simple strategy of releasing every week and getting feedback from hundreds of users within days, creating a sort of rapid Darwinian selection on the mutations introduced by developers. To the amazement of almost everyone, this worked quite well.~{ Eric S. Raymond, “A Brief History of Hackerdom,” http://www.catb.org/~esr/writings/cathedral-bazaar/hacker-history/ar01s06.html.}~ + +The Free Software Foundation had a nominal project to develop a kernel, but it was not progressing very quickly.
The Linux kernel, while primitive, “was running and ready for experimentation,” writes Steven Weber in his book /{The Success of Open Source}/: “Its crude functionality was interesting enough to make people believe that it could, with work, evolve into something important. That promise was critical and drove the broader development process from early on.”~{ Steven Weber, /{The Success of Open Source}/ (Cambridge, MA: Harvard University Press, 2004), p. 100. }~ +={Weber, Steven:The Success of Open Source;Free Software Foundation} + +There were other powerful forces driving the development of Linux. Throughout the 1990s, Microsoft continued to leverage its monopoly grip over the operating system of personal computers, eventually attracting the attention of the U.S. Department of Justice, which filed an antitrust lawsuit against the company. Software competitors such as Hewlett-Packard, Sun Microsystems, and IBM found that rallying behind an open-source alternative — one that was legally protected against being taken private by anyone else— offered a terrific way to compete against Microsoft. +={Hewlett-Packard;IBM:open source, and;Microsoft:antitrust lawsuit against|competition against+2} + +Meanwhile, the once-free Unix software program was becoming a fragmented mess. So many different versions of Unix were being sold that users were frustrated by the proliferation of incompatible proprietary versions. In the words of a Sun Microsystems executive at the time, users were unhappy with the “duplication of effort around different implementations, leading to high prices; poor compatibility; and worst of all, slower development as each separate Unix vendor had to solve the same kinds of problems independently. Unix has become stagnant. . . .”~{ Williams, /{Free as in Freedom}/, p. 100.}~ +={Unix+1;Sun Microsystems} + +Given these problems, there was great appeal in a Unix-like operating system with freely available source code. 
Linux helped address the fragmentation of Unix implementations and the difficulties of competing against the Microsoft monopoly. Knowing that Linux was GPL’d, hackers, academics, and software companies could all contribute to its development without fear that someone might take it private, squander their contributions, or use it in hostile ways. A commons of software code offered a highly pragmatic solution to a market dysfunction. +={General Public License (GPL):Linux, and;GNU/Linux+5;Linux:GPL, and} + +Stallman’s GNU Project and Torvalds’s Linux software were clearly synergistic, but they represented very different styles. The GNU Project was a slower, more centrally run project compared to the “release early and often” developmental approach used by the Linux community. In addition, Stallman and Torvalds had temperamental and leadership differences. Stallman has tended to be more overbearing and directive than Torvalds, who does not bring a political analysis to the table and is said to be more tolerant of diverse talents.~{ Torvalds included a brief essay, “Linux kernel management style,” dated October 10, 2004, in the files of the Linux source code, with the annotation, “Wisdom passed down the ages on clay tablets.” It was included as an epilogue in the book /{Open Life: The Philosophy of Open Source}/, by Henrik Ingo, and is available at http://www.openlife.cc/node/43. }~ +={Torvalds, Linus;GNU Project;Stallman, Richard:GNU Project, and} + +So despite their natural affinities, the Free Software Community and the Linux community never found their way to a grand merger. Stallman has applauded Linux’s success, but he has also resented the eclipse of GNU programs used in the operating system by the Linux name. This prompted Stallman to rechristen the program “GNU/Linux,” a formulation that many people now choose to honor. 
+ +Yet many hackers, annoyed at Stallman’s political crusades and crusty personal style, committed their own linguistic raid by renaming “free software” as “open source software,” with a twist. As GNU/Linux became more widely used in the 1990s, and more corporations began to seriously consider using it, the word /{free}/ in “free software” was increasingly seen as a problem. The “free as in free speech, not as in free beer” slogan never quite dispelled popular misconceptions about the intended sense of the word /{free}/. Corporate information technology (IT) managers were highly wary about putting mission-critical corporate systems in the hands of software that could be had for /{free}/. Imagine telling the boss that you put the company’s fate in the hands of a program you downloaded from the Internet for free! +={free software:open source software, as+6;software:open source+6;free software:uses of term+6} + +Many corporate executives clearly recognized the practical value of free software; they just had no interest in joining Stallman’s ideological crusade or being publicly associated with him. They did not necessarily want to become champions of the “four freedoms” or the political vision implicit in free software. They simply wanted code that works well. As Eric Raymond wrote: “It seemed clear to us in retrospect that the term ‘free software’ had done our movement tremendous damage over the years. Part of this stemmed from the well-known ‘free speech/free beer’ ambiguity. Most of it came from something worse — the strong association of the term ‘free software’ with hostility to intellectual property rights, communism, and other ideas hardly likely to endear themselves to an MIS [management information systems] manager.”~{ Eric S. Raymond, “The Revenge of the Hackers,” in Chris DiBona, Sam Ockman, and Mark Stone, eds., /{Open Sources: Voices from the Open Source Revolution}/ (Sebastopol, CA: O’Reilly & Associates, 1999), p. 212. 
}~ +={Stallman, Richard:free software, and+5;Raymond, Eric S.} + +One response to this issue was the rebranding of free software as “open-source” software. A number of leading free software programmers, most notably Bruce Perens, launched an initiative to set forth a consensus definition of software that would be called “opensource.” At the time, Perens was deeply involved with a community of hackers in developing a version of Linux known as the Debian GNU/Linux distribution. Perens and other leading hackers not only wanted to shed the off-putting political dimensions of “free software,” they wanted to help people deal with the confusing proliferation of licenses. A lot of software claimed to be free, but who could really tell what that meant when the terms were so complicated and legalistic? +={Debian GNU/Linux distribution;Perens, Bruce} + +The Open Source Initiative, begun in 1998, helped solve this problem by enumerating criteria that it considered significant in judging a program to be “open.”~{ http://www.opensource.org. }~ Its criteria, drawn from the Debian community, helped standardize and stabilize the definition of open-source software. Unlike the GPL, permissive software licenses such as BSD and MIT /{allow}/ a program to be freely copied, modified, and distributed but don’t /{require}/ it. A programmer can choose to make a proprietary derivative without violating the license. +={Open Source Initiative+2} + +The Open Source Initiative has focused more on the practical, technical merits of software than on the moral or political concerns that have consumed Stallman. Free software, as Stallman conceived it, is about building a cohesive moral community of programmers dedicated to “freedom.” The backers of open-source software are not necessarily hostile to those ideals but are more interested in building reliable, marketable software and improving business performance. 
As Elliot Maxwell described the free software/open source schism: +={Maxwell, Elliot+1} + +_1 [S]upporters of the Open Source Initiative were willing to acknowledge a role for proprietary software and unwilling to ban any link between open-source software and proprietary software. Richard Stallman aptly characterized the differences: “We disagree on the basic principles but agree more or less on the practical recommendations. So we can and do work together on many specific projects.”~{ Elliot Maxwell, citing Wikipedia entry on “Open Source Movement,” in “Open Standards Open Source and Open Innovation,” in /{Innovations: Technology, Governance, Globalization}/ 1, no. 3 (Summer 2006), p. 134, note 56. }~ + +The philosophical rift between free software and open-source software amounts to a “friendly schism,” a set of divergent approaches that has been bridged in some respects by language.~{ Richard Stallman has outlined his problems with the “open source” definition of software development in an essay, “Why ‘Open Source’ Misses the Point of Free Software,” http://www.gnu.org/philosophy/open-source-misses-the-point.html. }~ Observers often use the acronym FOSS to refer to both free software and open-source software, or sometimes FLOSS — the L stands for the French word /{libre}/, which avoids the double meaning of the English word /{free}/. Whatever term is used, free and open-source software has become a critical tool for making online marketplaces more competitive, and for creating open, accessible spaces for experimentation.
In his classic essay, “The Cathedral and the Bazaar,” Eric Raymond explains how the licenses help elicit important noneconomic, personal energies: +={FOSS/FLOSS+3;free software:FOSS/FLOSS+3;Raymond, Eric S.+1;Linux:sociological effect of+1} + + +_1 The Linux world behaves in many respects like a free market or an ecology, a collection of selfish agents attempting to maximize utility which in the process produces a self-correcting spontaneous order more elaborate and efficient than any amount of central planning could have achieved. . . . The utility function Linux hackers are maximizing is not classically economic, but is the intangible of their own ego satisfaction and reputation among other hackers.~{ Eric Raymond, “The Cathedral and the Bazaar,” available at http://www.catb.org/~esr/writings/cathedral-bazaar/cathedral-bazaar/ar01s11.html.}~ + +It turns out that an accessible collaborative process, FOSS, can elicit passions and creativity that entrenched markets often cannot. In this respect, FOSS is more than a type of freely usable software; it reunites two vectors of human behavior that economists have long considered separate, and points to the need for new, more integrated theories of economic and social behavior. +={free software:economic effects of+1} + +FOSS represents a new breed of “social production,” one that draws upon social energies that neoclassical economists have long discounted or ignored. It mobilizes the personal passions and moral idealism of individuals, going beyond the overt economic incentives that economists consider indispensable to wealth creation. The eighteenth-century economist Adam Smith would be pleased. He realized, in his 1776 book /{The Wealth of Nations}/, that people are naturally given to “truck, barter and exchange” — but he also recognized, in his earlier /{The Theory of Moral Sentiments}/, written in 1759, that people are motivated by deep impulses of human sympathy and morality.
Neoclassical economists have long segregated these as two divergent classes of human behavior, regarding altruism and social sympathies as subordinate to the rational, utility-maximizing, self-serving behavior. FOSS embodies a new synthesis — and a challenge to economists to rethink their crude model of human behavior, /{Homo economicus}/. Free software may have started as mere software, but it has become an existence proof that individual and collective goals, and the marketplace and the commons, are not such distinct arenas.~{ I am grateful to Nicholas Gruen for this insight, taken from his essay “Geeks Bearing Gifts: Open Source Software and Its Enemies,” in /{Policy}/ 21, no. 2 (Winter 2005), pp. 39–48. }~ They are tightly intertwined, but in ways we do not fully understand. This is a golden thread that will reappear in later chapters. +={Smith, Adam:The Theory of Moral Sentiments;social production+6} + +Red Hat, a company founded in 1993 by Robert Young, was the first to recognize the potential of selling a custom version (or “distribution”) of GNU/Linux as a branded product, along with technical support. A few years later, IBM became one of the first large corporations to recognize the social realities of GNU/Linux and its larger strategic and competitive implications in the networked environment. In 1998 IBM presciently saw that the new software development ecosystem was becoming far too variegated and robust for any single company to dominate. It understood that its proprietary mainframe software could not dominate the burgeoning, diversified Internet-driven marketplace, and so the company adopted the open-source Apache Web server program in its new line of WebSphere business software. +={Red Hat;Young, Robert;GNU/Linux:IBM, and+1|Red Hat, and;IBM:GNU/Linux, and+1;Apache Web server;open source software:functions of+2} + +It was a daring move that began to bring the corporate and open-source worlds closer together.
Two years later, in 2000, IBM announced that it would spend $1 billion to help develop GNU/Linux for its customer base. IBM shrewdly realized that its customers wanted to slash costs, overcome system incompatibilities, and avoid expensive technology “lock-ins” to single vendors. GNU/Linux filled this need well. IBM also realized that GNU/Linux could help it compete against Microsoft. By assigning its property rights to the commons, IBM could eliminate expensive property rights litigation, entice other companies to help it improve the code (they could be confident that IBM could not take the code private), and unleash a worldwide torrent of creative energy focused on GNU/Linux. Way ahead of the curve, IBM decided to reposition itself for the emerging networked marketplace by making money through tech service and support, rather than through proprietary software alone.~{ Andrew Leonard, “How Big Blue Fell for Linux,” Salon.com, September 12, 2000, available at http://www.salon.com/tech/fsp/2000/09/12/chapter_7_ part_one.print.html. The competitive logic behind IBM’s moves are explored in Pamela Samuelson, “IBM’s Pragmatic Embrace of Open Source,” /{Communications of the ACM}/ 49, no. 21 (October 2006), and Robert P. Merges, “A New Dynamism in the Public Domain,” /{University of Chicago Law Review}/ 71, no. 183 (Winter 2004). }~ +={Microsoft:competition against} + +It was not long before other large tech companies realized the benefits of going open source. Amazon and eBay both saw that they could not affordably expand their large computer infrastructures without converting to GNU/Linux. GNU/Linux is now used in everything from Motorola cell phones to NASA supercomputers to laptop computers. 
In 2005, /{BusinessWeek}/ magazine wrote, “Linux may bring about the greatest power shift in the computer industry since the birth of the PC, because it lets companies replace expensive proprietary systems with cheap commodity servers.”~{ Steve Hamm, “Linux Inc.,” /{BusinessWeek}/, January 31, 2005. }~ As many as one-third of the programmers working on open-source projects are corporate employees, according to a 2002 survey.~{ Cited by Elliot Maxwell in “Open Standards Open Source and Open Innovation,” note 80, Berlecon Research, /{Free/Libre Open Source Software: Survey and Study — Firms’ Open Source Activities: Motivations and Policy Implications}/, FLOSS Final Report, Part 2, at www.berlecon.de/studien/downloads/200207FLOSS _Activities.pdf. }~ +={Amazon;eBay} + +With faster computing speeds and cost savings of 50 percent or more on hardware and 20 percent on software, GNU/Linux has demonstrated the value proposition of the commons. Open source demonstrated that it can be cheaper and more efficacious to collaborate in the production of a shared resource based on common standards than to strictly buy and own it as private property. +={commons:value proposition of;value:creation of+1:see also Great Value Shift} + +But how does open source work without a conventional market apparatus? The past few years have seen a proliferation of sociological and economic theories about how open-source communities create value. One formulation, by Rishab Ghosh, compares free software development to a “cooking pot,” in which you can give a little to the pot yet take a lot — with no one else being the poorer. 
“Value” is not measured economically at the point of transaction, as in a market, but in the nonmonetary /{flow}/ of value that a project elicits (via volunteers) and generates (through shared software).~{ Rishab Aiyer Ghosh, “Cooking Pot Markets and Balanced Value Flows,” in Rishab Aiyer Ghosh, ed., /{CODE: Collaborative Ownership and the Digital Economy}/ (Cambridge, MA: MIT Press, 2005), pp. 153–68. }~ Another important formulation, which we will revisit later, comes from Harvard law professor Yochai Benkler, who has written that the Internet makes it cheap and easy to access expertise anywhere on the network, rendering conventional forms of corporate organization costly and cumbersome for many functions. Communities based on social trust and reciprocity are capable of mobilizing creativity and commitment in ways that market incentives often cannot — and this can have profound economic implications.~{ See, e.g., Benkler, “Coase’s Penguin, or Linux and the Nature of the Firm,” /{Yale Law Journal}/ 112, no. 369 (2002); Benkler, “ ‘Sharing Nicely’: On Shareable Goods and the Emergence of Sharing as a Modality of Economic Production,” Yale Law Journal 114, no. 273 (2004).}~ Benkler’s analysis helps explain how a global corps of volunteers could create an operating system that, in many respects, outperforms software created by a well-paid army of Microsoft employees. +={Benkler, Yochai:open networks, on;FOSS/FLOSS;free software:FOSS/FLOSS;Ghosh, Rishab;open source software:economic implications of|uses of term+4} + +A funny thing happened to free and open-source software as it matured. It became hip. It acquired a cultural cachet that extends well beyond the cloistered precincts of computing. “Open source” has become a universal signifier for any activity that is participatory, collaborative, democratic, and accountable. 
Innovators within filmmaking, politics, education, biological research, and drug development, among other fields, have embraced the term to describe their own attempts to transform hidebound, hierarchical systems into open, accessible, and distributed meritocracies. Open source has become so much of a cultural meme — a self-replicating symbol and idea — that when the Bikram yoga franchise sought to shut down unlicensed uses of its yoga techniques, dissident yoga teachers organized themselves into a nonprofit that they called Open Source Yoga Unity. To tweak the supremacy of Coca-Cola and Pepsi, culture jammers even developed nonproprietary recipes for a cola drink and beer called “open source cola” and “open source beer.”~{ Open Source Yoga Unity, http://www.yogaunity.org; open-source cola, http://alfredo.octavio.net/soft_drink_formula.pdf; open-source beer, Vores OI (Danish for “Our Beer”), http://en.wikipedia.org/wiki/Vores_%C3%981 . See also http://freebeer.org/blog and http://www.project21.ch/freebeer. }~ +={free software:uses of term+5} + +Stallman’s radical acts of dissent in the 1980s, regarded with bemusement and incredulity at the time, have become, twenty-five years later, a widely embraced ideal. Small-/{d}/ democrats everywhere invoke open source to lambaste closed and corrupt political systems and to express their aspirations for political transcendence. People invoke open source to express a vision of life free from overcommercialization and corporate manipulation. The term enables one to champion bracing democratic ideals without seeming naïve or flaky because, after all, free software is solid stuff. Moreover, despite its image as the software of choice for granola-loving hippies, free and open-source software is entirely compatible with the commercial marketplace. How suspect can open source be when it has been embraced by the likes of IBM, Hewlett-Packard, and Sun Microsystems? 
+={Stallman, Richard:influence of|free software, and+4;Hewlett-Packard;IBM:open source, and;Sun Microsystems} + +The appeal of “openness” has become so great that it is sometimes difficult to recognize that /{limits}/ on openness are not only necessary but desirable. The dark side of openness is the spam that clogs the Internet, the ability to commit fraud and identity theft, and the opportunities for disturbed adults to prey sexually upon children. Still, the virtues of an open environment are undeniable; what is more difficult is negotiating the proper levels of openness for a given realm of online life. +={openness:limitations of} + +Nearly twenty years after the introduction of the GPL, free software has expanded phenomenally. It has given rise to countless FOSS software applications, many of which are major viral hits such as Thunderbird (e-mail), Firefox (Web browser), Ubuntu (desktop GNU/Linux), and Asterisk (Internet telephony). FOSS has set in motion, directly or indirectly, some powerful viral spirals such as the Creative Commons licenses, the iCommons/free culture movement, the Science Commons project, the open educational resource movement, and a new breed of open-business ventures, Yet Richard Stallman sees little connection between these various “open” movements and free software; he regards “open” projects as too vaguely defined to guarantee that their work is truly “free” in the free software sense of the term. “Openness and freedom are not the same thing,” said Stallman, who takes pains to differentiate free software from open-source software, emphasizing the political freedoms that lie at the heart of the former.~{ Interview with Richard Stallman, January 21, 2008. }~ +={FOSS/FLOSS;free software:FOSS/FLOSS|expansion of;Stallman, Richard:influence of|freedom, and} + +Any revolution is not just about new tools and social practices, however. It is also about developing new ways of understanding the world. 
People must begin to /{see}/ things in a new perspective and /{talk}/ with a new vocabulary. In the 1990s, as Disney, Time Warner, Viacom, and other media giants realized how disruptive the Internet might be, the public was generally oblivious that it might have a direct stake in the outcome of Internet and copyright policy battles. Big Media was flexing its muscles to institute all sorts of self-serving, protectionist fixes — copy-protection technologies, broader copyright privileges, one-sided software and Web licenses, and much more — and most public-interest groups and civic organizations were nowhere to be seen. + +Fortunately, a small but fierce and keenly intelligent corps of progressive copyright scholars were beginning to discover one another in the 1990s. Just as the hacker community had had to recognize the enclosure of its commons of software code, and embrace the GPL and other licenses as defensive remedies, so progressive copyright scholars and tech activists were grappling with how to defend against a related set of enclosures, The relentless expansion of copyright law was eroding huge swaths of the public domain and fair use doctrine. Tackling this problem required asking a question that few in the legal or political establishments considered worth anyone’s time — namely, What’s so valuable about the public domain, anyway? + +1~ 2 THE DISCOVERY OF THE PUBLIC DOMAIN +={public domain+93} + +/{How a band of irregulars demonstrated that the public domain is enormously valuable after all.}/ + +For decades, the public domain was regarded as something of a wasteland, a place where old books, faded posters, loopy music from the early twentieth century, and boring government reports go to die. It was a dump on the outskirts of respectable culture. If anything in the public domain had any value, someone would sell it for money. Or so goes the customary conception of the public domain. 
+ +Jack Valenti, the longtime head of the Motion Picture Association of America, once put it this way: “A public domain work is an orphan. No one is responsible for its life. But everyone exploits its use, until that time certain when it becomes soiled and haggard, barren of its previous virtues. Who, then, will invest the funds to renovate and nourish its future life when no one owns it?”~{ Jack Valenti, “A Plea for Keeping Alive the U.S. Film Industry’s Competitive Energy, ” testimony on behalf of the Motion Picture Association of America to extend the term of copyright protection, Senate Judiciary Committee, September 20, 1995, at http://instructors.cwrl.utexas.edu/~martin/Valenti .pdf. }~ (Valenti was arguing that longer copyright terms would give film studios the incentive to digitize old celluloid films that would otherwise enter the public domain and physically disintegrate.) +={Valenti, Jack} + +One of the great, unexplained mysteries of copyright law is how a raffish beggar grew up to be King Midas. How did a virtually ignored realm of culture — little studied and undertheorized— become a subject of intense scholarly interest and great practical importance to commoners and businesses alike? How did the actual value of the public domain become known? The idea that the public domain might be valuable in its own right — and therefore be worth protecting — was a fringe idea in the 1990s and before. So how did a transformation of legal and cultural meaning occur? +={public domain:emergence of+4|copyright law, and+3} + +Unlike Richard Stallman’s crusade to create a sustainable public domain of code,~[* Free software constitutes a “sustainable public domain” because the General Public License protects the code and its derivatives from private appropriation yet otherwise makes the code free for anyone to use. 
The public domain, by contrast, is vulnerable to private appropriation in practice if a company has sufficient market power (e.g., Disney’s appropriation of fairy tales) or if it uses the public domain to make derivative works and then copyrights them (e.g., vendors who mix government data with proprietary enhancements).]~ the discovery of the public domain for cultural works was not led by a single protagonist or group. It emerged over time through a loose network of legal scholars, techies, activists, and some businesses, who were increasingly concerned about worrisome expansions of copyright and patent law. Slowly, a conversation that was occurring in a variety of academic and tech communities began to intensify, and then coalesce into a more coherent story. +={copyright law: expansion of} + +Scholarship about copyright law is not exactly gripping stuff. But it has played an important role in the viral spiral. Before anyone could begin to imagine how an online commons could be structured and protected, someone needed to explain how intellectual property law had become “uncontrolled to the point of recklessness” — as law professor David Lange put it in 1981, well before the proprietarian explosion of the late 1980s and 1990s. +={Lange, David;copyright law:scholarship about} + +Fortunately, a new breed of public-spirited professors was reaching a critical mass just as the Internet was becoming culturally important. These professors, collaborating with programmers and activists, were among the first to understand the ways in which copyright law, historically an arcane backwater of law, was starting to pose serious threats to democracy-loving citizens and Internet users. 
The full complexity of this legal literature over the past generation cannot be unpacked here, but it is important to understand how progressive copyright scholarship played a critical role in identifying dangerous trends in law and technology — and in constructing a new narrative for what copyright law should be. + +This legal scholarship reconceptualized the public domain— then a vague notion of nonproperty — and developed it into an affirmative theory. It gave the public domain sharper definition and empirical grounding. Thinkers like Yochai Benkler (Harvard Law School), Lawrence Lessig (Stanford Law), and James Boyle (Duke Law) developed bracing new theories that recognize the power of social communities, and not just the individual, in the creative process. Others, such as Julie Cohen (Georgetown Law Center) and Pamela Samuelson (Boalt Hall), have respectively explored the need to develop a new social theory of creative practice~{ Julie E. Cohen, “Copyright, Commodification and Culture: Locating the Public Domain,” in Lucie Guibaut and P. Bernt Hugenholtz eds. /{The Future of the Public Domain: Identifying the Commons in Information Law}/ (The Netherlands: Kluwer Law International, 2006), pp. 121–66. }~ and the theoretical challenges of “mapping” the public domain.~{ Pamela Samuelson, “Challenges in Mapping the Public Domain,” in Guibault and Hugenholtz, eds. /{The Future of the Public Domain}/, pp. 7–26. }~ All of this thinking, mostly confined to scholarly workshops, law reviews, and tech journals, served as a vital platform for imagining the commons in general and the Creative Commons in particular. +={Boyle, James;Cohen, Julie;Lessig, Lawrence;Samuelson, Pamela} + +2~ The Elusive Quest for “Balance” +={copyright law:balance of public and private rights+19|public domain vs.+19;public domain:copyright law, and+19} + +Historically, copyright has been regarded as a “bargain” between the public and authors. 
The public gives authors a set of monopoly rights to help them sell their works and earn rewards for their hard work. In return, the public gets the marketable output of creators— books, films, music — and certain rights of free access and use. The primary justification of copyright law is not to protect the fortunes of authors; it is to promote new creative works and innovation. By giving authors a property right in their works — and so helping them to sell those works in the marketplace — copyright law aims to promote the “progress of human knowledge.” +={property rights:copyright law, and;copyright law:property rights, and} + +That’s the author’s side of the bargain. The public’s stake is to have certain limited rights to use copyrighted works. Under the “fair use” doctrine (or “fair dealing” in some countries), people are entitled to excerpt copyrighted works for noncommercial purposes such as journalism, scholarship, reviews, and personal use. People are also entitled to resell the physical copies of copyrighted works such as books and videos. (This right is granted under the “first sale doctrine,” which enables libraries and DVD rental stores to exist.) The public also has the right to use copyrighted works for free after the term of a copyright has expired — that is, after a work has “entered the public domain.” This general scheme is said to establish a balance in copyright law between the private rights of authors and the needs of the public and future authors. +={copyright law:fair use doctrine, and|first sale doctrine, and;fair use doctrine:copyright law, and;first sale doctrine} + +This “balance” has been more rhetorical than real, however. For decades, critics have complained that the public’s side of the copyright bargain is being abridged. Content industries have steadily expanded their rights under copyright law at the expense of the public’s modest access rights. 
+ +What is notable about the long history of seeking “balance” in copyright law is the singular failure of critics to make much headway (until recently) in redressing the problem. The public’s interests in copyright law — and those of authors’ — have never been given that much attention or respect. From the authors of eighteenth-century England, whose formal rights were in practice controlled by booksellers, to the rhythm-and-blues singers of the 1940s whose music was exploited for a pittance by record labels, to academics whose copyrights must often be ceded to commercial journals, authors have generally gotten the short end of the stick. No surprise here. Business practices and copyright policy have usually been crafted by the wealthiest, most politically connected players: book publishers, film studios, record labels, broadcasters, cable operators, news organizations. The public’s lack of organized political power was reflected in its lack of a coherent language for even describing its own interests in copyright law. + +For most of the twentieth century, the forging of copyright law was essentially an insider contest among various copyright-dependent industries for market advantage. Congress hosted a process to oversee the squabbling and negotiation, and nudged the players now and again. This is what happened in the fifteen-year run-up to congressional enactment of the Copyright Act of 1976, for example. For the most part, Congress has preferred to ratify the compromises that industry players hammer out among themselves. The unorganized public has been treated as an ignorant bystander. +={Copyright Act (1976)+3;copyright law:revision of (1976)+3} + +Naturally, this has strengthened the hand of commercial interests. Copyright disputes could be argued within a congenial intellectual framework and closely managed by a priesthood of lawyer-experts, industry lobbyists, and friendly politicians. 
The interests of citizens and consumers, blessedly absent from most debates, could be safely bracketed as marginal. + +But letting industries negotiate their own solutions has its own problems, as Professor Jessica Litman has pointed out: “Each time we rely on current stakeholders to agree on a statutory scheme, they produce a scheme designed to protect themselves against the rest of us. Its rigidity leads to its breakdown; the statute’s drafters have incorporated too few general principles to guide courts in effecting repairs.”~{ Jessica Litman, /{Digital Copyright}/ (Amherst, NY: Prometheus, 2000), p. 62. }~ By letting the affected industries negotiate a series of fact specific solutions, each reflecting that moment in history, Congress has in effect let copyright law become an agglomeration of complex and irregular political compromises — or, as some might say, a philosophically incoherent mess. +={copyright law:expansion of+8;Litman, Jessica} + +Perhaps because it is so attentive to its industry benefactors, Congress has generally regarded the fair use doctrine and the public domain as a sideshow. Under the Copyright Act of 1976, for example, fair use is set forth only as an affirmative defense to accusations of copyright infringement, not as an affirmative right. Moreover, fair use is defined by four general statutory guidelines, which courts have proceeded to interpret in wildly inconsistent ways. In real life, Lawrence Lessig has quipped, fair use amounts to “the right to hire a lawyer.” +={Lessig, Lawrence:fair use, on;copyright use:fair use doctrine, and;fair use doctrine:copyright law, and} + +Congress has shown a similarly low regard for the public domain. After extending the term of copyright law eleven times since 1961, the copyright monopoly now lasts for an author’s lifetime plus seventy years (ninety-five years for corporations). 
For Congress, writes Professor Tyler Ochoa, “allowing works to enter the public domain was something to be condemned, or at least only grudgingly tolerated, rather than something to be celebrated.”~{ Tyler Ochoa, “Origins and Meanings of the Public Domain,” /{Dayton Law Review}/ 28, no. 215 (2002). }~ Congress’s most hostile act toward the public domain — and to the public’s rights of access — was the elimination of the registration requirement for copyright protection.~{ Lawrence Lessig explains the impact of eliminating the copyright registration requirement in Lessig, /{Free Culture}/ (New York: Penguin, 2004), pp. 222–23. }~ Since 1978, copyright holders have not had to formally register their works in order to receive protection. Doodle on a scratch pad, record your guitar strumming, and it’s automatically copyrighted. +={copyright law:automatic+1;Ochoa, Tyler} + +Sounds great . . . but this provision had especially nasty consequences once the digital revolution kicked into high gear in the 1990s, because every digital byte was born, by default, as a form of property. Automatic copyright protection dramatically reversed the previous default, where most everything was born in the public domain and was free to use unless registered. Today, anyone wishing to reuse a work legally has to get permission and possibly pay a fee. To make matters worse, since there is no longer a central registry of who owns what copyrighted works, it is often impossible to locate the copyright holder. Such books, films, and images are known as “orphan works.” +={copyright law:"orphan works"} + +Thirty years ago, the idea of throwing a net of copyright over all information and culture was not alarming in the least. As Jessica Litman recalled, “When I started teaching in 1984, we were at what was about to be the crest of a high-protectionist wave. 
That is, if you looked at the scholarship being written then, people were writing about how we should expand copyright protection, not only to cover useful articles and fashions and semiconductor chips and computer programs, but also recombinant DNA. The Chicago School of scholarship was beginning to be quite influential. People were reconceiving copyright in Chicago Law and Economics terms, and things like fair use were seen to be ‘free riding.’ ”~{ Interview with Jessica Litman, November 16, 2006. }~ +={Litman, Jessica+1} + +Yet the effects of this protectionist surge, at least for the short term, were muted for a number of reasons. First, corporate lobbying on copyright issues was extremely low-key. “I started going to congressional hearings in 1986,” said Litman, “and no one was there. There were no members of Congress; there was no press. The witnesses would come and they’d talk, and staffers would take notes. And that would be it.”~{ Ibid. }~ The big-ticket lobbying — receptions, slick reports, legislative junkets, private movie screenings with Jack Valenti — did not really begin to kick in until the late 1980s and early 1990s, when trade associations for every conceivable faction stepped up their Washington advocacy. When the Internet’s commercial implications became clear in the mid-1990s, copyright-dependent industries ratcheted up their campaign contributions and lobbying to another level entirely. +={Valenti, Jack} + +The protectionist surge in copyright law in the 1980s was mitigated by two stalwart public servants: Representative Robert Kastenmeier of Wisconsin, the chair of the House judiciary subcommittee that oversaw copyright legislation, and Dorothy Schrader, the longtime general counsel of the U.S. Copyright Office. Both considered it their job to protect the public from grasping copyright industries. 
When Kastenmeier lost his reelection bid in 1990 and Schrader retired in 1994, the film, music, broadcast, cable, and publishing industries would henceforth have staunch allies— sometimes their former lawyer-lobbyists — in key congressional staff positions and copyright policy jobs. Government officials no longer saw their jobs as protecting consumers from overbearing, revenuehungry media industries, but as helping copyright owners chase down and prosecute “pirates.” Copyright law was recast as a form of industrial policy — a way to retain American jobs and improve the U.S. balance of trade — not as an instrument that affects social equity, consumer rights, and democratic values. +={Kastenmeier, Robert;Schrader, Dorothy;piracy} + +Ironically, the mercantilist view of copyright was gaining ground at precisely the time when the public’s stake in copyright law was growing. An explosion of consumer electronics in the 1980s was giving the public new reasons to care about their fair use rights and the public domain. The introduction of the videocassette recorder, the proliferation of cable television, personal computers, software and electronics devices, and then the introduction of the Web in 1993 all invited people to control their own creative and cultural lives. The new media meant that the baroque encrustations of copyright law that had accumulated over decades were now starting to interfere with people’s daily activities. +={fair use doctrine:copyright law, and+1;copyright law:fair use doctrine, and+1|mercantilist view of+2} + +Yet rather than negotiate a new copyright bargain to take account of the public’s needs and interests, copyright industries stepped up their demands on Congress to ram through even stronger copyright, trademark, and patent privileges for themselves. Their basic goal was, and generally remains, a more perfect control over all downstream uses of works. 
Content industries generally do not concede that there is any presumptive “free use zone” of culture, notwithstanding the existence of the fair use doctrine. Works that citizens may regard as fair-use entitlements industry often regards as chunks of information that no one has yet figured out how to turn into marketable property. + +Most content industries, then and now, do not see any “imbalance” in copyright law; they prefer to talk in different terms entirely. They liken copyrighted works to personal property or real estate, as in “and you wouldn’t steal a CD or use my house without permission, would you?” A copyrighted work is analogized to a finite physical object, But the essential point about works in the digital age is that they can’t be “used up” in the same way that physical objects can. They are “nondepletable” and “nonrival,” as economists put it. A digital work can be reproduced and shared for virtually nothing, without depriving another person of it. +={property rights:copyright law, and+1;copyright law:property rights, and} + +Nonetheless, a new narrative was being launched — copyrighted works as property. The idea of copyright law reflecting a policy bargain between the public and authors/corporations was being supplanted by a new story that casts copyright as property that is nearly absolute in scope and virtually perpetual in term. In hindsight, for those scholars who cared enough to see, a disquieting number of federal court cases were strengthening the hand of copyright holders at the expense of the public. James Boyle, in a much-cited essay, called this the “second enclosure movement” — the first one, of course, being the English enclosure movement of common lands in medieval times and into the nineteenth century.~{ James Boyle, “The Second Enclosure Movement and the Construction of the Public Domain,” /{Law and Contemporary Problems}/ 66 (Winter–Spring 2003), pp. 
33–74, at http://www.law.duke.edu/shell/cite.pl?66+Law+&+Contemp.+Probs.+33+(WinterSpring+2003). }~
+={Ford, Gerald R.;Nation (magazine)} + +2~ The Great Expansion of Intellectual Property +={intellectual property:protection of+14;copyright law:expansion of+14} + +These expansions of proprietary control in the 1980s proved to be a prelude to much more aggressive expansions of copyright, patent, and trademark law in the 1990s. Congress and the courts were granting property rights to all sorts of things that had previously been considered unowned or unownable. The Supreme Court had opened this door in 1980 when it recognized the patentability of a genetically modified bacterium. This led to ethically and economically dubious patents for genes and life-forms. Then businesses began to win patents for “business methods” — ideas and theoretical systems — that would otherwise be in the public domain. Mathematical algorithms, if embedded in software, could now be owned. Amazon.com’s patent on “one-click shopping” on its Web site became the symbol of this trend. Boat manufacturers won a special /{sui generis}/ (“in a class by itself ”) form of protection for the design of boat hulls in 1998. Celebrities and talent agencies prevailed upon state legislatures to extend the scope of ownership of celebrity names and likenesses, which had long been considered in the public domain. +={Amazon;copyright law:proprietary rights;software:copyright of+1;software:copyright of+1;property rights:copyright law, and} + +Companies developed still other strategies to assert greater proprietary control over works. Software companies began to rely upon mass-market licenses — often referred to as “shrink wrap” contracts and “click-through” Web agreements — to expand their rights at the expense of consumers and the public domain. 
Various computer companies sought to enact a model state law that, in Samuelson’s words, would “give themselves more rights than intellectual property law would do and avoid the burdens of public interest limitations.”~{ Samuelson, “Digital Information, Digital Networks, and the Public Domain,” p. 92. }~ Consumers could in effect be forced to surrender their fair use rights, the right to criticize the product or their right to sue, because of a “contract” they ostensibly agreed to. +={Samuelson, Pamela;copyright law:fair use doctrine, and;fair use doctrine:copyright law, and} + +Trademarks, originally designed to help people identify brands and prevent fraud in the marketplace, acquired a new power in 1995 — the ability to control public meanings. For years, large corporations had wanted to extend the scope of their trademark protection to include “dilution” — a fuzzy concept that would prohibit the use of a trademark without permission, even for legitimate public commentary or parody, if it “dilutes” the recognized public associations and meanings of a trademark. For a decade or more, Kastenmeier had prevented antidilution legislation from moving forward. After Kastenmeier left Congress, the trademark lobby succeeded in getting Congress to enact the legislation. This made it much easier for Mattel to threaten people who did parodies of Barbie dolls. The /{Village Voice}/ could more credibly threaten the /{Cape Cod Voice}/ for trademark infringement. Wal-Mart could prevent others from using “its” smiley-face logo (itself taken from the cultural commons).~{ See, e.g., David Bollier, /{Brand Name Bullies: The Quest to Own and Control Culture}/ (New York: Wiley, 2005).}~ +={Kastenmeier, Robert;trademarks:dilution of} + +The election of Bill Clinton as president in 1992 gave content industries new opportunities to expand their copyright privileges. 
The Clinton administration launched a major policy effort to build what it called the National Information Infrastructure (NII), more commonly known as the Information Superhighway. Today, of course, we call it the Internet. A task force of industry heavyweights was convened to determine what policies should be adopted to help build the NII.~{ Jessica Litman has an excellent historical account of the NII campaign in her book /{Digital Copyright}/ (Amherst, NY: Prometheus, 2000). }~ Vice President Al Gore cast himself as a visionary futurist and laid out astonishing scenarios for what the NII could deliver: access to every book in the Library of Congress, the ability of doctors to share medical information online, new strides against inequality as everyone goes online. +={Clinton, Bill;Gore, Al;Information Superhighway+6;Internet:rise of+1;National Information Infrastructure (NII)+6} + +The NII project was a classic case of incumbent industries trying to protect their profit centers. Executives and lobbyists associated with broadcasting, film, and music were being asked how to structure the Information Superhighway. Predictably, they came up with fantasies of digital television with five hundred channels, programs to sell products, and self-serving scenarios of even stronger copyright protection and penalties. Few had any inkling of the transformative power of open networks or the power of the sharing economy — and if they did, the possibilities certainly were not appealing to them. + +One part of the NII campaign was a working group on intellectual property headed by Bruce Lehman, a former congressional staffer, lobbyist for the software industry, and commissioner of patents and trademarks. The Lehman panel spent two years developing a sweeping set of copyright policies for the Information Superhighway. When the panel’s report was released in September 1995, anyone who cared about open culture and democracy was livid. 
The White Paper, as it was called, recommended a virtual elimination of fair use rights in digital content and broader rights over any copyrighted transmissions. It called for the elimination of first-sale rights for digitally transmitted documents (which would prevent the sharing of digital files) and endorsed digital rights management systems for digital works (in order to monitor and prosecute illegal sharing). The White Paper even sought to reinterpret existing law so that transient copies in the random-access memory of computers would be considered illegal unless they had a license — essentially outlawing Web browsing without a license. With visions of Soviet-style indoctrination, the document also recommended an ambitious public education program to teach Americans to properly respect copyright laws. +={Lehman, Bruce;White Paper [on copyright]+4;Clinton administration:White Paper+4;first sale doctrine;copyright:first sale, and|fair use doctrine, and;fair use doctrine:copyright law, and+1|White Paper vs.+5} + +Litman wrote a revealing history of the misbegotten NII project in her book /{Digital Copyright}/. Her chapter title “Copyright Lawyers Set Out to Colonize Cyberspace” says it all.~{ Litman, /{Digital Copyright}/, pp. 89–100. }~ Samuelson alerted the readers of /{Wired}/ about the outrageous proposals of the White Paper in her devastating January 1996 article “The Copyright Grab.”~{ Pamela Samuelson, “The Copyright Grab,” /{Wired}/, January 1996. }~ If the NII proposals are enacted, warned Samuelson, “your traditional user rights to browse, share or make private noncommercial copies of copyrighted works will be rescinded. Not only that, your online service provider will be forced to snoop through your files, ready to cut you off and turn you in if it finds any unlicensed material there. 
The White Paper regards digital technology as so threatening to the future of the publishing industry that the public must be stripped of all the rights copyright law has long recognized — including the rights of privacy. Vice President Al Gore has promised that the National Information Infrastructure will dramatically enhance public access to information; now we find out that it will be available only on a pay-per-use basis.”~{ Ibid. }~ +={Litman, Jessica;Samuelson, Pamela;Gore, Al} + +The White Paper was not just an effort by Old Media to domesticate or eliminate the freedoms emerging on the Information Superhighway; it sought to set the stage for the internationalization of strict copyright norms, so that American-style copyright law would prevail around the world. To counter this effort, American University law professor Peter Jaszi convened a group of law professors, library organizations, and computer and consumer electronics makers, who promptly organized themselves as the Digital Future Coalition (DFC), the first broad-based coalition in support of the public’s stake in copyright law. +={Jaszi, Peter;Digital Future Coalition (DFC)+1;copyright law:international} + +The DFC attacked the White Paper as a copyright-maximalist nightmare and sought to rally civil liberties groups, Internet service providers, and electronics manufacturers. With modest industry support, the DFC was largely responsible for slowing progress on legislation that would have enacted Lehman’s proposals. As domestic opposition grew, Lehman shrewdly decided to push for a new global copyright treaty that would embody similar principles. In the end, however, the World Intellectual Property Organization demurred. +={Lehman, Bruce;World Intellectual Property Organization} + +By that time, however, the terms of debate had been set, and there was serious congressional momentum to adopt some variant of the White Paper agenda. 
The ultimate result, enacted in October 1998, was the Digital Millennium Copyright Act (DMCA), the crowning achievement of the copyright-maximalist decade. It contained dozens of highly specific provisos and qualifications to satisfy every special pleader. The law in effect authorized companies to eliminate the public’s fair use rights in digital content by putting a “digital lock” around the content, however weak. Circumventing the lock, providing the software to do so, or even telling someone how to do so became a criminal offense. +={copyright:fair use doctrine, and+1;fair use doctrine:copyright law, and+1;Digital Millennium Copyright Act (DMCA) [1998]+4;copyright law:DMCA+4} + +The DMCA has been roundly denounced by software programmers, music fans, and Internet users for prohibiting them from making personal copies, fair use excerpts, and doing reverse engineering on software, even with legally purchased products. Using digital rights management systems sanctioned by the DMCA, for example, many CDs and DVDs are now coded with geographic codes that prevent consumers from operating them on devices on other continents. DVDs may contain code to prevent them from running on Linux-based computers. Digital journals may “expire” after a given period of time, wiping out library holdings unless another payment is made. Digital textbooks may go blank at the end of the school year, preventing their reuse or resale. + +Critics also argue that the DMCA gives large corporations a powerful legal tool to thwart competition and interoperability. Some companies programmed garage door openers and printer cartridges so that the systems would not accept generic replacements (until a federal court found this behavior anticompetitive). Naturally, this sort of behavior, which the DMCA facilitates, lets companies avoid open competition on open platforms with smaller companies and entrepreneurs. 
It also gives companies a legal pretext for bullying Web site owners into taking down copyrighted materials that may in fact be legal to use. + +In her excellent history of the political run-up to the DMCA, Litman notes, “There is no overarching vision of the public interest animating the Digital Millennium Copyright Act. None. Instead, what we have is what a variety of different private parties were able to extract from each other in the course of an incredibly complicated four-year multiparty negotiation.”~{ Litman, /{Digital Copyright}/, pp. 144–45. }~ The DMCA represents a new frontier of proprietarian control — the sanctioning of technological locks that can unilaterally override the copyright bargain. Companies asked themselves, Why rely on copyrights alone when technology can embed even stricter controls into the very design of products? +={Litman, Jessica} + +The year 1998 was an especially bad year for the public domain. Besides enacting the trademark dilution bill and DMCA, the Walt Disney Company and other large media corporations succeeded in their six-year campaign to enact the Sonny Bono Copyright Term Extension Act.~{ See Wikipedia entry for the Copyright Term Extension Act, at http://en.wikipedia.org/wiki/Sonny_Bono_Copyright_Term_Extension_Act. See also /{Eldred v. Ashcroft}/, 537 U.S. 186 (2003), F. 3d 849 (2001). }~ The legislation, named after the late House legislator and former husband of the singer Cher, retroactively extended the terms of existing copyrights by twenty years. As we will see in chapter 3, this law became the improbable catalyst for a new commons movement. +={trademarks:dilution of;Walt Disney Company;Copyright Term Extension Act} + +2~ Confronting the Proprietarian Juggernaut +={copyright law:property rights, and+13;property rights:copyright law, and+13} + +If there was ever a need for independent scholarship on copyright law and activism to challenge the new excesses, this was such a time.
Fred von Lohmann, senior staff attorney for the Electronic Frontier Foundation in San Francisco, recalls, “Peggy Radin taught the first cyber-law class at Stanford Law School in 1995, and I was her research assistant. And at the end of that semester, I had read everything that had ever been written about the intersection of the Internet and the law — not just in the legal literature, but in almost all the literature. It filled about two boxes, and that was it. That was all there was.”~{ Interview with Fred von Lohmann, March 20, 2006. }~ +={Lohmann, Fred von;Radin, Margaret Jane} + +In about a dozen years, those two boxes of literature have grown into many shelves and countless filing cabinets of case law and commentary. Much of the legal scholarship was the fruit of a new generation of copyright professors who rose to the challenge of the time. An earlier generation of copyright scholars — Melville Nimmer, Alan Latman, Paul Goldstein — were highly respected titans, but they also enjoyed busy consulting practices with the various creative industries that they wrote about. Protecting the public domain was not their foremost concern. +={Goldstein, Paul;Latman, Alan;Nimmer, Melville} + +By the 1980s, as law schools became more like graduate schools and less like professional schools, copyright commentary began to get more scholarly and independent of the industries it studied. People like Pamela Samuelson, Peter Jaszi, Jerome H. Reichman, Jessica Litman, L. Ray Patterson, and Wendy Gordon were among this cohort, who were soon joined in the 1990s by a new wave of thinkers such as James Boyle, Lawrence Lessig, Julie Cohen, Niva Elkin-Koren, and Yochai Benkler. Still others, such as Rosemary Coombe and Keith Aoki, approached copyright issues from cross-cultural and globalization perspectives. These scholars were frankly hostile to the large copyright industries, and greatly concerned with how the law was harming democracy, science, culture, and consumers.
+={Litman, Jessica;Samuelson, Pamela;Jaszi, Peter;Benkler, Yochai;Boyle, James;Cohen, Julie;Coombe, Rosemary;Elkin-Koren, Niva;Gordon, Wendy;Lessig, Lawrence;Patterson, L. Ray;Reichman, Jerome H.;Aoki, Keith} + +A number of activist voices were also coming forward at this time to challenge the proprietarian juggernaut. As the Internet became a popular medium, ordinary people began to realize that the new copyright laws were curtailing their creative freedoms and free speech rights. The obscure complexities of copyright law started to become a far more public and political issue. The pioneering activist organization was the Electronic Frontier Foundation. EFF was founded in 1990 by tech entrepreneur Mitch Kapor, the famed inventor of the Lotus 1-2-3 spreadsheet in the 1980s; John Perry Barlow, Grateful Dead lyricist and hacker; and John Gilmore, a leading privacy/cryptography activist and free software entrepreneur. +={Barlow, John Perry+4;Gilmore, John;Kapor, Mitch;Electronic Frontier Foundation (EFF)} + +The organization was oriented to hackers and cyberlibertarians, who increasingly realized that they needed an organized presence to defend citizen freedoms in cyberspace. (Barlow adapted the term /{cyberspace}/ from science-fiction writer William Gibson in 1990 and applied it to the then-unnamed cultural life on the Internet.) Initially, the EFF was concerned with hacker freedom, individual privacy, and Internet censorship. It later went through some growing pains as it moved offices, changed directors, and sought to develop a strategic focus for its advocacy and litigation. In more recent years, EFF, now based in San Francisco, has become the leading litigator of copyright, trademark, and Internet free expression issues. It also has more than ten thousand members and spirited outreach programs to the press and public. +={Gibson, William;cyberspace:use of term} + +John Perry Barlow was an important visionary and popularizer of the time.
His March 1994 article “The Economy of Ideas” is one of the most prophetic yet accessible accounts of how the Internet was changing the economics of information. He astutely realized that information is not a “product” like most physical property, but rather a social experience or form of life unto itself. “Information is a verb, not a noun,” he wrote. “Freed of its containers, information obviously is not a thing. In fact, it is something that happens in the field of interaction between minds or objects or other pieces of information. . . . Sharks are said to die of suffocation if they stop swimming, and the same is nearly true of information.”~{ John Perry Barlow, “The Economy of Ideas,” /{Wired}/, March 1994, at http://www.wired.com/wired/archive/2.03/economy.ideas.html.}~ + +Instead of the sober polemics of law professors, Barlow — a retired Wyoming cattle rancher who improbably doubled as a tech intellectual and rock hipster — spiced his analysis of information with colorful metaphors and poetic aphorisms. Comparing information to DNA helices, Barlow wrote, “Information replicates into the cracks of possibility, always seeking new opportunities for /{Lebensraum}/.” Digital information, he said, “is a continuing process more like the metamorphosing tales of prehistory than anything that will fit in shrink-wrap.” + +Since hyperbole is an occupational reflex among cyberjournalists, Barlow’s /{Wired}/ piece bore the obligatory subtitle, “Everything you know about intellectual property is wrong.” Yet reading Barlow more than a decade later confirms that, posturing aside, he /{was}/ on to the big story of our time: “Notions of property, value, ownership and the nature of wealth itself are changing more fundamentally than at any time since the Sumerians first poked cuneiform into wet clay and called it stored grain. Only a very few people are aware of the enormity of this shift, and fewer of them are lawyers or public officials.”~{ Ibid.
}~ + +With a nod to Professor Samuelson, Barlow was prescient enough to compare the vulnerability of indigenous peoples to the coming dispossession of Internet communities: “Western countries may legally appropriate the music, designs and biomedical lore of aboriginal people without compensation to their tribes of origins since those tribes are not an ‘author’ or ‘inventors.’ But soon most information will be generated collaboratively by the cyber-tribal hunter-gatherers of cyberspace. Our arrogant legal dismissal of the rights of ‘primitives’ will soon return to haunt us.” +={Samuelson, Pamela} + +No account of cyberactivism in the 1990s is complete without mention of James Love, a feisty advocate with a brilliant strategic mind and an extraordinary ability to open up broad new policy fronts. For example, Love, as director of the Ralph Nader–founded Consumer Project on Technology, worked with tech activist Carl Malamud to force the U.S. Securities and Exchange Commission to put its EDGAR database of corporate filings online in 1994, at a time when the SEC was planning to give the data to private vendors to sell. By prevailing at the SEC, Love and Malamud set an important precedent that government agencies should post their information on the Internet for free. A few years later, in 1997, Love convened a conference to assess Microsoft’s troubling monopoly power, an event that emboldened the Department of Justice to launch its antitrust lawsuit against the company. Love later played a key role in persuading an Indian drugmaker to sell generic HIV/AIDS drugs to South Africa, putting Big Pharma on the defensive for its callous patent and trade policies and exorbitant drug prices. Love’s timely gambit in 1996 to organize broader advocacy for the public domain failed, however. He co-founded the Union for the Public Domain, with a board that included Richard Stallman, but the project never developed a political following or raised much money.
+={Love, James;EDGAR database;Malamud, Carl;Microsoft:antitrust lawsuit against} + +The American Library Association was the largest and best-funded advocate on copyright issues in the 1990s, but its collaborations with other Washington allies tended to be modest, and its grassroots mobilization disappointing. Libraries are respected in the public mind precisely because they are stable, apolitical civic institutions — that is, not activists. Despite its valuable presence on copyright and Internet policy issues, the library lobby was temperamentally disinclined to get too far ahead of the curve. +={American Library Association;libraries} + +By the end of the decade, a muscular, dissenting discourse about copyright law was starting to take shape. On one side was a complicated body of industry-crafted copyright law that claimed imperial powers to regulate more and more aspects of daily life — your Web site, your music CDs, your electronic devices, your computer practices. On the other side were ordinary people who loved how the Internet and digital devices were empowering them to be creators and publishers in their own right. They just wanted to indulge their natural human urge to share, tinker, reuse, and transform culture. +={copyright law:expansion of} + +The dissent of the progressive copyright scholars and activists, though pungent, was hardly insurrectionist. These critics were reformers, not bomb throwers. Most objected to the overreaching scope and draconian enforcement of copyright law, not to its philosophical foundations. They generally argued that the problem wasn’t copyright law per se, but the misapplication and overextension of its core principles. + +2~ A New Story About the Public Domain +={copyright law:public domain vs.+15;public domain:copyright law, and+15|emergence of+21} + +One of the most notable outgrowths of all this activity was the development of a new story about the public domain.
Scholars took a range of legal doctrines that were scattered among the sprawling oeuvre of copyright law and consolidated them under one banner, /{the public domain}/. The new framing helped give the public’s rights in cultural works a new moral standing and intellectual clarity. + +Even though copyright law has existed for three centuries, the term “public domain” did not surface in a U.S. Supreme Court decision until 1896. The public domain was first mentioned in U.S. copyright law in 1909, and while it occasionally merited passing reference or discussion in later decades, the concept was not the subject of a significant law review article until 1981. That article was “Recognizing the Public Domain,” by Professor David Lange.~{ David Lange, “Recognizing the Public Domain,” /{Law and Contemporary Problems}/ 44 (Autumn 1981). }~ “David’s article was an absolutely lovely piece that sunk without a trace,” recalls Jessica Litman. “When a bunch of us discovered [Lange’s article] in the late 1980s, it had been neither cited nor excerpted nor reprinted nor anything — because nobody was looking for a defense of the public domain. People were looking for arguments for extending copyright protection. David was ahead of his time.” + +The main reason that the public domain was ignored was that it was generally regarded as a nullity. “Public domain in the fields of literature, drama, music and art is the other side of the coin of copyright,” wrote M. William Krasilovsky in 1967.~{ M. William Krasilovsky, “Observations on the Public Domain,” /{Bulletin of the Copyright Society}/ 14, no. 205 (1967). }~ “It is best defined in negative terms.” Edward Samuels wrote that the public domain “is simply whatever remains after all methods of protection are taken into account.”~{ Edward Samuels, “The Public Domain in Copyright Law,” /{Journal of the Copyright Society}/ 41, no. 137 (1993), p. 138. 
}~ + +Lange himself acknowledged this conventional wisdom when he wrote that the public domain “amounts to a dark star in the constellation of intellectual property.” He took issue with this history, however, and insisted upon the affirmative value of the public domain. Lange dredged up a number of “publicity rights” cases and commentary to shed light on the problem: Bela Lugosi’s widow and son claimed that they, not Universal Pictures, should own the rights to the character Dracula. Representatives of the deceased Marx Brothers sought to stop a Broadway production spoofing 1930s musicals from using the Marx Brothers’ characters. DC Comics, owner of a trademark in the Superman character, sued to prevent a group of Chicago college students from calling their newspaper /{The Daily Planet}/. And so on. +={DC Comics;Lugosi, Bela;Marx Brothers+1} + +From such examples, Lange drove home a commonsense lesson about the derivative nature of creativity: we all depend on others to generate “new” works. Groucho, Chico, and Harpo Marx couldn’t “invent” their stage personas until, in classic vaudevillian tradition, they had adapted jokes and shtick from their peers. “In time,” Groucho wrote in his memoirs, “if [a comedian] was any good, he would emerge from the routine character he had started with and evolve into a distinct personality of his own. This has been my experience and also that of my brothers, and I believe this has been true of most of the other comedians.” + +To which Lange added, “Of course, what Groucho is saying in this passage is that although he and his brothers began as borrowers they ended as inventors. . . . It is a central failing in the contemporary intellectual property literature and case law that that lesson, so widely acknowledged, is so imperfectly understood.”~{ Lange, “Recognizing the Public Domain,” p. 162. 
}~ + +In example after example, Lange made the point that “as access to the public domain is choked, or even closed off altogether, the public loses too: loses the rich heritage of its culture, the rich presence of new works derived from that culture, and the rich promise of works to come.” Lange warned that “courts must dispel” the “impression of insubstantiality” from which the public domain suffers. Nothing will be resolved, he warned, “until the courts have come to see the public domain not merely as an unexplored abstraction but as a field of individual rights as important as any of the new property rights.” + +2~ What Is “Authorship”? +={authorship+13;copyright law:authorship, and+13|originality, and+13} + +Besides honoring the public domain, copyright reformers sought to develop a second, more subversive narrative. They questioned the very idea of individual “authorship” and “originality,” two central pillars of copyright law. The standard moral justification for granting authors exclusive rights in their works is the personal originality that they supposedly show in creating new works. But can “originality” and “authorship” be so neatly determined? What of the role of past generations and creative communities in enabling the creation of new works? Don’t we all, in the words of Isaac Newton, stand on the shoulders of giants? +={Newton, Isaac} + +The idea that sharing, collaboration, and adaptation may actually be important to creativity, and not merely incidental, was a somewhat daring theme in the early 1990s, if only because it had little recognition in copyright scholarship. While this line of analysis preceded the Internet, the arrival of the World Wide Web changed the debate dramatically. Suddenly there was a powerful, real-life platform for /{collective}/ authorship.
Within fifteen years, sharing and collaboration has become a standard creative practice, as seen in Wikipedia, remix music, video mashups, machinima films, Google map mashups, social networking, and much else. +={World Wide Web:collective authorship, and;authorship:collective} + +Of course, in the early 1990s, the promise of online networks was only dimly understood. But for Jessica Litman, the tightening noose of proprietary control had troubling implications for fair use and the ability of people to create and share culture: “Copyright law was no longer as open and porous as it had been, so I felt compelled to try to defend the open spaces that nobody was paying attention to.” Litman published a major article on the public domain in 1990, instigating a fresh round of interest in it and establishing lines of analysis that continue to this day.~{ Jessica Litman, “The Public Domain,” /{Emory Law Journal}/ 39, no. 965 (Fall 1990). }~ +={Litman, Jessica;copyright law:fair use doctrine, and;fair use doctrine:copyright law, and} + +She made the then-startling claim, for example, that “the very act of authorship in /{any}/ medium is more akin to translation and recombination than it is to creating Aphrodite from the foam of the sea. Composers recombine sounds they have heard before; playwrights base their characters on bits and pieces drawn from real human beings and other playwrights’ characters. . . . This is not parasitism; it is the essence of authorship. And, in the absence of a vigorous public domain, much of it would be illegal.” Litman argued that the public domain is immensely important because all authors depend upon it for their raw material. Shrink the public domain and you impoverish the creative process. + +The problem, said Litman, is that copyright law contains a structural contradiction that no one wants to acknowledge.
The law requires “originality” in order for a work to be protected — but it cannot truly determine what is “original.” If authors could assert that their works were entirely original, and courts conscientiously enforced this notion, copyright law would soon collapse. Everyone would be claiming property rights in material that had origins elsewhere. Shakespeare’s estate might claim that Leonard Bernstein’s /{West Side Story}/ violates its rights in /{Romeo and Juliet}/; Beethoven would prevent the Bee Gees from using the opening chords of his Fifth Symphony. + +When one person’s copyright claims appear to threaten another person’s ability to create, the courts have historically invoked the public domain in order to set limits on the scope of copyright protection. In this backhanded way, the public domain helps copyright law escape from its own contradictions and ensures that basic creative elements remain available to all. As Litman explained: + +_1 Because we have a public domain, we can permit authors to avoid the harsh light of a genuine search for provenance, and thus maintain the illusion that their works are indeed their own creations. We can tolerate the grant of overbroad and overlapping deeds through the expedient assumption that each author took her raw material from the commons, rather than from the property named in prior deeds.~{ Litman, “The Public Domain,” p. 1012. }~ + +In effect, copyright law sets up a sleight of hand: it invites authors to plunder the commons with the assurance that their borrowings will be politely ignored — but then it declares the resulting work of authorship “original” and condemns any further follow-on uses as “piracy.” This roughly describes the early creative strategy of the Walt Disney Company, which built an empire by rummaging through the public domain of fairy tales and folklore, adding its own creative flourishes, and then claiming sole ownership in the resulting characters and stories. 
+={authorship+3;Walt Disney Company} + +As Litman unpacked the realities of “authorship,” she showed how the idea of “originality” serves as a useful fiction. Any author must draw upon aspects of culture and recombine them without ever being able to identify the specific antecedents, she pointed out. Judges, for their part, can never really make a rigorous factual determination about what is “original” and what is taken from the public domain. In reality, said Litman, authorship amounts to “a combination of absorption, astigmatism and amnesia.” The public domain is vague and shifting precisely because it must constantly disguise the actual limits of individual “originality.” + +English professor Martha Woodmansee and law professor Peter Jaszi helped expose many of the half-truths about “authorship” and “originality.” Their 1994 anthology of essays, /{The Construction of Authorship}/, showed how social context is an indispensable element of “authorship,” one that copyright law essentially ignores.~{ Martha Woodmansee and Peter Jaszi, eds., /{The Construction of Authorship: Textual Appropriation in Law and Literature}/ (Durham, NC: Duke University Press, 1994). }~ Thus, even though indigenous cultures collectively create stories, music, and designs, and folk cultures generate works in a collaborative fashion, copyright law simply does not recognize such acts of collective authorship. And so they go unprotected. They are vulnerable to private appropriation and enclosure, much as Stallman’s hacker community at MIT saw its commons of code destroyed by enclosure. +={Jaszi, Peter;Woodmansee, Martha;commons:enclosure of;enclosure movement} + +Before the Internet, the collaborative dimensions of creativity were hardly given much thought. An “author” was self-evidently an individual endowed with unusual creative skills. 
As the World Wide Web and digital technologies have proliferated, however, copyright’s traditional notions of “authorship” and “originality” have come to seem terribly crude and limited. The individual creator still matters and deserves protection, of course. But when dozens of people contribute to a single entry of Wikipedia, or thousands contribute to an open-source software program, how then shall we determine who is the “author”?~{ Henry Miller writes: “We carry within us so many entities, so many voices, that rare indeed is the man who can say he speaks with his own voice. In the final analysis, is that iota of uniqueness which we boast of as ‘ours’ really ours? Whatever real or unique contribution we make stems from the same inscrutable source whence everything derives. We contribute nothing but our understanding, which is a way of saying — our acceptance.” Miller, /{The Books in My Life}/ (New York: New Directions), p. 198. }~ By the lights of copyright law, how shall the value of the public domain, reconstituted as a commons, be assessed?~{ Rufus Pollock, “The Value of the Public Domain,” report for Institute for Public Policy Research, London, July 2006, at http://www.rufuspollock.org/ economics/papers/value_of_public_domain.ippr.pdf. }~ +={World Wide Web:collective authorship, and;creativity:collaborative} + +The Bellagio Declaration, the outgrowth of a conference organized by Woodmansee and Jaszi in 1993, called attention to the sweeping deficiencies of copyright law as applied. One key point stated, “In general, systems built around the author paradigm tend to obscure or undervalue the importance of the ‘public domain,’ the intellectual and cultural commons from which future works will be constructed. 
Each intellectual property right, in effect, fences off some portion of the public domain, making it unavailable to future creators.”~{ See James Boyle, /{Shamans, Software, and Spleens: Law and the Construction of the Information Society}/ (Cambridge, MA: Harvard University Press, 1995), p. 192. }~ +={Jaszi, Peter;Woodmansee, Martha;Bellagio Declaration;Boyle, James:Shamans, Software and Spleens, by+1;intellectual property:public domain vs.} + +Another fusillade of flaming arrows engulfed the fortress of “authorship” and “originality” in 1996, when James Boyle published /{Shamans, Software, and Spleens}/. With sly wit and deep analysis, this landmark book identified many of the philosophical paradoxes and absurdities of property rights in indigenous knowledge, software, genes, and human tissue. Boyle deftly exposed the discourse of IP law as a kind of Möbius strip, a smooth strip of logic that confusingly turns back on itself. “If a geography metaphor is appropriate at all,” said Boyle, “the most likely cartographers would be Dali, Magritte and Escher.”~{ James Boyle, “A Theory of Law and Information: Copyright, Spleens, Blackmail and Insider Trading,” /{California Law Review}/ 80, no. 1413 (1992), at http://www.law.duke.edu/boylesite/law&info.htm. }~ +={copyright law:philosophical challenges to} + +2~ “You Have No Sovereignty Where We Gather” +={copyright law:philosophical challenges to+13} + +The deconstruction of copyright law over the past twenty years has been a significant intellectual achievement. It has exposed the copyright law’s philosophical deficiencies, showed how social practice deviates from it, and revealed the antisocial effects of expanding copyright protection. Critics knew that it would be impossible to defend the fledgling cyberculture without first documenting how copyright law was metastasizing at the expense of free expression, creative innovation, consumer rights, and market competition. 
+ +But as the millennium drew near, the tech-minded legal community — and law-minded techies — knew that critiques and carping could only achieve so much. A winnable confrontation with copyright maximalists was needed. A compelling counternarrative and a viable long-term political strategy had to be devised. And then somehow they had to be pushed out to the wider world and made real. + +That task was made easier by the intensifying cultural squeeze. The proprietarian lockdown was starting to annoy and anger people in their everyday use of music, software, DVDs, and the Web. And the property claims were growing more extreme. The American Society of Composers, Authors and Publishers had demanded that Girl Scout camps pay a public performance license for singing around the campfire. Ralph Lauren challenged the U.S. Polo Association for ownership of the word /{polo}/. McDonald’s succeeded in controlling the Scottish prefix Mc as applied to restaurants and motels, such as “McVegan” and “McSleep.”~{ These examples can be found in Bollier, /{Brand Name Bullies}/. }~ +={Lauren, Ralph} + +The mounting sense of frustration fueled a series of conferences between 1999 and 2001 that helped crystallize the disparate energies of legal scholarship into something resembling an intellectual movement. “A number of us [legal scholars] were still doing our own thing, but we were beginning to get a sense of something,” recalls Yochai Benkler, “It was no longer Becky Eisenberg working on DNA sequences and Pamela Samuelson on computer programs and Jamie Boyle on ‘environmentalism for the ’Net’ and me working on spectrum on First Amendment issues,” said Benkler. 
“There was a sense of movement.”~{ Interview with Yochai Benkler, February 7, 2006.}~ (“Environmentalism for the ’Net” was an influential piece that Boyle wrote in 1998, calling for the equivalent of an environmental movement to protect the openness and freedom of the Internet.)~{ James Boyle, “A Politics of Intellectual Property: Environmentalism for the Net,” /{Duke Law Journal}/ 47, no. 1 (October 1997), pp. 87–116, at http://www.law.duke.edu/boylesite/Intprop.htm. }~ +={Boyle, James+1;Benkler, Yochai+1;Eisenberg, Rebecca;Samuelson, Pamela} + +“The place where things started to get even crisper,” said Benkler, “was a conference at Yale that Jamie Boyle organized in April 1999, which was already planned as a movement-building event.” That conference, Private Censorship/Perfect Choice, looked at the threats to free speech on the Web and how the public might resist. It took inspiration from John Perry Barlow’s 1996 manifesto “A Declaration of the Independence of Cyberspace.” It is worth quoting at length from Barlow’s lyrical cri de coeur — first published in /{Wired}/ and widely cited — because it expresses the growing sense of thwarted idealism among Internet users, and a yearning for greater self-determination and self-governance among commoners. Barlow wrote: +={Barlow, John Perry+6} + +_1 Governments of the Industrial World, you weary giants of flesh and steel, I come from Cyberspace, the new home of Mind. On behalf of the future, I ask you of the past to leave us alone. You are not welcome among us. You have no sovereignty where we gather. + +_1 We have no elected government, nor are we likely to have one, so I address you with no greater authority than that with which liberty itself always speaks. I declare the global social space we are building to be naturally independent of the tyrannies you seek to impose on us. You have no moral right to rule us nor do you possess any methods of enforcement we have true reason to fear.
+ +_1 Governments derive their just powers from the consent of the governed. You have neither solicited nor received ours. We did not invite you. You do not know us, nor do you know our world. Cyberspace does not lie within your borders. Do not think that you can build it, as though it were a public construction project. You cannot. It is an act of nature and it grows itself through our collective actions. + +_1 You have not engaged in our great and gathering conversation, nor did you create the wealth of our marketplaces. You do not know our culture, our ethics, or the unwritten codes that already provide our society more order than could be obtained by any of your impositions. + +_1 You claim there are problems among us that you need to solve. You use this claim as an excuse to invade our precincts. Many of these problems don’t exist. Where there are real conflicts, where there are wrongs, we will identify them and address them by our means. We are forming our own Social Contract. This governance will arise according to the conditions of our world, not yours. Our world is different. + +As Barlow made clear, the Internet was posing profound new questions — not just about politics, but about the democratic polity itself. What would be the terms of moral legitimacy and democratic process in cyberspace? Would the new order be imposed by a Congress beholden to incumbent industries and their political action committees, or would it be a new social contract negotiated by the commoners themselves? In posing such questions, and doing it with such rhetorical panache, Barlow earned comparisons to Thomas Jefferson. + +The stirrings of a movement were evident in May 2000, when Benkler convened a small conference of influential intellectual property scholars at New York University Law School on “A Free Information Ecology.” This was followed in November 2001 by a large gathering at Duke Law School, the first major conference ever held on the public domain.
It attracted several hundred people and permanently rescued the public domain from the netherworld of “nonproperty.” People from diverse corners of legal scholarship, activism, journalism, and philanthropy found each other and began to reenvision their work in a larger, shared framework. +={Benkler, Yochai;public domain:conferences about+1} + +Over three decades, copyright scholarship had become more incisive, impassioned, and focused on the public good — but much of the talk remained within the rarefied circles of the academy. What to do about the disturbing enclosures of the cultural commons remained a vexing, open question. The 1990s saw an eclectic smattering of initiatives, from EFF lawsuits and visionary manifestos to underfunded advocacy efforts and sporadic acts of hacker mischief and civil disobedience. All were worthwhile forms of engagement and exploratory learning. None were terribly transformative. Free software was growing in popularity in the 1990s, but its relevance to broader copyright struggles and the Internet was not yet recognized. Congress and the courts remained captive to the copyright-maximalist worldview. The idea of organizing a counter-constituency to lay claim to the public domain and forge a new social contract for cyberspace was a fantasy. Copyright law was just too obscure to excite the general public and most creators and techies. The commoners were too scattered and diverse to see themselves as an insurgent force, let alone imagine they might create a movement. + +1~ 3 WHEN LARRY LESSIG MET ERIC ELDRED + +/{A constitutional test case becomes the seed for a movement.}/ + +Once the value of the public domain became evident, and a few visionaries realized that the commons needed to be protected somehow, an important strategic question arose: Which arena would offer the best hope for success — politics, culture, technology, or law? + +The real answer, of course, was all of the above. 
Building a new digital republic would require a wholesale engagement with the politics of effecting democratic change and the challenges of building a cultural movement. It would require the invention of a shared technological infrastructure, and the development of legal tools to secure the commons. All were intertwined. But as a practical matter, anyone who aspired to stop the mass-media-driven expansions of copyright law had to choose where to invest his or her energy. In the mid-1990s, Lawrence Lessig decided that the greatest leverage would come through law. +={Lessig, Lawrence:law in contemporary context, and+2} + +Lessig, usually referred to as Larry, had the knowledge, talent, and good timing to conceptualize the politics of digital technologies at a ripe moment, the late 1990s, when the World Wide Web was exploding and people were struggling to understand its significance. However, Lessig was not content to play the sage law professor dispensing expertise at rarefied professional and scholarly gatherings; he aimed to become a public intellectual and highbrow activist. Through a punishing schedule of public speaking and a series of high-profile initiatives starting in 1998 and 1999, Lessig became a roving demigod-pundit on matters of the Internet, intellectual property, and cultural freedom. + +In the course of his frequent travels, he had a particularly significant rendezvous at the Starbucks on Church Street in Cambridge, Massachusetts. It was November 1998. A month earlier, Congress had enacted the Sonny Bono Copyright Extension Act. Lessig was eager to meet with one Eric Eldred, a retired navy contractor, to see if he would agree to be a plaintiff in the first federal case to challenge the constitutionality of the copyright clause. +={Copyright Term Extension Act;Eldred, Eric:Lessig, and;Lessig, Lawrence:Eldred, and} + +Eldred was a book enthusiast and computer programmer who had reached the end of his rope. 
Three years earlier, in 1995, he had launched a simple but brilliant project: a free online archive of classic American literature. Using his PC and a server in his home in New Hampshire, Eldred posted the books of Nathaniel Hawthorne, Henry James, Wallace Stevens, and dozens of other great authors whose works were in the public domain. Eldred figured it would be a great service to humanity to post the texts on the World Wide Web, which was just beginning to go mainstream. + +Eldred had previously worked for Apollo Computer and Hewlett-Packard and was experienced in many aspects of computers and software. In the late 1980s, in fact, he had developed a system that enabled users to post electronic text files and then browse and print them on demand. When the World Wide Web arrived, Eldred was understandably excited. “It seemed to me that there was a possibility of having a system for electronic books that was similar to what I had done before. I was interested in experimenting with this to see if it was possible.”~{ Interview with Eric Eldred, August 1, 2006; Daren Fonda, “Copyright Crusader,” /{Boston Globe Magazine}/, August 29, 1999, available at http://www.boston.com/globe/magazine/8-29/featurestory1.shtml; and Eric Eldred, “Battle of the Books: The Ebook vs. the Antibook,” November 15, 1998, at http://www.eldritchpress.org/battle.html. }~ + +So Eldred set out to build his own archive of public-domain books: “I got books from the library or wherever, and I learned how to do copyright research and how to scan books, do OCR [optical-character recognition] and mark them up as HTML [the programming language used on the Web],” he said. “I just wanted to make books more accessible to readers.”~{ Interview with Eric Eldred, August 1, 2006. }~ + +Eldred didn’t realize it at the time, but his brave little archive, Eldritch Press, embodied a dawning cultural archetype — the self-published digital work meant to be freely shared with anyone in the world, via the Internet.
Thanks to the magic of “network effects” — the convenience and value that are generated as more people join a network — Eldred’s Web site was soon receiving more than twenty thousand hits a day. A growing community of book lovers came together through the site. They offered annotations to the online books, comments, and links to foreign translations and other materials. In 1997, the National Endowment for the Humanities considered the site so educational and exemplary that it formally cited Eldritch Press as one of the top twenty humanities sites on the Web. +={Eldritch Press+12;Internet:communication system, as+3} + +Although it was only a one-person project, Eldritch Press was not just an idiosyncratic innovation. The convergence of telecommunications, personal computers, and software in the 1990s, otherwise known as the Internet, was facilitating an explosion of new genres of public expression. We are still grappling with how this new type of media system is different from broadcasting and other mass media. But we do know this: it invites mass participation because the system doesn’t require a lot of capital or professional talent to use. The system favors decentralized interactivity over centralized control and one-way communication. Ordinary people find it relatively inexpensive and versatile. Since everyone has roughly the same access and distribution capacities, the Internet is perhaps the most populist communication platform and egalitarian marketplace in human history. +={Internet:mass participation in} + +This was not the goal of the computer scientists who invented the Internet, of course. Working under the auspices of the U.S. military, they were chiefly concerned with building a communications system that would allow academic researchers to share computerized information cheaply and easily. 
The idea was that intelligence and innovation would arise from the “edges” of a “dumb” network, and not be controlled by a centralized elite in the manner of broadcasting or book publishing. The Internet — a network of networks — would be a platform open to anyone who used a shared set of freely accessible “protocols,” or standardized code, for computer hardware and software.~[* The Internet protocols that enable different computers and networks to connect despite their differences are TCP/IP, which stands for Transmission Control Protocol/Internet Protocol. These protocols enabled the commons known as the Internet to emerge and function, and in turn to host countless other commons “on top” of it.]~ +={Internet:protocols of+1} + +What was radically new about the network architecture was its freedom: No special qualifications or permissions were needed to communicate or “publish.” No one needed to pay special fees based on usage. Anyone could build her own innovative software on top of the open protocols. It is a measure of the system’s power that it has spawned all sorts of innovations that were not foreseen at the outset: in the 1990s, the World Wide Web, instant messaging, peer-to-peer file sharing, and Web logs, and, in the 2000s, podcasts, wikis, social networking software, and countless other applications. The open, shared protocols of the Internet provided an indispensable communications platform for each of these innovations to arise. +={free culture:Internet, of the;Internet:architecture of} + +In building his online archive, Eric Eldred was part of this new cultural cohort of innovators. He not only shared Richard Stallman’s dream — to build an open, sharing community. He also came to share Stallman’s contempt for the long arm of copyright law. The problem, in Eldred’s case, was the corporate privatization of large portions of the public domain.
In the 1990s, the Walt Disney Company was worried that its flagship cartoon character, Mickey Mouse, would enter the public domain and be freely available for anyone to use. Mickey, originally copyrighted in 1928, was nearing the end of his seventy-five-year term of copyright and was due to enter the public domain in 2003. +={copyright law:public domain vs.+6;Eldred, Eric:public domain, and;public domain:copyright law, and+6|privatization of+2;Stallman, Richard:influence of;Walt Disney Company} + +Disney led a concerted campaign to extend the term of copyrights by twenty years. Under the new law, all works copyrighted after January 1, 1923, would be privately controlled for another twenty years. Corporations would be able to copyright their works for ninety-five years instead of seventy-five years, and the works of individual authors would be a private monopoly for the author’s lifetime plus seventy years. Thousands of works that were expected to enter the public domain in 1999 and following years would remain under copyright until 2019 and beyond. + +Congress readily enacted this twenty-year giveaway of monopoly rights on a unanimous vote, and without any public hearings or debate. Disney was the most visible beneficiary of the law, prompting critics to dub it the Mickey Mouse Protection Act. But its more significant impact was to deprive Americans of access to an estimated four hundred thousand cultural works from the 1920s and 1930s. Books by Sherwood Anderson, music by George Gershwin, poems by Robert Frost, and tens of thousands of other works would remain under private control for no good reason. The law was the eleventh time in the course of four decades that Congress had extended the term of copyright protection. 
American University law professor Peter Jaszi complained that copyright protection had become “perpetual on the installment plan.” +={Copyright Term Extension Act+6;Jaszi, Peter;copyright law:expansion of|purpose of+3} + +The law was astonishingly inefficient and inequitable as well. To preserve the property rights of the 2 percent of works from this period that still had commercial value, the law also locked up the remaining 98 percent of works (whose owners are often unknown or unable to be located in order to grant permissions). Indeed, it was these “orphan works” — works still under copyright but not commercially available, and with owners who often could not be found — that represent an important “feedstock” for new creativity. The Sonny Bono Act showered a windfall worth billions of dollars to the largest entertainment businesses and authors’ estates. +={copyright law:orphan works} + +At a more basic level, the copyright term extension showed contempt for the very rationale of copyright law. Copyrights are intended as an inducement to authors to create works. It is a government grant of monopoly property rights meant to help authors earn money for producing books, music, film, and other works. But, as Lessig pointed out, “You can’t incent a dead person. No matter what we do, Hawthorne will not produce any more works, no matter how much we pay him.” Jack Valenti replied that longer copyright terms would give Hollywood the incentive to preserve old films from deteriorating and make them available. + +The copyright term extension act privatized so many of the public domain books on the Eldritch Press Web site, and so offended Eldred’s sense of justice, that in November 1998 he decided to close his site in protest. The new law meant that he would not be able to add any works published since 1923 to his Web site until 2019. “I can no longer accomplish what I set out to do,” said Eldred.~{ Ibid. 
}~ +={Eldred, Eric:public domain, and|Lessig, and+3;Lessig, Lawrence+3:Eldred, and+3} + +As luck had it, Larry Lessig was looking for an Everyman of the Internet. Lessig, then a thirty-seven-year-old professor at Harvard Law School, was looking for a suitable plaintiff for his envisioned constitutional test case. He had initially approached Michael S. Hart, the founder of Project Gutenberg, the first producer of free electronic books. At the time, the project had nearly six thousand public-domain books available online. (It now has twenty thousand books; about 3 million books are downloaded every month.) Hart was receptive to the case but had his own ideas about how the case should be argued. He wanted the legal complaint to include a stirring populist manifesto railing against rapacious copyright holders. Lessig demurred and went in search of another plaintiff.~{ Richard Poynder interview with Lawrence Lessig, “The Basement Interviews: Free Culture,” April 7, 2006, p. 26, available at http://poynder.blogspot.com/2006/03/basement-interviews.html. See also Steven Levy, “Lawrence Lessig’s Supreme Showdown,” /{Wired}/, October 2002, pp. 140–45, 154–56, available at http://www.wired.com/wired/archive/10.10/lessig.html. Project Gutenberg is at http://www.gutenberg.org. }~ +={Hart, Michael S.;Project Gutenberg} + +After reading about Eldred’s protests in the /{Boston Globe}/, and meeting with him over coffee, Lessig asked Eldred if he would be willing to be the plaintiff in his envisioned case. Eldred readily agreed. As a conscientious objector and draft resister during the Vietnam War, he was ready to go to great lengths to fight the Sonny Bono Act. “Initially, I volunteered to violate the law if necessary and get arrested and go to jail,” Eldred said. “But Larry told me that was not necessary.” A good thing, because under the No Electronic Theft Act, passed in 1997, Eldred could be charged with a felony.
“I could face jail, fines, seizure of my computer, termination of my Internet service without notice — and so all the e-books on the Web site could be instantly lost,” he said. +={No Electronic Theft Act (1997)} + +It was the beginning of a landmark challenge to the unchecked expansion of copyright law. The case would turbocharge Lessig’s unusual career and educate the press and public about copyright law’s impact on democratic culture. Most significantly, it would, in time, spur the growth of an international free culture movement. +={copyright law:expansion of;Eldred, Eric:copyright law, and} + +2~ Larry Lessig’s Improbable Journey +={Lessig, Lawrence+49} + +Since Lessig looms so large in this story, it is worth pausing to understand his roots. Raised by culturally conservative, rock-ribbed Republican parents in central Pennsylvania, Lessig was a bright kid with a deep enthusiasm for politics. “I grew up a right-wing lunatic Republican,” Lessig told journalist Steven Levy, noting that he once belonged to the National Teen Age Republicans, ran a candidate’s unsuccessful campaign for the Pennsylvania state senate, and attended the 1980 Republican National Convention, which nominated Ronald Reagan for president. Larry’s father, Jack, was an engineer who once built Minuteman missile silos in South Dakota (where Lessig was born in 1961), and who later bought a steel-fabrication company in Williamsport, Pennsylvania.~{ Wikipedia entry, at http://en.wikipedia.org/wiki/Lessig; Levy, “Lawrence Lessig’s Supreme Showdown.” }~ +={Lessig, Lawrence:background of+4} + + +Lessig initially thought he would follow in his father’s footsteps, and so he went to the University of Pennsylvania to earn degrees in economics and management. Later, studying philosophy at Trinity College in Cambridge, England, he faced growing doubts about his deep-seated libertarian worldview.
Hitchhiking through Eastern Bloc countries, Lessig gained a new appreciation for the role of law in guaranteeing freedom and making power accountable. “There were many times when people in Eastern Europe would tell me stories about the history of the United States that I had never been taught: things like the history of how we treated Native Americans; and the history of our intervention in South America; and the nature of our intervention in South East Asia,” Lessig told Richard Poynder in 2006. “All of those were stories that we didn’t tell ourselves in the most accurate and vivid forms.” These experiences, said Lessig, “opened up a channel of skepticism in my head.”~{ Poynder interview with Lessig, April 7, 2006. }~ + +Lessig’s sister Leslie once told a reporter that Larry came back from Cambridge a very different person: “His views of politics, religion, and his career had totally flipped.”~{ Levy, “Lawrence Lessig’s Supreme Showdown.” }~ No longer aspiring to be a businessman or a philosopher, Lessig set his sights on law and entered the University of Chicago Law School in 1986. He transferred the next year to Yale Law School (to be near a girlfriend), groomed himself to be a constitutional law scholar, and graduated in 1989. + +Although he now considered himself a liberal, Lessig spent the next two years in the service of two of the law’s most formidable conservatives. He clerked for circuit court judge Richard Posner in 1988–89, followed by a year clerking for Supreme Court justice Antonin Scalia during the 1990–91 term. His educational odyssey complete, the thirty-year-old Lessig settled into the life of a tenured law professor at the University of Chicago Law School. +={Posner, Richard;Scalia, Antonin} + +One of Lessig’s early scholarly concerns — adjudication — was not exactly a warm-up for tub-thumping activism. But it did curiously prefigure his later interest in using law as a tool to effect political change. 
In a 1993 law review article, Lessig wondered how courts should interpret the law when public sentiment and practice have changed. If a judge is going to be true to the original meaning of a law, Lessig argued, he must make a conscientious “translation” of the law by taking account of the contemporary context. A new translation of the law is entirely justified, and should supplant an old interpretation, Lessig argued, if prevailing social practices and understandings have changed. The important thing in interpreting law, therefore, is “fidelity in translation.”~{ Lawrence Lessig, “Fidelity in Translation,” /{Texas Law Review}/ 71, no. 1165 (May 1993). }~ +={law:social change, and+8;Lessig, Lawrence: law in contemporary context, and+8} + +Lessig elaborated on this theme in a 1997 article that spent twenty-seven dense pages pondering how two different Supreme Courts, separated by nearly a century, could look to identical words in the Constitution and reach precisely opposite conclusions.~[* The Erie ruling held that federal common law, previously recognized by the U.S. Supreme Court in 1842, was unconstitutional.]~ It is not as if one Court or the other was unprincipled or wrong, Lessig wrote. Rather, any court must take account of contemporary social norms and circumstances in “translating” an old law for new times. Lessig called this dynamic the "/{Erie}/-effect," a reference to the U.S. Supreme Court’s 1938 ruling in /{Erie Railroad Co. v. Tompkins}/. The /{Erie}/-effect is about the emergence of “a kind of contestability about a practice within a legal institution,” which prompts “a restructuring of that practice to avoid the rhetorical costs of that contestability.”~{ Lawrence Lessig, “Erie-Effects of Volume 110: An Essay on Context in Interpretive Theory,” /{Harvard Law Review}/ 110, no. 1785 (1997). }~ +={Erie Railroad Co. v.
Tompkins+1} + +Lessig described how an /{Erie}/-effect might be exploited to catalyze a political shift (paraphrased here): /{identify}/ a socially contested law, aim to /{force}/ the conflicting social practice into the foreground by /{inflaming}/ conventional discourse, and then /{argue}/ for a change in legal interpretation in order to relieve the contestability that has been alleged.~{ Ibid., p. 1809. }~ If the conflict between the law and actual social practice can be made vivid enough, a court will feel pressure to reinterpret the law. Or the court will defer to the legislature because the very contestability of the law makes the issue a political question that is inappropriate for a court to resolve. One notable instance of the /{Erie}/-effect in our times, Lessig pointed out, was the successful campaign by feminist law scholar Catherine MacKinnon to define sexual harassment in the workplace as a form of illegal discrimination. The point was to transform popular understanding of the issue and then embody it in law. +={MacKinnon, Catherine} + +Lessig was not especially focused on tech issues until he ran across Julian Dibbell’s article “A Rape in Cyberspace,” which appeared in the /{Village Voice}/ in December 1993.~{ Julian Dibbell, “A Rape in Cyberspace: How an Evil Clown, a Haitian Trickster Spirit, Two Wizards, and a Cast of Dozens Turns a Database into a Society,” /{Village Voice}/, December 21, 1993, pp. 36–42, reprinted in Mark Stefik, /{Internet Dreams: Archetypes, Myths, and Metaphors}/ (Cambridge, MA: MIT Press, 1997), pp. 293–315, Dibbell quote at p. 296. }~ The piece described the social havoc that ensued in an online space, LambdaMOO, hosted at Xerox Palo Alto Research Center. One pseudonymous character “raped” another in the virtual space, using cruel words and graphic manipulations. The incident provoked an uproar among the thousand members of LambdaMOO, and had real emotional and social consequences. 
Yet, as Dibbell pointed out, “No bodies touched. Whatever physical interaction occurred consisted of a mingling of electronic signals sent from sites spread out between New York City and Sydney, Australia.” +={Dibbell, Julian:"A Rape in Cyberspace"+1;law:cyberspace, of+3} + +For Lessig, the LambdaMOO “rape” had an obvious resonance with Catherine MacKinnon’s arguments in her 1993 book /{Only Words}/. Does a rape in cyberspace resemble the harms inflicted on real women through pornography? Lessig saw intriguing parallels: “I really saw cyberspace as a fantastic opportunity to get people to think about things without recognizing the political valences. That’s all I was interested in; it was purely pedagogical.”~{ Interview with Lawrence Lessig, March 20, 2006. }~ +={MacKinnon, Catherine} + +To explore the issues further, Lessig developed one of the first courses on the law of cyberspace. He taught it in the spring semester of 1995 at Yale Law School, where he was a visiting professor, and later at the University of Chicago and Harvard law schools. During the Yale class, an exchange with a student, Andrew Shapiro, jarred his thinking in a new direction: “I was constantly thinking about the way that changing suppositions of constitutional eras had to be accounted for in the interpretation of the Constitution across time. Andrew made this point about how there’s an equivalent in the technical infrastructure [of the Internet] that you have to think about. And then I began to think about how there were norms and law and infrastructure — and then I eventually added markets into this — which combine to frame what policymaking is in any particular context.”~{ Ibid. }~ +={Shapiro, Andrew} + +This line of analysis became a central theme of Lessig’s startling first book, /{Code and Other Laws of Cyberspace}/, published in 1999.~{ Lessig, /{Code and Other Laws of Cyberspace}/ (New York: Basic Books, 1999). 
}~ /{Code}/ took on widespread assumptions that the Internet would usher in a new libertarian, free-market utopia. Cyberlibertarian futurists such as Alvin Toffler, Esther Dyson, George Gilder, and John Gilmore had routinely invoked cyberspace as a revolutionary force that would render government, regulation, and social welfare programs obsolete and unleash the transformative power of free markets.~{ Esther Dyson, George Gilder, George Keyworth, and Alvin Toffler, “Cyberspace and the American Dream: A Magna Carta for the Knowledge Age,” Progress and Freedom Foundation, August 1994, available at http://www.pff.org/issues-pubs/futureinsights/fil.2magnacarta.html. }~ In the libertarian scenario, individual freedom can flourish only if government gets the hell out of the way and lets individuals create, consume, and interact as they see fit, without any paternalistic or tyrannical constraints. Prosperity can prevail and scarcity disappear only if meddling bureaucrats and politicians leave the citizens of the Internet to their own devices. As Louis Rossetto, the founder and publisher of /{Wired}/, bluntly put it: “The idea that we need to worry about anyone being ‘left out’ is entirely atavistic to me, a product of that old economics of scarcity and the 19th century social thinking that grew out of it.”~{ David Hudson, interview with Louis Rossetto, “What Kind of Libertarian,” /{Rewired}/ (Macmillan, 1997), p. 255. }~ +={code:law, as+4;law:code as+4;Lessig, Lawrence:Code and Other Laws of Cyberspace+4;Dyson, Esther;Gilder, George;Gilmore, John;Rossetto, Louis;Toffler, Alvin;Internet:architecture of+2|freedom of+1;cyberspace: economic effects of} + +Lessig was more wary. In /{Code}/, he constructed a sweeping theoretical framework to show how freedom on the Internet must be actively, deliberately constructed; it won’t simply happen on its own. 
Inspired by conversations with computer programmer Mitch Kapor, who declared that “architecture is politics” in 1991, Lessig’s book showed how software code was supplanting the regulatory powers previously enjoyed by sovereign nation-states and governments. The design of the Internet and software applications was becoming more influential than conventional sources of policymaking — Congress, the courts, federal agencies. /{Code is law}/, as Lessig famously put it. +={Kapor, Mitch} + +What was worrisome, Lessig warned, was how relatively small changes in software code could alter the “architecture of control” governing the Internet. The current architecture was not necessarily stable and secure, in other words. Moreover, any future changes were likely to be animated by private, commercial forces and not publicly accountable and democratic ones. Lessig illustrated this point with a disarmingly simple drawing of a dot representing an individual, whose range of behaviors is affected by four distinct forces: software architecture, the market, law, and social norms. Each of these factors conspires to regulate behaviors on the Internet, Lessig argued — and commercial forces would clearly have the upper hand. + +/{Code}/ was a powerful and sobering rebuttal to libertarian assumptions that “keeping government out” would safeguard individual freedom. Its analysis quickly became the default conceptual model for talking about governance on the Internet. It helped situate many existing policy debates — Internet censorship, digital privacy, copyright disputes — in a larger political and policy framework. Although many readers did not share Lessig’s pessimism, /{Code}/ helped expose an unsettling truth — that a great many legislators, federal agencies, and courts were largely oblivious to the regulatory power of software code. 
They didn’t have a clue about the technical structures or social dynamics affecting life on the Internet, let alone how existing law would comport with this alien domain. +={Internet:freedom of} + +/{Code}/ was widely praised and widely read. But it was only one project of that period that catapulted Lessig to international prominence. In the mid-1990s, Charles Nesson, a bold-thinking, highflying evidence professor at Harvard Law School, was organizing the Berkman Center for Internet & Society. The new project aspired to study “the most difficult and fundamental problems of the digital age,” and show public-interest leadership in addressing them. Nesson, who had become modestly famous for his role in the W. R. Grace litigation chronicled in Jonathan Harr’s /{A Civil Action}/, recruited Lessig to be the Berkman Center’s marquee star in 1997. It was an irresistibly prestigious and visible perch. +={Berkman Center for Internet & Society;Nesson, Charles} + +This was demonstrated within months, when Judge Penfield Jackson tapped Lessig to be a “special master” in one of the most important antitrust cases in a generation, /{U.S. v. Microsoft}/.~{ Steven Levy, “The Great Liberator,” /{Wired}/, October 2002, and Poynder interview with Lessig, April 7, 2006. }~ Lessig’s assignment was to sift through the welter of technical claims and counterclaims in the case and produce a report with recommendations to the court. The government alleged that Microsoft had abused its monopoly power in its sales of its operating system and Web browser, particularly in “bundling” the browser with the Windows operating system. +={Jackson, Penfield;Microsoft:antitrust lawsuit against+1;Lessig, Lawrence:Microsoft lawsuit, and+1} + +Microsoft soon raised questions about Lessig’s neutrality as a special master. Among other objections, the company cited his book’s claim that software code is political and a passage that said Microsoft was “absolutely closed” compared to an open-standards body. 
It also dredged up an e-mail in which Lessig facetiously equated using Microsoft’s Internet Explorer with “selling one’s soul.” After nearly eight weeks on the job, the Court of Appeals, citing a technicality, took Lessig off the case, to his enduring disappointment. He has been deeply frustrated by the implication that he had been removed for bias (the court made no such finding) and by his abrupt banishment from a plum role in a landmark case. +={Lessig, Lawrence:Code and Other Laws of Cyberspace} + +2~ Waging the /{Eldred}/ Case + +Back at the Berkman Center, however, there were plenty of opportunities to influence the digital future. The center was a hothouse of venturesome ideas and eccentric visionaries. It was a place where John Perry Barlow could drop by to talk with Lessig and Berkman co-founder Jonathan Zittrain, one of the early cyberlaw experts. The center drew upon the ideas of intellectual property guru William (Terry) Fisher; Charles Nesson, who specialized in launching Big Ideas; and a self-renewing batch of bright law students eager to make their mark on a hip and emerging field of law. Richard Stallman at nearby MIT was an occasional visitor, as was MIT computer scientist Hal Abelson, who combined deep technical expertise with an appreciation of the social and democratic implications of digital technologies. It was during this time, in 1998, that Lessig and Abelson jointly taught The Law of Cyberspace: Social Protocols at Harvard Law School. The class was an attempt to make sense of some novel legal quandaries exploding on the Internet, such as computer crime, identity authentication, digital privacy, and intellectual property. 
+={Berkman Center for Internet & Society+5;Nesson, Charles;Abelson, Hal:cyberlaw, and;Barlow, John Perry;Fisher, William (Terry);Stallman, Richard;Zittrain, Jonathan;Internet:legal quandaries arising on;law:cyberspace, of} + +While nourished by the work of his academic colleagues, Lessig was determined to come up with ingenious ways to /{do something}/ about the distressing drift of copyright law. It was important to take the offensive. Notwithstanding the pessimism of /{Code}/, Lessig’s decidedly optimistic answer was to gin up a constitutional challenge to copyright law. Many legal experts and even sympathetic colleagues were skeptical. Peter Jaszi, a leading intellectual property law professor at American University, told a reporter at the time, “It’s not so much that we thought it was a terrible idea but that it was just unprecedented. Congress has been extending copyright for 180 years, and this is the first time someone said it violated the Constitution.”~{ David Streitfeld, “The Cultural Anarchist vs. the Hollywood Police State,” /{Los Angeles Times Magazine}/, September 22, 2002, p. 32. }~ Others worried that an adverse ruling could set back the larger cause of copyright reform. +={Jaszi, Peter;Lessig, Lawrence:Code and Other Laws of Cyberspace;law:social change, and+3;copyright law:expansion of} + +In the spirit of the commons, Lessig and his Berkman Center colleagues decided that the very process for mounting the /{Eldred}/ lawsuit would be different: “Rather than the secret battles of lawyers going to war, we will argue this case in the open. This is a case about the commons; we will litigate it in the commons. Our arguments and strategy will be developed online, in a space called ‘openlaw.org.’ Key briefs will be drafted online, with participants given the opportunity to criticize the briefs and suggest other arguments. . . . 
Building on the model of open source software, we are working from the hypothesis that an open development process best harnesses the distributed resources of the Internet community. By using the Internet, we hope to enable the public interest to speak as loudly as the interests of corporations.”~{ Lawrence Lessig, “Commons Law,” June 24, 1999, posted on www.intellectualcapital.com/issues/issue251/item5505.asp, and Open Law archive at http://cyber.law.harvard.edu/openlaw. }~ +={Eldred v. Reno/Eldred v. Ashcroft+28;Lessig, Lawrence:Eldred v. Reno, and+28|law in contemporary context, and+1} + +Emulating the open-source development model was a nice touch, and perhaps useful; dozens of people around the world registered at the Openlaw site and posted suggestions. Some of the examples and legal critiques were used in developing the case, and the model was later used by lawyers in the so-called DeCSS case, in which a hacker broke the encryption of a DVD. But it turns out that open, distributed creativity has its limits in the baroque dance of litigation; it can’t work when secrecy and confidentiality are important, for example. + +The case, /{Eldred v. Reno}/ — later renamed /{Eldred v. Ashcroft}/ when the Bush II administration took office — was filed in federal district court in Washington, D.C., on January 11, 1999.~{ /{Eldred v. Reno}/ (later, Eldred v. Ashcroft), 537 U.S. 186 (2003), affirming 239 F. 3d 372. }~ The complaint argued that the Copyright Term Extension Act violated Article 1, section 8, clause 8, of the Constitution, which provides that copyright protection shall be of limited duration. It also argued that the Term Extension Act violated the free speech clause of the First Amendment. In some respects, the case could never have been waged without the foundation of legal scholarship produced in the 1990s, which rehearsed a great many of the arguments presented to the Court. 
In opposition were motion picture studios, the music industry, and book publishers. They argued that Congress had full authority under the Constitution to extend copyright terms, as it had done since the beginning of the republic. +={copyright law:expansion of;Copyright Term Extension Act;Copyright Clause, U.S. Constitution} + +In October 1999, the U.S. District Court brusquely dismissed the case without even holding a trial. Lessig and his Berkman colleagues were not entirely surprised, and quickly set about filing an appeal with the U.S. Court of Appeals for the District of Columbia Circuit. Going beyond the Openlaw experiment at Berkman, they enlisted the support of several lawyers at Jones, Day, Reavis & Pogue. On appeal, Lessig was allowed to argue the case personally to a panel of judges. But once again, in February 2001, the case was dismissed. Lessig considered it a significant victory that it was a 2-1 ruling, however, which meant that a further appeal was possible. Lessig was also encouraged that the dissenter had been the court’s most conservative member, Judge David Sentelle. Lessig requested that the full circuit court hear the case — a petition that was also rejected, this time after picking up support from a liberal dissenter, Judge David Tatel. +={Jones, Day, Reavis & Pogue+4;Sentelle, David;Tatel, David} + +Normally, this would have been the end of the road for a case. Very few appeals court cases are accepted for review by the U.S. Supreme Court, particularly when the case has not even been argued at trial and no other courts have passed judgment on the statute. So it was quite surprising when the Supreme Court, in February 2002, accepted /{Eldred}/ for review and scheduled oral arguments for October 2002. +={Eldred v. Reno/Eldred v. Ashcroft:Supreme Court, and+11} + +At this point, Lessig realized he needed the advice and support of some experienced Supreme Court litigators. 
He enlisted help from additional lawyers at Jones, Day; Alan Morrison of Public Citizen Litigation Group; Kathleen Sullivan, the dean of Stanford Law School; and Charles Fried, a former solicitor general under President Reagan. Professor Peter Jaszi and the students of his law clinic drafted an amicus brief. +={Morrison, Alan;Fried, Charles;Jaszi, Peter;Sullivan, Kathleen} + +A key concern was how to frame the arguments. Attorney Don Ayer of Jones, Day repeatedly urged Lessig to stress the dramatic harm that the Bono Act was inflicting on free speech and free culture. But as Lessig later confessed, “I hate this view of the law. . . . I was not persuaded that we had to sell our case like soap.”~{ Lessig, “How I Lost the Big One,” /{Legal Affairs}/, March/April 2004, available at http://www.legalaffairs.org/issues/March-April-2004/story_lessig_marapr04.msp. }~ Lessig was convinced that the only way /{Eldred}/ could prevail at the Supreme Court would be to win over the conservative justices with a matter of principle. To Lessig, the harm was obvious; what needed emphasis was how the Sonny Bono Act violated “originalist” principles of jurisprudence. (Originalist judges claim to interpret the Constitution based on its “original” meanings in 1791, which includes a belief that Congress has strictly enumerated powers, not broad legislative discretion.) +={Ayer, Don;law:originalist principles of+2} + +“We tried to make an argument that if you were an originalist— in the way these conservative judges said they were in many other cases — then you should look to the original values in the Copyright Clause,” said Lessig. “And we argued that if you did that then you had to conclude that Congress had wildly overstepped its constitutional authority, and so the law should be struck down.”~{ Lessig interview with Richard Poynder, April 7, 2006, p. 25. 
}~ Flaunting the harm caused by the copyright term extension struck Lessig as showy and gratuitous; he considered the harm more or less self-evident. In the aftermath of a public debate that Lessig once had with Jack Valenti, a questioner on Slashdot, a hacker Web site, suggested that Lessig would be more persuasive if he asserted “a clear conception of direct harm . . . than the secondary harm of the copyright holders getting a really sweet deal.” Lessig conceded that such a focus “has been a weakness of mine for a long time. In my way of looking at the world, the point is a matter of principle, not pragmatics. . . . There are many others who are better at this pragmatism stuff. To me, it just feels insulting.”~{ “Lawrence Lessig Answers Your Questions,” Slashdot.org, December 21, 2001, Question 1, “The question of harm,” posted by “caduguid,” with Lessig response, available at http://interviews.slashdot.org/article.pl?sid=01/12/21/155221. }~ +={copyright law:expansion of;Copyright Clause, U.S. Constitution;Valenti, Jack} + +And so, despite warnings to the contrary, Lessig’s legal strategy relied on a call to uphold originalist principles. Having clerked for Justice Scalia and Judge Posner, Lessig felt that he understood the mind-set and sympathies of the conservative jurists. “If we get to the Supreme Court,” Lessig told Slashdot readers in December 2001, “I am certain that we will win. This is not a left/right issue. The conservatives on the Court will look at the framers’ Constitution— which requires that copyrights be granted for ‘limited times’ — and see that the current practice of Congress . . . makes a mockery of the framers’ plan. And the liberals will look at the effect of these never-ending copyrights on free speech, and conclude that Congress is not justified in this regulation of speech. 
The Supreme Court doesn’t give a hoot about Hollywood; they will follow the law.”~{ Lessig response to question 11, Slashdot.org, “Will the extension of copyright continue?” posed by “Artifice_Eternity,” available at http://interviews.slashdot.org/article.pl?sid=01/12/21/155221. }~ +={Posner, Richard;Scalia, Antonin;Copyright Clause, U.S. Constitution;copyright law:expansion of+5;Copyright Term Extension Act+5} + +Lessig took pride in the fact that thirty-eight amicus briefs were filed on behalf of /{Eldred}/. They included a wide range of authors, computer and consumer electronics companies, and organizations devoted to arts, culture, education, and journalism. Besides the usual suspects like the Free Software Foundation, Electronic Frontier Foundation, and Public Knowledge, supporting briefs were filed by fifteen economists including Kenneth Arrow and Milton Friedman, Phyllis Schlafly of the Eagle Forum, and the Intel Corporation. + +At oral arguments, Lessig immediately confronted a skeptical bench. Justice Sandra Day O’Connor worried about overturning years of previous copyright term extensions. Justice William Rehnquist proposed, “You want the right to copy verbatim other people’s books, don’t you?” And when Justice Anthony Kennedy invited Lessig to expound upon the great harm that the law was inflicting on free speech and culture, Lessig declined the opportunity. He instead restated his core constitutional argument, that copyright terms cannot be perpetual. “This was a correct answer, but it wasn’t the right answer,” Lessig later confessed in a candid postmortem of the case. “The right answer was to say that there was an obvious and profound harm. Any number of briefs had been written about it. Kennedy wanted to hear it. And here was where Don Ayer’s advice should have mattered. This was a softball; my answer was a swing and a miss.”~{ See http://www.supremecourtus.gov/oral_arguments/argument_transcripts/01-618.pdf. 
See also Lessig, “How I Lost the Big One,” and Linda Greenhouse, “Justices Hear Arguments in Challenge to Copyrights,” /{New York Times}/, October 10, 2002. A number of Supreme Court opinions in the /{Eldred}/ case can be found at the Openlaw archive at http://cyber.law.harvard.edu/openlaw/eldredvreno. The /{Loyola Los Angeles Law Review}/ held a symposium on /{Eldred v. Ashcroft}/, available at http://llr.lls.edu/volumes/v36-issue1. }~ No justices spoke in defense of the Sonny Bono Act. +={Ayer, Don;Kennedy, Anthony;O’Connor, Sandra Day;Rehnquist, William} + +Yet they had clear reservations about the Supreme Court’s authority to dictate the length of copyright terms. + +A few months later, on January 15, 2003, the Supreme Court announced its ruling: a 7-2 defeat for Eldred. The majority opinion, written by Justice Ruth Bader Ginsburg, did not even raise the “enumerated powers” argument or engage with originalist philosophy. “We are not at liberty to second-guess Congressional determinations and policy judgments of this order, however debatable or arguably unwise they may be,” Ginsburg wrote.~{ 537 U.S. 186 (2003). See also “Court Majority Says It Won’t Second-Guess Congress,” /{New York Times}/, January 16, 2003, p. A22. }~ She likewise ignored the idea that there is a “copyright bargain” between the American people and copyright holders, which entitles the public to certain rights of access to the public domain. As for copyright’s impact on free speech, Ginsburg invoked the fair use doctrine and the “idea/expression dichotomy” (the notion that ideas are freely available but expression can be copyrighted) as sufficient protections for the public. She ignored the fact that both doctrines were (and are) under fierce assault. 
+={Ginsburg, Ruth Bader;law:originalist principles of;fair use doctrine:copyright law, and;copyright law:fair use doctrine, and|balance of public and private rights} + +Justices Stephen Breyer and John Paul Stevens accepted Lessig’s arguments, and wrote separate dissents. Breyer — a respected scholar of copyright law since his famous 1970 essay “The Uneasy Case for Copyright”~{ Stephen Breyer, “The Uneasy Case for Copyright,” /{Harvard Law Review}/ 84, no. 281 (1970). }~ —agreed that copyright terms had effectively become perpetual, and that the law was therefore unconstitutional. Stevens complained that the majority decision reneged on the copyright bargain and made copyright law “for all intents and purposes judicially unreviewable.” +={Breyer, Stephen;Stevens, John Paul} + +In assessing the broad impact of the /{Eldred}/ ruling, copyright scholar Siva Vaidhyanathan cited law professor Shubha Ghosh’s observation that the /{Eldred}/ ruling had effectively “deconstitutionalized” copyright law. /{Eldred}/ pushed copyright law +={Ghosh, Shubha;Vaidhyanathan, Siva+1} + +_1 farther into the realm of policy and power battles and away from principles that have anchored the system for two centuries. That means public interest advocates and activists must take their battles to the public sphere and the halls of Congress. We can’t appeal to the Founders’ wishes or republican ideals. We will have to make pragmatic arguments in clear language about the effects of excessive copyright on research, teaching, art and journalism. And we will have to make naked mass power arguments with echoes of “we want our MP3” and “it takes an industry of billions to hold us back.”~{ Siva Vaidhyanathan, “After the Copyright Smackdown: What Next?” /{Salon}/, January 17, 2003, at http://www.salon.com/tech/feature/2003/01/17/copyright.print.html. }~ +={copyright law:balance of public and private rights} + +2~ A Movement Is Born +={Eldred v. Reno/Eldred v. 
Ashcroft:effects of+12} + +The /{Eldred}/ case had a paradoxical effect. Early on, Lessig had said, “We didn’t want to make it a big political cause. We just wanted to make it an extension of the existing Supreme Court jurisprudence, because we realized that the only way to win the case was to win the conservatives’ view, and the conservatives were not likely to be motivated by great attacks on media concentration.”~{ Interview with Poynder, April 7, 2006, p. 25. }~ The upshot of the Court’s ruling was to intensify the political battles over copyright law. While such resistance was already growing, the /{Eldred}/ ruling and the publicity surrounding it spawned a new generation of “copyfighters.” Lessig had wanted to protect the commons through law, only to find that the courts were unwilling to offer any help. Any answers would now have to be pursued through politics, culture, and technology — and ingenious uses of law where feasible. How to proceed in this uncharted territory became the next challenge, as we see in chapter 4. + +After four years of relentless work, Lessig was frustrated and dejected. “I had failed to convince [the Supreme Court] that the issue was important,” he wrote in a frank confessional, “and I had failed to recognize that however much I might hate a system in which the court gets to pick the constitutional values that it will respect, that is the system we have.”~{ Lessig, “How I Lost the Big One.” See also Lessig, /{Free Culture}/ (New York: Penguin, 2004), pp. 228–48. }~ For a constitutional law scholar, it was a rude awakening: constitutional originalists could not be taken at their word! Scalia and fellow justice Clarence Thomas had declined to stand behind their jurisprudential principles. +={law:originalist principles of+1;Scalia, Antonin;Thomas, Clarence} + +Yet Lessig had certainly been correct that /{Eldred}/ would not succeed unless it convinced the Court’s conservative majority. 
The fact that the originalist gambit failed was perhaps the strongest message of all: /{nothing}/ would convince this Court to rein in the excesses of copyright law. + +Even before the Supreme Court had delivered its ruling, Lessig admitted his misgivings about the power of law to solve copyright’s failings: “The more I’m in this battle, the less I believe that constitutional law on its own can solve the problem. If Americans can’t see the value of freedom without the help of lawyers, then we don’t deserve freedom.”~{ Lessig response to Question 11, “Cyberspace Amendment,” posed by “kzinti,” in Slashdot, available at http://interviews.slashdot.org/article.pl?sid=01/12/21/155221. }~ Yet mobilizing freedom-loving Americans to seek redress from Congress was also likely to be doomed. Hollywood film studios and record companies had showered some $16.6 million and $1.8 million, respectively, on federal candidates and parties in 1998. Legislators know who butters their bread, and the public was not an organized influence on this issue. No wonder a progressive copyright reform agenda was going nowhere. +={Copyright Term Extension Act+1;Eldred v. Reno/Eldred v. Ashcroft:Supreme Court, and;law:limited power of;copyright law:expansion of+1} + +Four years after the /{Eldred}/ ruling, Lessig had some second thoughts about the “Mickey Mouse” messaging strategy. Opponents of the copyright term extension, including Lessig, had often flaunted Mickey motifs in their dealings with the press and railed at the “Mickey Mouse Protection Act.” Yet in 2006, Lessig lamented to one interviewer that “the case got framed as one about Mickey Mouse. Whereas the reality is, who gives a damn about Mickey Mouse? The really destructive feature of the Sonny Bono law is the way it locks up culture that has no continuing commercial value at all. It orphaned culture. So by focusing on Mickey Mouse, the Court thought this was an issue of whether you believed in property or not. 
If, however, we had focused people on all the culture that is being lost because it is locked up by copyright, we might have succeeded.”~{ Interview with Poynder, April 7, 2006, pp. 26–27. }~ + +The lasting impact of the /{Eldred}/ case, ironically, may have less to do with the law than with the cultural movement it engendered. The lawsuit provided a powerful platform for educating the American people about copyright law. A subject long regarded as arcane and complicated was now the subject of prominent articles in the /{New York Times}/, /{Salon}/, computer magazines, wire services, and countless other publications and Web sites. A cover story for the /{Los Angeles Times}/'s Sunday magazine explained how the case could “change the way Hollywood makes money — and the way we experience art.” /{Wired}/ magazine headlined its profile of Lessig “The Great Liberator.” Lessig himself barnstormed the country giving dozens of presentations to librarians, technologists, computer programmers, filmmakers, college students, and many others. Even Lessig’s adversary at the district court level, Arthur R. Miller, a Harvard Law School professor, agreed, “The case has sparked a public discussion that wasn’t happening before.” +={Miller, Arthur R.} + +Lessig’s orations often provoked the fervor of a revival meeting — and led to more than a few conversions. This may appear surprising because Lessig, with his receding hairline and wireframe glasses, strikes an unprepossessing pose. In the professorial tradition, he can sometimes be didactic and patronizing. But on the stage, Lessig is stylish, poised, and mesmerizing. His carefully crafted talks are intellectual but entertaining, sophisticated but plainspoken— and always simmering with moral passion. He typically uses a customized version of Keynote, a Macintosh-based program similar to PowerPoint, to punctuate his dramatic delivery with witty visuals and quick flashes of words. 
(Experts in professional presentations have dubbed this style the “Lessig Method,” and likened it to the Takahashi Method in Japan because slides often use a single word, short quote, or photo.)~{ Garr Reynolds’s blog on professional presentation design, “The ‘Lessig Method’ of Presentation,” October 5, 2005, available at http://presentationzen.blogs.com/presentationzen/2005/10/the_lessig_meth.html. }~ + +More than a sidebar, Lessig’s public speaking has been an important aspect of his leadership in building a commons movement. His talks have helped some fairly sequestered constituencies in technical fields — computer programming, library science, Internet policy, copyright law — understand the larger political and cultural significance of their work. The results have sometimes been galvanizing. As one veteran hacker told me in 2006, “There’s a whole connoisseurship of Lessig talks. He’s a little past his peak right now — but there was a period where, like when he gave the lecture at OSCON [a conference of open-source programmers], when he was done, they wanted to start a riot. People were literally milling around, looking for things to smash. He was saying to these people who worked on open source, ‘There’s a larger world context to your work. The government is doing things — and you can stop them!’ ”~{ Interview with Aaron Swartz, October 10, 2006. }~ +={Lessig, Lawrence:public speaker, as} 
This prompted Gigi Sohn, president of Public Knowledge, to declare, “We, too, are building a movement.”~{ Amy Harmon, “Challenge in Copyright Case May Be Just a Beginning,” /{New York Times}/, October 14, 2002. }~ +={Sohn, Gigi} + +So after arguing — and losing — before the U.S. Supreme Court, what does a copyright superstar do for an encore? + +A seed had already been planted at the Starbucks meeting four years earlier. Eldred recalls telling Lessig, “I think this case is very important, and I think you’re the right guy for this. But at the same time, I’d like to talk to you about something else. I really think that we need to start up some sort of a copyright conservancy, which would be sort of like a nature conservancy. It would allow people to donate books to the public domain; we could then take ownership of them. They could maybe have a tax deduction for them, and we could — instead of having the book privately owned — they would be in the public domain, maybe before the copyright term expired. We could sort of have an independent group maintain this conservancy, and allow the books to be put on the Internet for free.” +={copyright conservancy;Eldred, Eric:copyright conservancy, and|Lessig, and+1;Lessig, Lawrence:Eldred, and+1;Eldred, Eric:Lessig, and|public domain, and} + +Eldred remembers that Lessig “was sort of stunned. He didn’t have anything to say for a little while. We sort of looked at each other, and I think he was very shocked and surprised that I said that. And he said, ‘I don’t think we can do it until we’ve done the work on the copyright term extension act suit, but I promise to do it.’”~{ Interview with Eric Eldred, August 1, 2006. }~ + +:B~ PART II + +:C~ The Rise of Free Culture + +1~intro_ii [Intro] -# + +To the commoners seeking to build a new cultural universe, the failure of the /{Eldred}/ case in the U.S. Supreme Court was both depressing and liberating. 
It confirmed what the legal scholars of the 1990s had long suspected — that both Congress and the courts were captives to a backward-looking vision of copyright law. Government was tacitly committed to a world of centralized and commercial mass media managed by elite gatekeepers. That was not likely to change soon. +={Eldred v. Reno/Eldred v. Ashcroft:effects of:Supreme Court, and} + +As for helping build a new digital republic with a more open, democratic character, the Clinton administration made its intentions clear in its infamous White Paper. It wanted to convert the gift economy of the Internet into a wall-to-wall marketplace. It wanted to give sellers absolute control over content and limit the disruptions of innovative newcomers. The government, acting on behalf of the film, record, and book industries, had no desire to legitimize or fortify the sharing culture that was fast gaining a hold on the Internet. Quite the contrary: strengthening the public’s fair use rights, access to the public domain, and online free speech rights might interfere with the perceived imperatives of electronic commerce. /{Freedom}/ would therefore have to be defined as the freedom of consumers to buy what incumbents were selling, not as a robust civic freedom exercised by a sovereign citizenry. +={Clinton administration:White Paper;fair use doctrine:White Paper vs.;White Paper [on copyright];gift economy;Internet:gift economy of;free culture:Internet, of the} + +By the conclusion of /{Eldred}/, in 2003, it was clear that the copyright dissidents were not just confronting one policy battle or another; they were confronting an antiquated and entrenched worldview. While Lessig, Eldred, and the growing band of commoners realized that it was important to pay close attention to pending legislation and lawsuits, many of them also realized that the real challenge was to develop a new vision — and then try to actualize it. 
+={Eldred, Eric:copyright law, and} + +A more affirmative, comprehensive vision was needed to supersede the limited intellectual parameters of copyright law. Copyright law was a mode of property discourse, after all, and that discourse simply could not adequately express the aspirations of hackers, citizen-journalists, librarians, academics, artists, democrats, and others trying to secure open online spaces for themselves. The online insurgents acknowledged the great importance of fair use and the public domain, but they also considered such doctrines to be vestiges of an archaic, fraying legal order. It was time to salvage what was valuable from that order, but otherwise instigate a new language, a new aesthetic, a new legal regime, a new worldview. +={copyright law:property rights, and;property rights:copyright law, and} + +This meant venturing into risky, unknown territory. Law professors accustomed to working within the comfort of the academy would have to clamber onto public stages and set forth idealistic, politically inflected scenarios for Internet culture. Activists accustomed to rhetorical critiques would have to initiate pragmatic, results-driven projects. Free software hackers would have to invent new software and digital protocols. Volunteers would need to be enlisted and organized and funding secured to sustain bare-boned organizational structures. Wholly new constituencies would have to be imagined and mobilized and brought together into something resembling a new movement. Part II, The Rise of Free Culture, describes the building of this foundation from 2000 to 2005. + +1~ 4 INVENTING THE CREATIVE COMMONS + +/{A public-spirited cabal schemes for a way to legalize sharing.}/ + +Larry Lessig remembers his Starbucks conversation with Eric Eldred as a “crystallizing moment,” a revelation that the stakes in copyright reform were much higher than he had originally imagined. 
Both Lessig and Eldred obviously wanted to win the lawsuit and recognized its importance. But Eldred had made clear that he didn’t just want to roll back regressive laws; he wanted to develop an affirmative and sustainable alternative. +={copyright law:property rights, and+1;property rights:copyright law, and+1;Eldred, Eric:copyright law, and+2|Lessig, and+16;Lessig, Lawrence:Eldred, and+16} + +This got Lessig thinking: “So, okay — you get the Supreme Court to strike the laws down, but you still live in a world where people think that everything is property and has to be owned. If nobody has a political awareness about why the judicial response makes sense, then it’s a pretty empty result.”~{ Interview with Lawrence Lessig, March 20, 2006. }~ Throughout the /{Eldred}/ case, paradoxically enough, Lessig says he was “skeptical” of the traditional liberal strategy of seeking redress through the courts. +={Eldred v. Reno/Eldred v. Ashcroft:Supreme Court, and} + + +The turning point for him, Lessig recalled, was in recognizing that Eldred was not just a plaintiff in a test case but “someone trying to build a movement around a practice of making things available in a way that took advantage of the infrastructure of the Net.”~{ Ibid. }~ True, Eldritch Press resembled an old-style archive of canonical works. Yet Eldred’s goal all along had been to host an active social community of book lovers, not just provide a repository for old texts. The Web site’s real importance was in the social activity it represented — the fact that thousands of participant-readers could come together around a self-selected amateur eager to build a new type of social community and information genre. 
+={Eldritch Press;World Wide Web:social activity on+7} + +Lessig told me that when he recognized Eldred’s Web site as a new type of social practice, it helped define the challenge: “The question became a very technical, legal one: How could we instantiate that movement?” Lessig said he needed to find a way to “disambiguate the social practice.” By that bit of tech-legalese, he meant, How could the practices and values animating Eldred’s Web site be articulated in law, denoted on the Web, and thereby be seen for what they were: a new mode of social practice and cultural freedom? + +It helps to remember that in 1998 and the following years, the legality of sharing online works and downloading them was highly ambiguous. Prevailing legal discourse set forth a rather stark, dualistic world: either a work is copyrighted with “all rights reserved,” or a work is in the public domain, available to anyone without restriction. The mental categories of the time offered no room for a “constituency of the reasonable,” in Lessig’s words. +={copyright law:public domain vs.;public domain:copyright law, and} + +Copyright law made nominal provisions for a middle ground in the form of the fair use doctrine and the public domain. But Lessig realized that fair use was “just a terrible structure on which to build freedom. There are basically no bright lines; everything is a constant debate. Of course, we don’t want to erase or compromise or weaken [these doctrines] in any sense. But it’s very important to build an infrastructure that doesn’t depend upon four years of litigation.” Or as Lessig was wont to put it in his impassioned performances on the stump: “Fuck fair use.”~{ Robert S. Boynton, “Righting Copyright: Fair Use and Digital Environmentalism,” /{Bookforum}/, February/March 2005, available at http://www.robertboynton.com/articleDisplay.php?article_id=1. 
}~ +={copyright law:fair use doctrine, and+2;fair use doctrine:copyright law, and+2;Lessig, Lawrence:fair use, on+2} + +This was a theatrical flourish, of course. Back in Palo Alto, Lessig in 2001 had launched the Center for Internet & Society at Stanford Law School, which actively takes on lawsuits seeking to vindicate the public’s fair use rights, among other things. One notable case was against Stephen Joyce, the grandson of novelist James Joyce. As executor of the Joyce literary estate, Stephen Joyce steadfastly prevented dozens of scholars from quoting from the great writer’s archive of unpublished letters.~{ See, e.g., D. T. Max, “The Injustice Collector,” /{New Yorker}/, June 19, 2006, pp. 34ff. }~ (After losing a key court ruling in February 2007, the Joyce estate settled the case on terms favorable to a scholar who had been denied access to the Joyce papers.) +={Joyce, Stephen} + +But Lessig’s intemperance toward fair use has more to do with the almost subliminal void in legal discourse and political culture. There was no way to talk about the social behaviors exemplified by Eldred’s Web site except through crabbed, legalistic rules. The only available language, the default vocabulary, is copyright law and its sanctioned zones of freedom, such as fair use. Lessig wanted to open up a new, more bracing line of discourse. “We wanted to rename the social practice,” he said. It sounds embarrassingly grandiose to state it so bluntly, but in later years it became clear to Lessig and his loose confederation of colleagues that the real goal was to /{imagine and build a legal and technical infrastructure of freedom}/. +={Lessig, Lawrence:freedom, and|law in contemporary context, and+2} + +Initially, the goal was more exploratory and improvisational — an earnest attempt to find leverage points for dealing with the intolerable constraints of copyright law. 
Fortunately, there were instructive precedents, most notably free software, which by 2000, in its opensource guise, was beginning to find champions among corporate IT managers and the business press. Mainstream programmers and corporations started to recognize the virtues of GNU/Linux and opensource software more generally. Moreover, a growing number of people were internalizing the lessons of Code, that the architecture of software and the Internet really does matter. +={free software:open source software, as;GNU/Linux;software:open source;Internet:architecture of+1;Lessig, Lawrence:Code and Other Laws of Cyberspace} + +Even as he sought to prevail in /{Eldred}/, Lessig understood that enduring solutions could not be conferred by the U.S. Supreme Court; they had to be made real through people’s everyday habits. The commoners needed to build a new set of tools to actualize freedom on the Internet, and to develop a new language, a new epistemology, a new vision, for describing the value proposition of sharing and collaboration. The big surprise, as we will see in chapter 6, was the latent social energies poised to support this vision. +={Eldred v. Reno/Eldred v. Ashcroft+7:Supreme Court, and;Internet:freedom of;Lessig, Lawrence:Eldred v. Reno, and|freedom, and} + +2~ What If . . . ? + +Shortly after the /{Eldred}/ case was filed in January 1999, a number of Harvard Law students working with Lessig announced the formation of a new group, “Copyright’s Commons.”~{ The Copyright’s Commons Web site is now defunct but can be found at the Internet Archive’s Wayback Machine, at http://cyber.law.harvard.edu/cc. }~ Led by Jennifer Love and Ashley Morgan, Copyright’s Commons published a monthly Web newsletter that provided updates on the progress of the /{Eldred}/ case and miscellaneous news about the public domain. +={Love, Jennifer;Morgan, Ashley;Copyright’s Commons+3:See also Creative Commons;Creative Commons (CC):Copyright’s Commons, as+3;Eldred v. Reno/Eldred v. 
Ashcroft:Copyright’s Commons, and+3} + +Copyright’s Commons described itself as “a coalition devoted to promoting the public availability of literature, art, music, and film.” It was actually a named plaintiff in the /{Eldred}/ case. + +That spring, Copyright’s Commons announced a new project that it called the “counter-copyright [cc] campaign.” Billed as “an alternative to the exclusivity of copyright,” the campaign invited the general public to “show your support for the public domain by marking your work with a [cc] and a link to the Copyright’s Commons website. . . . If you place the [cc] icon at the end of your work, you signal to others that you are allowing them to use, modify, edit, adapt and redistribute the work that you created.” +={counter-copyright (cc) campaign} + +The project may have been an imaginative call to arms, but there was no infrastructure behind it except one Web page, and no background material except a Web link to the Open Source Initiative. Wendy Seltzer, a Harvard Law student at the time, recalled that the [cc] symbol produced by Copyright’s Commons “was supposed to be a public domain dedication, but nobody had yet gone through all of the thinking about what was actually required to put something into the public domain, and did this satisfy the ‘affirmative act’ requirements [of the law]? Part of the germ of the Creative Commons was thinking about what would it take to make this — the [cc] symbol — an actual, meaningful, legally binding statement.”~{ Interview with Wendy Seltzer, September 28, 2006. }~ +={Seltzer, Wendy} + +Lessig, in the meantime, was keeping a frenetic schedule. He was overseeing the progress of the /{Eldred}/ lawsuit; traveling to give speeches to dozens of conferences and forums every year; promoting his book Code; and writing a monthly column in the /{Industry Standard}/ until it went under with the tech bubble collapse in 2001. 
The year before, Kathleen Sullivan of Stanford Law School persuaded Lessig to join its faculty and supervise a new law clinic, the Center for Internet and Society.~{ Ross Hanig, “Luring Lessig to Stanford Law School,” /{Recorder}/, October 17, 2001, at http://www.law.com. }~ Along the way Lessig also got married to Bettina Neuefeind, a human rights lawyer. +={Sullivan, Kathleen;Neuefeind, Bettina;Center for Internet and Society;Lessig, Lawrence:Code and Other Laws of Cyberspace;Lessig, Lawrence:Eldred v. Reno, and+1} + +Work on /{Eldred}/ intensified after the district court dismissed the case in October 1999. Lessig embarked on a new round of legal strategizing with colleagues to prepare the appeals court brief, which was submitted in May 2000. Throughout this period, intellectual property (IP) thinkers and tech activists — especially those in the Lessig/Cambridge/Stanford axis — were highly attuned to the gathering storm in copyright and software policy. + +One of the most tumultuous developments was Napster, a homemade file-sharing software program that had become an international sensation. Released in June 1999, Napster was the creation of hacker Shawn Fanning, then a student at Northeastern University in Boston. Within a year, the free program had been downloaded by an estimated 70 million users, drawing fierce denunciations by the recording industry and Washington officials. Napster used centralized file directories on the Internet to connect users to music files on thousands of individual computers. By enabling people to download virtually any recorded music in existence, for free, it was as if the fabled “cosmic jukebox” had arrived. Of course, much of the copying was blatantly illegal. Yet consumers welcomed Napster as one of the few vehicles they had for thumbing their nose at a reactionary music industry that refused to offer digital downloads. 
The Recording Industry Association of America (RIAA) sued Napster in December 1999, and succeeded in shutting it down in July 2001.~{ Wikipedia entry, at http://en.wikipedia.org/wiki/Napster. }~ +={Fanning, Shawn;Napster+2;Recording Industry Association of America (RIAA)+1} + +The Napster craze intensified the polarized property discourse that Lessig and his colleagues were trying to transcend. Napster encouraged an either/or debate by suggesting that a song is either private property or contraband; there was no middle ground for fair use or the public domain. While the RIAA and acts like Metallica and Madonna railed against massive copyright infringements, defenders of Napster were quick to point out its promotional power. An album produced by the English rock band Radiohead, for example, was downloaded for free by millions of people before its release — a fact that many credit with pushing the album, Kid A, to the top of the Billboard CD sales chart. But such claims carried little weight against those defending what they considered their property rights. +={Radiohead;property rights:copyright law, and+2;copyright law:property rights, and+2|public domain vs.+1;public domain:copyright law, and+1} + +The controversy over Napster was clearly influential in shaping the debate over how to protect the public domain. Berkman Center co-director Jonathan Zittrain recalls, “If we’re trying to hang the hopes of the community on the right just to copy stuff, we’re going to lose — and maybe we should. [The issue] is actually about the right to manipulate the symbols and talismans of our culture” — what Professor Terry Fisher likes to call “semiotic democracy.”~{ Interview with Jonathan Zittrain, September 28, 2006. 
}~ +={Zittrain, Jonathan;Fisher, William (Terry);free culture:sharing ethic of+1;copyright law:sharing economy vs.+1;democracy:semiotic} + +The problem was that copyright discourse, at least in the hands of the record and film industries, refused to acknowledge that the sharing and reuse of works might be necessary, desirable, or legal. The concept did not compute. There was a conspicuous void in the prevailing terms of debate. So the challenge facing the Cambridge copyright cabal was really a riddle about epistemology, law, and culture rolled into one. How could a new type of free culture, independent of the market, be brought into existence? And how could the creative works of this imagined culture be made legally “shareable” instead of being automatically treated as private property? + +This was an unprecedented challenge. When culture was chiefly a set of analog media — books, records, film — there had been affirmative legal limits on the scope of copyright. Before 1978, the law regulated only commercial uses of a work and only works that had been formally registered, which meant that most works automatically remained in the public domain. Moreover, there was a natural, physical “friction” preventing copyright holders from over-controlling how a work could circulate and be used. When words were fixed in books and sounds embedded in vinyl, people could circulate those objects freely, without having to ask permission from copyright holders. In the digital world, however, the physical constraints of analog media disappeared. Copyright holders now claimed that every digital blip, however transient, constituted a “copyright event” subject to their unilateral control. In practice, this greatly weakened the rights a person could enjoy under the fair use doctrine. 
+={copyright law:public domain vs.+3;public domain:copyright law, and+3;fair use doctrine:copyright law, and+1|digital age, in+1;copyright law:digital age, in+1|fair use doctrine, and+1|limits on the scope of} + +In a sense, the entire legal and cultural framework for free culture needed to be reimagined so it could function in the digital environment. The terms of fair use essentially had to be renegotiated — an undertaking that copyright law had never had to tackle in the past. But how might that be achieved when both Congress and the courts were beholden to the copyright maximalists’ worldview? + +Such were the kinds of conversations that swirled around the Berkman Center, Harvard Law School, MIT, and a handful of progressive intellectual property circles. Such discussions had been going on for years, especially in the context of free software and public-domain scholarship, but now they were reaching the lay public. The Napster and /{Eldred}/ cases were vehicles for educating the press and the public, and Lessig’s book /{Code}/ was becoming must reading for anyone who cared about Internet governance and digital culture. +={Berkman Center for Internet & Society;Napster;Lessig, Lawrence:Code and Other Laws of Cyberspace} + +Amid this swirl of copyright controversy, MIT professor Hal Abelson had lunch with Lessig at the Harvard Faculty Club in July 2000. The two had co-taught a class on cyberlaw two years earlier and shared many interests in the confluence of copyright and technology. One topic that day was Eric Eldred’s idea of a copyright conservancy — a “land trust” for public-domain works. On August 1, 2000, Abelson sent Zittrain an e-mail: +={Abelson, Hal:copyright conservancy idea, and+5|cyberlaw, and+5;Eldred, Eric:copyright conservancy, and;Zittrain, Jonathan;Eldred v. Reno/Eldred v. 
Ashcroft:effects of;copyright conservancy+35} + +_1 /{Here’s an idea that we might be able to get going, and where the Berkman Center could help.}/ + +_1 /{Let’s set up a tax-free, charitable foundation to which artists and record label companies could donate the copyright for recorded music. I’m thinking of all the old music for which there isn’t currently an active market.}/ + +_1 /{The foundation would arrange for this stuff to be loaded for free onto the internet and give the public permission to use it. The artists and record labels get a tax writeoff. The RIAA and Napster hug and kiss, and everyone goes home happy.}/ +={Recording Industry Association of America (RIAA)} + +_1 /{What do you think?}/ + +_1 /{Hal}/ + +Zittrain loved the idea, and suggested that it might make a great clinical project for Harvard Law students that fall. But he wondered if the Copyright Clearinghouse Center — a licensing and permissions organization for music — already offered such a service (it didn’t). Lessig proposed that Stanford and Harvard law schools jointly develop the program. He immediately identified one glaring problem: it would be difficult to “establish a process for valuing gifts of copyrighted stuff that would be clearly understood and would be accepted by the IRS.” +={Zittrain, Jonathan+1} + +What ensued was a lengthy and irregular series of e-mail conversations and social encounters through which the idea was chewed over and refined. Lessig acted as the “supernode” among a small group of participants that initially included Zittrain, Eldred, Nesson, and Diane Cabell, a lawyer and administrator at the Berkman Center. Within a month, others were invited into the conversation: Richard Stallman; Duke Law professors James Boyle and Jerome H. Reichman; and documentary film producer Eric Saltzman, who had just become director of the Berkman Center. 
+={Berkman Center for Internet & Society;Boyle, James:CC formation, and;Cabell, Diane;Reichman, Jerome H.;Nesson, Charles;Saltzman, Eric;Stallman, Richard:Copyright’s Commons, and;Eldred, Eric:copyright conservancy, and} + +A digital archive for donated and public-domain works had great appeal. Just as land trusts acted as trustees of donated plots of land, so the Copyright’s Commons (as Lessig proposed that it be named) would be a “conservancy” for film, books, music, and other works that were either in the public domain or donated. Six weeks after Abelson’s original suggestion, Lessig produced a “Proposal for an Intellectual Property Conservancy” for discussion purposes.~{ Lawrence Lessig, “Proposal for the Intellectual Property Conservancy,” e-mail to ipcommons group, November 12, 2000. }~ He now called the concept “an IP commons” — “the establishment of an intellectual property conservancy to facilitate the collection and distribution under a GPL-like license of all forms of intellectual property.” As elaborated by two Harvard Law School students, Chris Babbitt and Claire Prestel, “The conservancy will attempt to bridge the gap between authors, corporate copyright holders and public domain advocates by providing a repository of donated works which we believe will create a more perfect ‘market’ for intellectual property.”~{ Chris Babbitt and Claire Prestel, “Memorandum to Michael Carroll, Wilmer Cutler Pickering, ‘IP Conservancy,’ ” October 24, 2000. }~ +={Abelson, Hal:copyright conservancy idea, and+2;Babbitt, Chris;Prestel, Claire;Copyright’s Commons+27;Creative Commons (CC):Copyright’s Commons, as+27;IP Commons+27;Lessig, Lawrence:Copyright’s Commons, and+27} + +Friendly critiques started arriving immediately. 
Stallman considered the proposal a “good idea overall,” but as usual he objected to the words, such as “intellectual property” and “copyright protection,” which he considered “propaganda for the other side.”~{ E-mail from Richard Stallman to Lessig, September 11, 2000. See also http:// www.gnu.org/philosophy/words-to-avoid.html. Stallman suggested calling the project the “Copyright and Patent Conservancy.” }~ Abelson, a friend and colleague of Stallman’s at MIT, was not finicky about word choices, but he did believe that software donations should be directed to the Free Software Foundation, not to the envisioned project. FSF already existed, for one thing, but in addition, said Abelson, “It may be detrimental to have people initially associate this [new project] too closely with the FSF. . . . We need to craft a public position that will unify people. An FSF-style ‘let’s undo the effects of all those evil people licensing software’ is not what we want here.”~{ E-mail from Hal Abelson to Lessig, September 12, 2000. }~ Some people suggested attracting people to the conservancy by having “jewels” such as material from the estates of deceased artists. Another suggested hosting special licenses, such as the Open Audio License, a license issued by the Electronic Frontier Foundation in 2001 that lets musicians authorize the copying and reuse of their songs so long as credit is given and derivative songs can be shared. +={Stallman, Richard:Copyright’s Commons, and;Abelson, Hal:Free Software Foundation, and+1;Electronic Frontier Foundation (EFF);Free Software Foundation} + +The most difficult issue, said Abelson, was the economics of the project. The care and maintenance of donations, such as the master version of films, could be potentially huge expenses. Digitizing donated works could also be expensive. Finally, there were questions about the economic incentives to potential donors. Would people really wish to donate works that have significant cash value? 
+ +Answers to such questions were hardly self-evident, but there were encouraging signs. After Lessig gave a speech at the University of Michigan in September 2000, a man came up to him and announced, “I’m one of the people who benefited by the Mickey Mouse Protection Act.” It was Robert Frost, Jr., son of the great poet. Frost said, “I obviously need to check with my family, but we may be interested in becoming a contributor to your conservancy.”~{ E-mail from Lawrence Lessig to ipcommons group, September 8, 2000. }~ If Robert Frost’s estate could come forward with his literary legacy, perhaps there were others willing to do the same. +={Frost, Robert, Jr.;Lessig, Lawrence:public speaker, as;Copyright Term Extension Act} + +When Berkman Center director Eric Saltzman joined the conversation, he raised a series of difficult questions about the whole idea: +={Saltzman, Eric+1} + +_1 Why would a person or corp. donate copyrighted materials? Larry’s draft implies a benefit to the IP owner — does this mean broader Internet facilitated use, and not merely a tax deduction? Under what circumstances, if any, does the Conservancy charge for use of its IP? If a user modifies a story, say, producing a screenplay, to whom does that screenplay belong? Would a motion picture based upon that screenplay owe $$ to the Conservancy? If so, how much (this is the damages phase of the /{Rear Window}/ case)?~{ This case, /{Stewart v. Abend}/, 100 S. Ct. 1750 (1990), required the copyright owners of Alfred Hitchcock’s movie /{Rear Window}/ to pay damages to the author of a book upon which the film was based. Saltzman was concerned that the conservancy would be liable for any illicit derivative works. See Daniel A. Saunders, “Copyright Law’s Broken Rear Window: An Appraisal of Damage and Estimate of Repair,” /{California Law Review}/ 80, no. 1 (January 1992), pp. 179–245. 
}~ Wouldn’t a new, hopeful band prefer to allow free use of its song(s) on a commercially promoted site like MP3.com rather than the Conservancy site? All asking: How to make the Conservancy into a useful garden, not a well-meaning weed patch of unwanted, neglected IP?~{ E-mail to ipcommons group, September 18, 2000. }~ + +By early October 2001, some of these questions had been provisionally answered. For example: Only digital works would be accepted initially. No limitations or restrictions would be set on the use of donated works. Prospective academic partners would include the University of California at Berkeley, Duke, Harvard, MIT, and Stanford. Lessig suggested both Richard Stallman and Jack Valenti as possible board members. The central goal was to develop a new sort of noncommercial space in cyberspace for the sharing and reuse of music, visual art, film, literature, nonfiction, academic work, software, and science.~{ E-mail from Lawrence Lessig to ipcommons group, November 12, 2000. }~ +={Valenti, Jack;Stallman, Richard:Copyright’s Commons, and} + +But many questions still hung in the air. Could the free software ethic really translate to other creative genres? Would tax incentives elicit donations of works? Would independent appraisals of donated works be needed? How would the conservancy search the titles of works and get permissions clearances? + +For all of its brainpower and commitment, Lessig’s rump caucus might not have gotten far if it had not found a venturesome source of money, the Center for the Public Domain. The center — originally the Red Hat Center — was a foundation created by entrepreneur Robert Young in 2000 following a highly successful initial public offering of Red Hat stock. As the founder of Red Hat, a commercial vendor of GNU/Linux, Young was eager to repay his debt to the fledgling public-domain subculture. 
He also realized, with the foresight of an Internet entrepreneur, that strengthening the public domain would only enhance his business prospects over the long term. (It has; Young later founded a print-on-demand publishing house, Lulu.com, that benefits from the free circulation of electronic texts, while making money from printing hard copies.) +={Young, Robert;Red Hat;GNU/Linux:Red Hat, and;Center for the Public Domain+2;public domain:Center for Public Domain+2} + +The director of the center, Laurie Racine, a former geneticist and business professor, was skilled at making shrewd strategic grants and “character bets” in public-domain activism. Because the center was not hobbled by the bureaucracy or timidity that afflicts many large foundations, it was able to make swift decisions and bold bets on innovative projects. (I came to work closely with Racine on a number of projects, including the co-founding of Public Knowledge, in 2001.) +={Racine, Laurie+1} + +Lessig met with Racine in October 2000. On a napkin, he sketched his idea for expanding copyright for authors. He came away with funding for a meeting at the Berkman Center and, later, a $100,000 commitment to launch the IP conservancy; the Center for the Public Domain eventually put up $1 million to get the project going, well before other funders saw the promise of the idea. Racine wanted her new center to be associated with “a project that has broad vision, credibility, range and staying power.” She saw Lessig’s project as having all of those things.~{ E-mail from Lawrence Lessig to ipcommons group, October 11, 2000, which contained e-mail from Laurie Racine to Lessig, October 25, 2000. }~ The grant was based more on the concept than a specific plan, however. 
At the time it was not entirely clear if the project would own and manage digital works, host Web services that made things freely available, or provide legal and software tools — or something else.~{ E-mail from Lawrence Lessig to ipcommons group, November 12, 2000. }~ There was, nonetheless, a great sense of mission and urgency to get under way. + +Interestingly, two similar initiatives were also in the early stages of development. The Knowledge Conservancy, led by David Bearman at Carnegie Mellon University in Pittsburgh, had a similar model of accepting donations of materials and making them available online. It focused more on sponsorship donations and memberships, while Lessig’s group was more oriented toward legal research and Web hosting of works. Another project, OpenCulture.org, planned to compensate artists for contributions to the public domain, but apparently it never took off.~{ http://web.archive.org/web/*/http://Openculture.org. }~ Lessig and his group were not averse to joining forces with others, but they were intent on vetting their own business model, such as it was, before joining anyone else’s venture. +={Bearman, David;OpenCulture.org;Knowledge Conservancy} + +One turning point came in January 2001 after Saltzman had met with several lawyers at Wilmer, Cutler & Pickering, a prominent law firm in Washington, D.C.~{ Contained in e-mail from Christina Ritchie to ipcommons group, December 15, 2000. }~ After conversations with attorneys David Johnson and Michael W. Carroll, it became clear that a nonprofit trust managing donated material could face considerable liability if it turned out that the donors did not actually own the works. 
To explore this issue, Carroll produced a much-praised legal memo that raised a red flag: “What if we were fools, and the person who gave us the rights [to a work] actually never had the rights and suddenly we get sued for infringement?” asked Carroll.~{ Michael Carroll, “Potential Copyright Liability and DMCA Safe Harbor Relief for Creative Commons,” appendix to “Briefing Book for Creative Commons Inaugural Meeting,” May 7, 2001. }~ One successful lawsuit could sink the whole enterprise. +={Carroll, Michael W.;Johnson, David R.;Wilmer, Cutler & Pickering;Saltzman, Eric+8} + +The project was caught in a conundrum. It wanted to legalize a whole set of social practices for sharing and reusing creative works — but establishing a content intermediary for that purpose appeared to be financially prohibitive under the law. It could be hugely expensive to clear titles and indemnify the organization and future users against copyright infringement risks. +={copyright law:sharing economy vs.+1} + +For a few months, various people in Lessig’s orbit suggested complicated schemes to try to finesse the legal problems. For example, one way that the conservancy could reduce its liability would be to simply point to the Web locations of public-domain materials, in the style of Napster’s centralized index of songs. This would also avoid the nuisance and expense of clearing titles on thousands of works. Another idea was to create a “three zone system” of content — Zone A for content that the conservancy owned and licensed; Zone B for content that was merely hosted at the conservancy site with no copyright representations; and Zone C, a simple search engine with links to public-domain content. Each of these zones, in turn, raised a flurry of complicated, speculative legal issues.~{ E-mail from Eric Saltzman to ipcommons group, January 19, 2001. 
}~ +={Napster;Copyright’s Commons:three zone system of} + +None of the proposed alternatives got much traction, especially when Saltzman took a closer look at the realities of tax deductions for donors. Saltzman came to see that tax breaks would have very little incentive value for most potential donors, and establishing the cash value of donations would be difficult in any case. Moreover, if donors were getting little in return for their donations, they would be wary of signing a form indemnifying the conservancy against legal liability. On top of all this, Saltzman, like others, had misgivings about “the idea of the federal treasury contributing public money [in the form of tax expenditures].” In short, the conservancy approach seemed plagued with many complicated and perhaps insoluble problems. + +As if to keep the pot boiling, newcomers kept adding new thoughts. Two leading thinkers about the public domain in science, Paul Uhlir and Jerome H. Reichman, urged that the group expand its mission to include scientific research and take an international perspective.~{ E-mail from Paul Uhlir and Jerry Reichman, January 30, 2001. }~ (Uhlir directs the international scientific and technical information programs at the National Academy of Sciences/National Research Council; Reichman is an intellectual property professor at Duke Law School.) Both were keenly aware of the dangers to scientific progress if copyright and patent protection continued to expand. +={Reichman, Jerome H.;Uhlir, Paul;copyright law:expansion of} + +In January 2001, the caucus reached one point of consensus— that the primary function of this commons should be “to facilitate free/low-cost public use of original works.” It also agreed upon a name. Asked to vote on a name from a list that included IP Commons, Dot-commons, Sui Generous, IP Conservancy, and Public Works, Saltzman piped up, “May I suggest another name?
CREATIVE COMMONS.” When the final poll results were counted, Creative Commons was the clear winner with five votes, with one vote apiece for the remaining names. A later poll pitted “The Constitution’s Commons” against “Creative Commons” (CC) in a final runoff. The vote tally is lost to history, but we do know which name prevailed.~{ E-mails from ipcommons listserv to ipcommons group, January 11, 12, 13, 16, 2001. }~ +={Creative Commons (CC):development of+10|function of} + +Viewpoints quickly diverged on how a commons ought to be structured and what metrics of success should be used. Should it seek to maximize the number of donations or the number of downloads? Should it develop quality holdings in a given field or provide the widest possible breadth of content? Should it focus on social interaction and creative reuses of works? Should the focus be on producers or consumers of intellectual property? Should the organization focus on individuals or institutions? And how would it be different from other rights clearance organizations and content archives? The group seemed mired in a great cloud of uncertainty. + +For the next nine months, the group intensified its debate about how to build the envisioned conservancy. After law student Dotan Oliar sketched out possible “business models,” Saltzman persuaded a friend at McKinsey & Company, the consulting firm, to provide a pro bono assessment.~{ Dotan Oliar, “Memo on Creative Commons — Towards Formulating a Business Plan,” March 19, 2001. }~ “The McKinsey folks were very skeptical and, I think, had a hard time fitting this into their [business] framework,” recalled one student at the meeting, Chris Babbitt. After the meeting, he was convinced that Creative Commons could not possibly host a content commons: “It would just be huge amounts of material, huge costs, and we didn’t have the money for that.” ~{ Interview with Chris Babbitt, September 14, 2006. 
}~ +={Babbitt, Chris+1;McKinsey & Company;Oliar, Dotan} + +Feeling the need to force some concrete decisions, Saltzman and Lessig convened twenty-eight people for an all-day meeting in Hauser Hall at Harvard Law School, on May 11, 2001, to hash out plans. “What we’re trying to do here is /{brand the public domain}/,” Lessig said. A briefing book prepared by Chris Babbitt posed a pivotal question to the group: Should Creative Commons be structured as a centralized Web site or as a distributed, open-source licensing protocol that would allow content to be spread across cyberspace? The centralized model could be “an eBay for open-source IP” or a more niche-based commons for out-of-print books, film, or poetry. A mock Web site was actually prepared to illustrate the scenario. The home page read: “The member sites listed on the CommonExchange have been certified by Creative Commons to offer high-quality, non-infringing content on an unrestricted basis. Please feel free to use and pass these works along to others. We invite you to donate works of your own to help maintain the digital Commons.”~{ The mock-up can be found at http://cyber.law.harvard.edu/creativecommons/site.htm. }~ +={public domain:branding of} + +The distributed commons model would resemble the Chicago Mercantile Exchange or the New York Stock Exchange — “a trusted matchmaker to facilitate the transaction of securing rights,” according to the briefing book. “Just as corporations or commodities producers must meet certain criteria before they are listed on the Exchange, we could condition ‘listing’ in the Commons on similar criteria, albeit reflecting open source rather than financial values.”~{ “Briefing Book for Creative Commons Inaugural Meeting,” May 7, 2001, p. 10. }~ The virtue of the distributed model was that it would shift costs, quality control, and digitization to users. Creative Commons would serve mostly as a credentialing service and facilitator.
On the other hand, giving up control would be fraught with peril — and what if Creative Commons’ intentions were ignored? + +Several participants remember Lessig, Nesson, and Zittrain pushing for the distributed model, which seemed a bolder and riskier option. “Larry was the lead advocate for a distributed commons, where it would be focused on a license mechanism that we then would release to the world, and we let the world do with it what it will,” one attendee recalled. “At the time, I think, XML-type capabilities were just coming around, and Larry was very confident that that was the direction to go.”~{ Interview with Chris Babbitt, September 14, 2006. }~ XML, or Extensible Markup Language, is a programming language that uses self-created “tags” that help Internet users aggregate and share digital content residing on different computer systems. Lessig envisioned XML tags embedded in any Creative Commons–licensed work, which could then be used to identify shareable content on the Internet. +={Nesson, Charles;Zittrain, Jonathan;XML (Extensible Markup Language);Creative Commons (CC) licenses+3} + +This perspective carried the day, and the “conservancy” model of the commons was formally abandoned. CC would serve as a licensing agent. The licenses would enable authors’ works to be made available online in an easy, low-cost way without the full restrictions of copyright law. A standardized set of licenses would overcome the ambiguities of the fair use doctrine without overriding it. Creators could voluntarily forfeit certain copyright rights in advance— and signal that choice — so that others could freely reuse, share, and distribute CC-licensed works. +={authorship:Creative Commons, and+2} + +Jonathan Zittrain remembers being skeptical at first: “So this whole thing is just about some tags? It’s about /{licensing}/? How boring.” Upon reflection, however, he saw the value of CC licensing as a way to create a new default. “As soon as you realize — ‘Well, wait a minute! 
It’s just about authors being able to express their desires!’”~{ Interview with Jonathan Zittrain, September 28, 2006. }~ + +More than a menu of individual choices, the licenses would constitute an embryonic cultural order — a “constitutional system” to direct how text, images, and music could circulate in the online world, based on authors’ individual choices. But the new focus on licenses raised its own set of novel quandaries. What options should an author be able to choose? What suite of licenses made sense? While licensing terms may be boring and legalistic, the architecture could have potentially profound implications for cultural freedom — which is why the legal minds involved in the licenses spent so much time arguing seemingly obscure points. +={Creative Commons (CC):function of} + +However these debates were resolved, everyone agreed that it was time to incorporate Creative Commons as a nonprofit group, assemble a board, recruit a chief executive officer, and of course raise more money. The stated goal: “to expand the shrinking public domain, to strengthen the social values of sharing, of openness and of advancing knowledge and individual creativity.”~{ Oren Bracha and Dotan Oliar, “Memo: May 7th Consensus Regarding the Creative Commons Project,” August 20, 2001, p. 1. }~ +={public domain:CC licenses, and+1;Creative Commons (CC) licenses:public domain, and+1} + +There was a certain audacity if not unreality to the whole venture. Law professors don’t go around inventing ambitious public projects to revamp the social applications of copyright law. 
They don’t generally muck around with software, contract law, and artists to build an imagined “sharing economy.” “There was always this lingering suspicion in the back of my mind,” recalled Babbitt in 2006, “that it [Creative Commons] would be kind of a rich man’s folly, and this would just be some little thing — a niche experiment — that really wouldn’t turn out to have merited the sort of sustained interest of this high-caliber group of people.”~{ Interview with Chris Babbitt, September 14, 2006. }~ +={Babbitt, Chris} + +2~ Crafting the Licenses +={Creative Commons (CC) licenses+14} + +If Creative Commons licenses were going to enable artists to determine future uses of their works — on less restrictive terms than copyright law — what did actual artists think of the whole idea? To get a crude baseline of opinion, Laura Bjorkland, a friend of Lessig’s and manager of a used-book store in Salem, Massachusetts, conducted an unscientific survey. She asked about a dozen writers, photographers, painters, filmmakers, and a sculptor if they would be interested in donating their works to a commons, or using material from one? Most of them replied, “I’ve never even /{thought}/ of this before. . . .”~{ Laura Bjorkland, “Regarding Creative Commons: Report from the Creative Community,” in “Briefing Book for Creative Commons Inaugural Meeting,” May 7, 2001, pp. 16–19. }~ +={Bjorkland, Laura;Creative Commons (CC):function of} + +A classical composer said he “loved the idea of a Nigerian high school chamber group playing one of my string quartets without paying royalties . . . but I would not want a film studio or pop song writer using one of my themes on a commercial project, even if my name’s attached, without royalties.” Some artists worried about others making money off derivatives of their work. Many complained that artists earn so little anyway, why should they start giving away their work? 
Others were reluctant to see their work altered or used for violence or pornography. Photographers and visual artists found it “a little scary” to let their signature style be used by anyone. + +In short, there was no stampede for starting a public-domain conservancy or a set of licenses. Some worried that the CC licenses would be a “case of innovation where there’s no current demand.” Another person pointed out, more hopefully, that it could be a case of “changing the market demand with a new model.”~{ Oren Bracha and Dotan Oliar, “Memo: May 7th Consensus Regarding the Creative Commons Project,” August 20, 2001, p. 3, note 9. }~ + +The Lessig caucus was clearly struggling with how best to engage with the networked environment. Napster had demonstrated that, in the dawning Internet age, creativity would increasingly be born, distributed, and viewed on the Web; print and mass media would be secondary venues. For a society still deeply rooted in print and mass media, this was a difficult concept to grasp. But Michael Carroll, the Washington lawyer who had earlier vetted the conservancy’s liability issues, shrewdly saw network dynamics as a potentially powerful tool for building new types of digital commons. In 2001, he had noticed how a bit of Internet folk art had become an overnight sensation. Mike Collins, an amateur cartoonist from Elmira, New York, had posted the cartoon below on Taterbrains, a Web site.~{ http://politicalhumor.about.com/od/funnypictures/ig/100-Funny-Pictures/Confusing-Florida-Ballot.htm. }~ The image suddenly rocketed throughout the cyberlandscape. Everyone was copying it and sharing it with friends. +={Carroll, Michael W.+4;Collins, Mike+4;Napster}
+% +% Bush Buchanan Gore Nadar +% (c) 2000 Mike Collins, Taterbrains.com +% +% }group + +Carroll observed: + +_1 [Collins] distributed his design initially without a motive to profit from it. But the scale of distribution surpassed what he imagined, and in a subsequent interview he expressed some resentment over those who had made money from T-shirts and other paraphernalia using his design. But he appears to have taken no actions to enforce his copyright, the notice notwithstanding. Copyright lawyers would consider the unlicensed distribution of this work “leakage” — that is, a violation of law but not worth pursuing. + +_1 But if we could take steps to make it cheap, easy and desirable for the Mike Collinses of the world to stick a CC tag on something like this before sending it out, “leakage” becomes legal, changing the terms of the debate.~{ E-mail from Michael Carroll to Molly Van Houweling and Larry Lessig, October 15, 2001. }~ + +CC tags could make nonproprietary culture the default, reversing the presumption of copyright law. Everyone agreed with this general approach, but implementing it was rife with difficult questions. As Saltzman recalled: “What kind of relationship did we want to encourage between the creator/licensor and the user? Should it be totally automated? Should it invite some back-and-forth? Should there be a requirement that licensors provide contact information?”~{ Interview with Eric Saltzman, April 11, 2006. }~ The General Public License for software had shown the feasibility of a license for safeguarding a commons of shared code. Could it work in other creative sectors? It would be critical to strike the right balance. As law student Chris Babbitt put it, “Too little protection for the donor’s interests and no one will donate; too little room for the users to use the work, and the service is useless.”~{ “Briefing Book,” p. 12. 
}~ +={Babbitt, Chris;Saltzman, Eric;General Public License (GPL):legal enforceability of} + +If there were going to be several licenses, the next question was how many, and of what sort? There are many different types of creativity, after all. Should each one have its own set of special licenses? The Berkman conclave agreed that there should be a public-domain license enabling creators to grant a nonexclusive, royalty-free license on their works, without the viral conditions of the GPL. As for other licenses, five ideas were put on the table for consideration: a license authorizing free reuses of a work so long as proper attribution is given to the author; a license authorizing noncommercial uses; and a license authorizing free reuses but prohibiting derivative uses. Other suggestions included a license authorizing academic uses only and a “timed donations” license, which would allow an artist to revoke a work from the commons after a stipulated number of years.~{ Ibid. }~ Neither of these two licenses gained support from the group. + +There were also lots of open questions about how to structure the specific terms of the licenses. Should they be perpetual? Will the licensor be liable for “downstream” uses of a work that are deemed an infringement? Will licensors be required to identify themselves? Should licensors be able to add their own separate warranties and representations? Crafting the licenses meant going beyond the abstract rhetoric of the commons. These licenses had to be serious, operational legal instruments that courts would recognize as valid. + +Another concern was making the new CC licenses compatible with existing licenses seeking similar goals. MIT had produced the first such license for its OpenCourseWare initiative, which allows Internet users to use the university’s curricula and syllabi (see chapter 12). To ensure that CC- and MIT-licensed content would be compatible, the CC lawyers deliberately wrote a license that would meet MIT’s needs.
Another license, the GNU Free Documentation License (FDL), was being used on Wikipedia, among other online sites. But the FDL, originally intended for software documentation materials, was incompatible with the CC licenses. Stallman refused to alter the FDL, and Wikipedia was already under way and committed to the FDL. This quirk of history meant that Wikipedia content and CC-licensed content could not legally be combined. As we will see in chapter 9, this was the beginning of a rancorous schism in the free culture world, and the beginning of a heated philosophical/political debate over which licenses truly promote “freedom.” +={Free Documentation License;GNU Project:GNU FDL;Stallman, Richard:GNU Project, and;Wikipedia:GNU FDL, and} + +As this overview suggests, licensing complexities can quickly soar out of control and become overwhelming. Yet the very point of the Creative Commons licenses was to simplify the sharing and reuse of digital material. CC planners wanted to help ordinary people bypass the layers of mind-numbing legalese that make copyright law so impenetrable and inaccessible. The Creative Commons was all about empowering individuals and avoiding lawyers. A proliferation of licensing choices would only lead to license incompatibilities, a Balkanization of content on the Internet, and more lawyers. Sharing and interoperability go together, as Stallman’s early experiences with his Emacs Commune showed. +={Emacs;Stallman, Richard:Emacs, and} + +Somehow, therefore, the licenses had to thread three needles at once. They needed to align (1) the technical dynamics of the Internet with (2) the legal realities of copyright law and (3) the everyday needs of people.
The ingenious solution was to create licenses on three layers: a “lawyer-readable” license that could stand up in court, a “human-readable” license that could be understood by ordinary people, and a “machine-readable” license that could be recognized by search engines and other software on the Internet. Each “layer” expressed the same license terms in a different way — an unexpected twist on Lessig’s concern for “fidelity in translation.” The formal license was called the “Legal Code” (or “legal source code”); the machine-readable translation of the license was called “Digital Code”; and the plain-language summary of the license, with corresponding icons, was the “Commons Deed” (or the “happy deed”). + +2~ Branding the Public Domain in Code +={code:branding the public domain in+8;Creative Commons (CC):code, and+8;public domain:branding of+8} + +As the lawyers brooded and debated the licensing terms, another complicated debate was unfolding on the tech side of CC: how to brand the public domain in software code. If code is law, then it was imperative for Creative Commons to find some way to represent CC licenses in digital code. Abelson, Lessig, and others understood that the future of the Internet was likely to include all sorts of automated, computer-to-computer functions. One of the best ways to promote a new body of “free content” on the Web, therefore, would be to develop machine-readable code that could be inserted into any digital artifact using a Creative Commons license. That way, search engines could more easily identify CC-licensed works by their terms of use, and help assemble a functionally accessible oeuvre of digital content that was free to use. 
+={code:law, as;law:code as+1;Abelson, Hal:CC licensing, and;Internet:freedom of;Internet:future of;Lessig, Lawrence:CC licenses, and;World Wide Web:free content on} + +At this time, in 2001, the founder of the World Wide Web, Tim Berners-Lee, and others at the World Wide Web Consortium, based at MIT, were trying to conceptualize the protocols for a new “logical layer” of code on top of the World Wide Web. They called it the Semantic Web. The idea is to enable people to identify and retrieve information that is strewn across the Internet but not readily located through conventional computer searches. Through a software format known as RDF/XML,~[* RDF, or Resource Description Framework, is a way to make a statement about content in a digital artifact. XML, or Extensible Markup Language, is a way to write a specialized document format to send across the Web, in which certain content can be marked up, or emphasized, so that other computers can “read” it.]~ digital content could be tagged with machine-readable statements that would in effect say, “This database contains information about x and y.” Through Semantic Web protocols and metatags on content, it would be possible to conduct searches across many types of digital content — Web pages, databases, software programs, even digital sensors — that could yield highly specific and useful results. +={Berners-Lee, Tim;Semantic Web+6;World Wide Web:Semantic Web+6|protocols for+6;RDF/XML} + +Unfortunately, progress in developing the Semantic Web has been bogged down in years of technical disagreement and indifference among the larger Web community. Some critics argue that the project has stalled because it was being driven by a small corps of elite software theorists focused on databases, and not by a wider pool of decentralized Web practitioners.
In any case, the Creative Commons became one of the first test cases of trying to implement RDF/XML for the Semantic Web.~{ For background, see “The Semantic Web: An Introduction,” at http://infomesh.net/2001/swintro; Aaron Swartz and James Hendler, “The Semantic Web: A Network of Content for the Digital City,” at http://blogspace.com/rdf/SwartzHendler; and John Markoff, “Entrepreneurs See a Web Guided by Common Sense,” /{New York Times}/, November 12, 2006. }~ The project was led initially by Lisa Rein, a thirty-three-year-old data modeler who met Lessig at an O’Reilly open-source software conference. Lessig hired her as CC’s first technical director in late 2001 to embed the CC legal licenses in machine-readable formats. +={Rein, Lisa+2;Swartz, Aaron;Lessig, Lawrence:CC licenses, and} + +Writing the XML code was not so difficult, said Rein; the real challenge was “deciding what needed to be included and how you represent the licenses as simply as possible.”~{ Interview with Lisa Rein, December 20, 2006. }~ This required the lawyers and the techies to have intense dialogues about how the law should be faithfully translated into software code, and vice versa. Once again, there were complicated problems to sort through: Should there be a central database of CC-licensed content? How could machine-readable code be adapted if the legal licenses were later modified? + +Rein got an unexpected assist in the project from programming whiz Aaron Swartz, who had heard about Creative Commons and volunteered to help write the RDF/XML code. Swartz was an esteemed member of the RDF core working group at the World Wide Web Consortium (W3C), and so was intimately involved in Semantic Web deliberations. He was also a fifteen-year-old junior high school student living with his parents in Chicago.
“I remember these moments when I was sitting in the locker room, typing on my laptop, in these [W3C] debates, and having to close it because the bell rang and I had to get back to class,” Swartz recalled. At CC, he was given the title of “Volunteer Metadata Coordinator.” His job was “to design the RDF schema and what the XML documents would look like, and work that out with my friends at the W3C and get their approval on things.”~{ Interview with Aaron Swartz, October 10, 2006. }~ For his troubles, Swartz received an in-kind donation of a laptop computer and travel expenses, rather than a salary. “At the time, I felt bad,” said Swartz. “They were a nonprofit doing work I believe in. I didn’t feel I should be taking their money when I didn’t need it.” With later help from Ben Adida, the CC team managed to develop an RDF that could attach CC licenses to Web pages. But since the Semantic Web protocols were still in flux, and not widely used, the effort amounted to a speculative gamble on future and widespread adoption of those protocols. +={RDF/XML+3;Adida, Ben;Swartz, Aaron;World Wide Web Consortium (W3C);Internet:future of|protocols of+3} + +Although inspired by the Semantic Web and by Lessig’s analysis in /{Code}/, the RDF/XML coding was also provoked by the growing specter of digital rights management (DRM), the reviled systems used by film and music companies to lock up their content. The Creative Commons dreamed of developing an “anti-DRM” code to express the idea, “This content is and shall remain free.” Professor Hal Abelson remembered that “we even used the phrase, ‘DRM of the public domain.’”~{ Interview with Hal Abelson, April 14, 2007. }~ The coinage that Lessig later popularized is “digital rights expression” — metadata that indicate that a digital object can be shared and reused. There was a passing fear that CC’s digital rights expression code might infringe on DRM patents; one company known for its aggressive patent defense raised concerns. 
But once it was made clear that the CC’s RDF code amounted to a label, and did not execute online rights on a person’s computer, the problem disappeared. +={Abelson, Hal:digital rights management, and;digital rights management (DRM);copyright law:digital age, in;Lessig, Lawrence:Code and Other Laws of Cyberspace} + +The machine-readable CC licenses were one of the first major buildouts of RDF protocols. Swartz ruefully recalled the reaction of his friends at W3C: “I got the sense that they thought it was sort of a silly project, that they were thinking about bigger and longer-term things.” Adida, who later replaced Swartz as the CC representative at the W3C, played a major role in helping develop the metatags and protocols. +={Adida, Ben;Swartz, Aaron+1} + +The RDF/XML coding was part of a larger CC strategy to brand the public domain via software code. Since RDF code alone is like a nail without a hammer, Creative Commons decided to develop a specialized search engine so that Internet users could locate CC-licensed content. Without such a search engine, Lessig said in April 2002, “there will be no way to demonstrate that we’ve produced anything useful.”~{ E-mail from Lawrence Lessig to Hal Abelson, April 22, 2002. }~ Swartz, who was not involved in the project, said, “I was impressed that they did it, because it was probably the biggest programming job I’d seen them do at the time.” In the meantime, the CC began a series of overtures to Google and Yahoo in an attempt to get their search engines to search for CC-licensed content. After years of lukewarm interest, both Google and Yahoo added CC-search capabilities in 2005. Creative Commons also nurtured the hope that once enough content contained CC metadata, software developers would develop new applications to let people browse, use, and distribute CC-tagged content. 
+={Google;Yahoo;Lessig, Lawrence:CC licenses, and} + +2~ The Action Shifts to Palo Alto +={Creative Commons (CC) licenses:evolution of+31} + +By the fall of 2001, Creative Commons was still an idea without definition. The project gained new momentum in September 2001 when Lessig hired a former student, Molly Shaffer Van Houweling, to be the first director of the organization. Van Houweling, a sophisticated yet plainspoken law scholar with strong executive skills, had just finished clerking for Supreme Court justice David Souter. She set about incorporating the Creative Commons, organizing the board, building a Web site, and hammering out final versions of the licenses. +={Souter, David;Van Houweling, Molly Shaffer+5} + +Once a key foundation grant was secured — $1 million from the Center for the Public Domain — the Creative Commons was incorporated in Massachusetts (home to many key backers of the project) on December 21, 2001. The first board members included several legal scholars (Boyle, Carroll, Lessig), a computer scientist (Abelson), two filmmakers (Saltzman and Davis Guggenheim, a friend of Lessig’s), and a Web publisher (Eldred). Charged with breathing life into a fragile idea, Van Houweling settled into a small office on the third floor of Stanford Law School (before the project was reassigned to basement offices). +={Abelson, Hal;Boyle, James:CC board, on|CC formation, and;Carroll, Michael W.;Center for Public Domain;public domain:Center for Public Domain;Saltzman, Eric;Eldred, Eric:Creative Commons, and;Guggenheim, Davis} + +In January 2002, Glenn Otis Brown, a lawyer and former student of Lessig’s, was hired as assistant director. Brown had been a law student at Harvard Law School, where he had known Van Houweling and taken a constitutional law course from Lessig. An affable Texan who had flirted with a journalism career, Brown had just finished a year of clerking for a circuit court judge. 
He was due to start a job in New York City the following week when he got a call from Van Houweling. “She and Larry were starting something to do with copyright at Stanford,” recalled Brown. “I knew pretty much nothing else about it except it was a nonprofit and it was going to be a fulltime job. . . . The next thing I knew, I was moving to California.”~{ Interview with Glenn Otis Brown, June 9, 2006. }~ +={Brown, Glenn Otis:CC formation, and+1} + +Lessig, Van Houweling, and Brown took the menu of licenses proposed by two graduate students, Dotan Oliar and Oren Bracha, and sought to refine them and make them as legally bulletproof as possible.~{ Oren Bracha and Dotan Oliar, “Memo: Presentation of Two Possible Creative Commons Layer 1 Architectures,” October 1, 2001. }~ They were torn about the process to adopt. “We didn’t want to do a collective drafting process with the entire Internet community,” said Van Houweling. “That didn’t seem practical. And yet we were a little nervous, I think, about not knowing what our potential user base would want to use.” Lessig was unfazed. Release of the licenses “isn’t going to be like a movie premiere,” he told Van Houweling, but more of an evolutionary process. The idea was to get the licenses in circulation, monitor their progress, and make changes as necessary.~{ Interview with Molly Van Houweling, March 21, 2006. }~ +={Lessig, Lawrence:CC licenses, and+1;Bracha, Oren;Oliar, Dotan;Brown, Glenn Otis:CC licensing, and} + +Two of the most prestigious law firms in Silicon Valley, Cooley Godward Kronish and Wilson, Sonsini, offered pro bono legal assistance to the effort. Attorney John Brockland, an expert in opensource software licenses at Cooley Godward and a former student of Lessig’s, was the architect of the final licenses, assisted by Catherine Kirkman, a licensing attorney at Wilson, Sonsini. 
Brockland recalled, “One of the drafting challenges was to write something that could be broadly useful across a wide range of copyrighted works and would not be tied to particular nuances of the way the copyright statute works.”~{ Interview with John Brockland, January 5, 2007. }~ Most copyright licenses are drafted for specific clients and particular circumstances, not for the general public and all types of copyrighted works. +={Brockland, John;Cooley Godward Kronish;Kirkman, Catherine;Wilson, Sonsini, Goodrich & Rosati;copyright law:licenses for;open source software:legal implications of} + +Much of the discussion, said Van Houweling, “revolved around the values that we wanted to embed in the licenses, and what were the outer limits of those values?” Ultimately, she said, “we opted for a menu of licenses that was weighted toward the nonproprietary [content]. . . . We wanted to subsidize a certain set of choices that are otherwise underserved.”~{ Interview with Molly Van Houweling, March 21, 2006.}~ The point was to facilitate the rise of a sharing culture, after all, not to replicate the baroque dysfunctions of copyright law. +={copyright law:purpose of} + +Since the CC licenses were trying to articulate a new “middle ground” of voluntary choices for sharing, it had to grapple with all sorts of fine legal complexities. How exactly should they define a derivative work? What should be considered a noncommercial reuse of a work? Can you dedicate a work to the public domain? + +Some artists felt that they ought to be able to prohibit derivative uses of their works in pornography or hate speech. Hal Abelson adamantly disagreed. If the licenses had an “offensive uses” clause, as either a standard or optional term, it would open up a can of worms and put Creative Commons on the side of censors. That view readily prevailed. +={Abelson, Hal} + +A primary concern was anticipating how the licenses might be interpreted by the courts. 
Wendy Seltzer was worried that the CC licenses might become entangled with court cases involving the fair use doctrine. She wanted to make sure that the CC licenses were not seen as limiting or waiving a person’s fair use rights in any way. Her concern, shared by many others, resulted in an explicit disclaimer stating that intention. “I’m really glad that we did that,” recalled Glenn Brown, then the assistant director of CC, “because we ended up pointing to that over and over and over again — to make clear that this was something that went above and beyond fair use.”~{ Interview with Glenn Otis Brown, June 9, 2007. }~ +={Brown, Glenn Otis:fair use, and;Seltzer, Wendy;fair use doctrine:CC licenses, and;Creative Commons (CC) licenses:fair use, and} + +To ensure that the licenses would be enforceable, the CC lawyers built on the same legal base as the GPL; the licenses were crafted not as contracts, but as conditional permissions based on copyright law. A contract requires that the licensee have the opportunity to accept or reject the terms of an agreement, which would not be the case here. A conditional permission, by contrast, is the legal prerogative of a copyright holder. She is simply offering advance permission to use a CC-licensed work (to share, modify, distribute, etc.) so long as the specified terms are respected. +={General Public License (GPL):legal enforceability of;copyright law:conditional permission license|enforceability of|CC licenses, and+21;Creative Commons (CC) licenses:copyright law, and+21|version 1.0 of+21|enforceability of} + +Countless lawyerly refinements of a very technical nature were made to the licenses to ensure that they would be specific as needed, vague enough to be versatile, and rigorous enough to survive a court’s scrutiny.~{ The lawyers also wrestled with a host of imponderables that had no obvious answers, such as: What if people started spoofing the licenses by using them in inappropriate ways? 
Should the Creative Commons establish a central registry for CC-licensed works as a way to ensure the credibility of the project? (After long debate, the idea was ultimately rejected.) Would the Creative Commons be held liable for contributory negligence if someone used a CC license on a copyrighted song? (The CC took its chances.) Would the Creative Commons lose its trademark if it allowed anyone to use its trademarked logo? (Several lawyers warned that CC licensing of its trademark could not be properly policed.) Glenn Otis Brown worried that the board might be sued for facilitating the unauthorized practice of law. “I don’t know how long I spent calling up different insurance brokers trying to get a quote,” he recalled. “People had no idea what I was talking about. We ended up going all the way to Lloyd’s of London to ask them,” said Brown, laughing. “They wrote back and said, ‘You can’t insure that.’ ” }~ + +The first set of licenses, version 1.0, was completed in the spring of 2002 and included eleven choices. The six basic licenses, listed here in order of least restrictive to most restrictive, included: +={Creative Commons (CC) licenses:types of+12} + +!_ Attribution (BY). +Authorizes free reuses (download, distribution, modifications, commercial uses, etc.) so long as the author is credited for the original creation. + +!_ ShareAlike (SA). +Authorizes free reuses so long as credit is given and the new work is licensed under the same terms. + +!_ No Derivatives (ND). +Authorizes free reuses so long as the new work is unchanged and in whole. + +!_ NonCommercial (NC). +Authorizes free reuses so long as they are not commercial in nature. + +!_ NonCommercial ShareAlike (NC-SA). +Requires free reuses so long as the new work is passed along on the identical terms as the original work (so, for example, works that use a NonCommercial ShareAlike work will also have to be distributed as NonCommercial ShareAlike works). + +!_ NonCommercial No Derivatives (NC-ND). 
+Authorizes free reuses so long as credit is given, no changes are made, the work is kept intact, and it is not used commercially. This is the most restrictive CC license. + +Because each of these six basic choices can be combined with other CC licenses, copyright holders had five additional choices: + +!_ Attribution-ShareAlike (BY-SA). +Authorizes free reuses so long as the author is credited and the new work is licensed under the same terms. + +!_ Attribution-NonCommercial (BY-NC). +Authorizes free reuses so long as the author is credited and the new work is used for noncommercial purposes. + +!_ Attribution NonCommercial-ShareAlike (BY-NCSA). +Authorizes free reuses so long as the author is credited, the new work is used for noncommercial purposes, and the new work is passed along using this same license. + +!_ Attribution-No Derivatives (BY-ND). +Authorizes free reuses so long as the author is credited and the new work is unchanged and in whole. + +!_ Attribution No Derivatives-ShareAlike (BY-ND-SA). +Authorizes free reuses so long as the author is credited, the new work is unchanged and in whole, and the new work is passed along using this same license. + +It soon became clear that very few people were choosing any of the five licenses that did not require attribution of the author (the SA, ND, NC, NC-SA, and NC-ND licenses). So in May 2004 Creative Commons decided to “retire” those licenses, leaving the six most commonly used ones today (BY, BY-SA, BY-NC, BY-NC-SA, BY-ND, and BY-ND-SA). + +Still another choice was offered to copyright holders, a “public domain dedication,” which is not a license so much as “an overt act of relinquishment in perpetuity” of any rights in the work. The public domain dedication places no restrictions whatsoever on subsequent reuses of the work. 
+={public domain dedication} + +To the first-time user, the licenses may seem a little daunting.~{ A FAQ at the Creative Commons Web site answers the most frequent user questions about the licenses. It is available at http://wiki.creativecommons.org/. }~ The full implications of using one or another license are not immediately obvious. The tagline for the licenses, “Some Rights Reserved,” while catchy, was not really self-explanatory. This became the next big challenge to Creative Commons, as we see in chapter 6: how to educate creators about a solution when they may not have realized they even had a problem. + +By December 2002, the three levels of code — legal, digital, and human — had been coordinated and finalized as version 1.0. The organization was set to go public, which it did at a splashy coming-out party in San Francisco. The gala featured appearances by the likes of rapper DJ Spooky (an ardent advocate for remix culture) and a London multimedia jam group, People Like Us. Lessig proudly introduced the licenses as “delivering on our vision of promoting the innovative reuse of all types of intellectual works, unlocking the potential of sharing and transforming others’ work.”~{ http://creativecommons.org/press-releases/entry/3476. }~ +={DJ Spooky;People Like Us;code:levels of;Lessig, Lawrence:CC licenses, and+2} + +Perhaps the biggest surprise was a set of video testimonials from both ends of the copyright spectrum — John Perry Barlow of Electronic Frontier Foundation and Jack Valenti of the Motion Picture Association of America. 
With uncharacteristic solemnity, Barlow said: “I personally think there is something deeply oxymoronic about the term ‘intellectual property.’ But as long as we have set up a huge matrix of laws and social understandings that traffic in that assumption, we have to meet the conditions as we have found them and use what exists to preserve the human patrimony.” The silvermaned Valenti saluted the “Lessig compact” that is both “respectful of, and supports, copyright” while allowing people “to give up some of their copyrighted material, or all of it, and put it on the creative commons for others to view it or hear it.” “Larry, I hope that my supporting you in this doesn’t ruin your reputation,” Valenti joked.~{ See http://mirrors.creativecommons.org/cc-barlow-valenti.mov. }~ +={Barlow, John Perry;Electronic Frontier Foundation (EFF);Valenti, Jack+1} + +Many copyfighters were not thrilled to have an arch-adversary like Valenti praise their efforts at their moment of triumph. Yet that was a deliberate part of Lessig’s strategy: to assert a politically neutral middle ground from which to remake the social landscape of creativity. The question raised in some people’s mind was whether something so politically unassailable could have significant impact. Still others saw it as a welcome base upon which to build a new sharing economy. + +The CC launch party can be seen as a watershed moment in the struggle to protect the public domain. It announced a novel gambit to transcend the political impasse over copyright reform, a way to address copyright abuses without getting embroiled in a pitched and unwinnable confrontation. It legitimized all sorts of activities that had historically been seen as morally problematic, if not illegal. While building on the idea of the public domain developed over the preceding twenty years, Creative Commons inaugurated a new story about the commons, creativity, and the value of sharing. 
Watching the rocking party and savoring the hard work completed, Glenn Brown remembers a friend musing to him, “I wonder if we’ll see another legal hack like this in our careers.” +={Creative Commons (CC) licenses:launch of|public domain, and;public domain:CC licenses, and} + +1~ 5 NAVIGATING THE GREAT VALUE SHIFT + +/{Amateurs discover new tools for creating value: open networks and self-organized commons.}/ + +“It was never really clear to me what was going to happen after we launched the licenses,” recalled Glenn Otis Brown. “Would our work be done?” The intense push to craft the licenses and release them now over, Brown and his colleagues were only too happy to ease up in their work. (Van Houweling had left in 2002 to teach law; she is now at the University of California at Berkeley.) Despite his enthusiasm for the licenses, Brown had his private doubts about their future success. “To be honest, I was pretty scared,” he said. “I was worried they were going to go nowhere, and that I was going to be blamed for that.”~{ Interview with Glenn Otis Brown, August 10, 2006. }~ +={Brown, Glenn Otis:CC licensing, and+1;Van Houweling, Molly Shaffer;Creative Commons (CC) licenses:evolution of+10} + +In January 2003, a month after the CC licenses were announced, however, the project took on a new urgency. The Supreme Court handed down its /{Eldred}/ ruling, sending a clear signal that the courts were not much interested in reforming copyright law. Soon after this crushing disappointment, Lessig began to intensify his focus on the Creative Commons. “The pressure really increased,” said Brown, “but that’s also when things started to get a lot more fun. That’s when the staff started working on things /{all the time}/ and we got a stable, permanent staff, instead of contractors.” +={Eldred v. Reno/Eldred v. 
Ashcroft:Copyright’s Commons, and|Supreme Court, and;Lessig, Lawrence:CC licenses, and+9} + +What began as a modest licensing experiment began to take on the character of a permanent campaign. Working from the themes in /{The Future of Ideas}/, Lessig came to see the Creative Commons as more than a nonprofit custodian of some free public licenses; it was a champion for a bracing new vision of culture. This broader orientation meant reaching out to various creative sectors and the general public with messages that were both practical (“here’s how to use the licenses”) and idealistic (“you, too, can build a better world”). +={Lessig, Lawrence:The Future of Ideas;Creative Commons (CC):function of+5|social movement, as+5} + +The band of enterprising law scholars and techies who once saw their challenge as one of bolstering the public domain began to widen their gaze to the vast world of creativity and democratic culture. Social practice, not theory, became the animating force in their work. + +This meant reaching out to writers, musicians, filmmakers, photographers, librarians, academics, and other creators. All faced worrisome threats to their freedoms in the digital environment, as we saw in chapter 2. Lessig and the small Creative Commons staff made it their job to speak to these threats, promote the licenses, and set forth an alternative to the corporate media’s vision of culture. + +“Our single, overarching aim,” said Lessig in December 2002, “is to build the public domain, by building projects that expand the range of creative work available for others to build upon.”~{ Lawrence Lessig, Creative Commons press release, December 19, 2002; “CC in Review: Lawrence Lessig on How It All Began” [weekly e-mail series], October 12, 2005. 
}~ In an attempt to credential the licenses, the Creative Commons touted endorsements by a number of educational institutions (MIT, Rice University, Stanford Law School), public-spirited tech enterprises (iBiblio, the Internet Archive, O’Reilly & Associates), and venturesome musicians (DJ Spooky, Roger McGuinn of the Byrds). +={DJ Spooky;McGuinn, Roger;Lessig, Lawrence:public domain, and;public domain:CC licenses, and;Creative Commons (CC) licenses:public domain, and} + +As if by spontaneous replication, people from far-flung corners of the Internet began to use the licenses on their blogs, their MP3 music files, their photographs, their books. Week after week, the Creative Commons’s blog trumpeted the new recruits — the blog for book designers (Foreword), the database of metadata about music (MusicBrainz), the online storytelling Web site (Fray), the 2004 presidential campaign of Dennis Kucinich. +={Kucinich, Dennis} + +But the larger challenge for Creative Commons was finding ways to reach new constituencies who knew little about technology or copyright law. Why should they bother to use a CC license? This was a major public education challenge. Besides appearing at many conferences and cultivating press coverage, Glenn Brown spent a lot of time developing a Web site that could explain the licenses clearly. Great pains were taken to develop a precise, intuitive user interface to help people learn about the licenses and choose the right one for them. Copyright law was complicated enough; the CC licenses had to be seen as a simple alternative. +={Brown, Glenn Otis:CC licensing, and+1;copyright law:licenses, and+3} + +Advertisers have plenty of trouble communicating the virtues of mouthwash in a crowded public sphere. Could something as dry and forbidding as copyright law ever be made lucid and even hip? 
Although not a trained marketer, Glenn Brown had a knack for communicating things simply. Working with graphic designer Ryan Junell and Web designer Matt Haughey, Brown developed a site that combined a certain institutional authority with contemporary pizzazz. This style was on abundant display in a series of jaunty and entertaining Flash animations that explained the rationale for Creative Commons. +={Haughey, Matt;Junell, Ryan+1} + +Junell designed the now-familiar CC logo as a deliberate counterpoint to the copyright logo, ©. “I thought that Creative Commons should have something like the copyright logo since it deals with the same stuff,” said Junell. “It should be something really simple and pure.”~{ Interview with Ryan Junell, September 23, 2006. }~ Junell set his sights on making the CC logo a standard, ubiquitous symbol. He hoped that it would eventually be incorporated into the Unicode, an international registry for every character in any language used in software, from % to ∆ to ≠. +={Creative Commons (CC):logo of;Unicode} + +In promoting its licenses, Creative Commons fashioned itself as a neutral, respectable defender of individual choice. “Our tools are just that — tools,” said Haughey, who was then developing the CC Web site. “Our model intentionally depends on copyright holders to take responsibility for how they use those tools. Or how they don’t use them: If you’re unsure and want to keep your full copyright, fine. If you choose to allow others to re-use your work, great.”~{ Matthew Haughey, “Blogging in the Public Domain,” Creative Commons blog post, February 5, 2003, at http://creativecommons.org/weblog/entry/3601. }~ While many CC users were enthusiastically bashing copyright law, Lessig and the CC staff made it a point to defend the basic principles of copyright law — while extolling the value of collaborative creativity and sharing under CC licenses. 
+={Haughey, Matt} + +Despite praise by the heads of the Motion Picture Association of America and the Recording Industry Association of America, the licenses nonetheless did attract critics. Some in the music industry regarded the licenses as a Trojan horse that would dupe unsuspecting artists. David Israelite, president and CEO of the National Music Publishers’ Association, told /{Billboard}/, “My concern is that many who support Creative Commons also support a point of view that would take away people’s choices about what to do with their own property.”~{ Susan Butler, “Movement to Share Creative Works Raises Concerns in Music Circles,” /{Billboard}/, May 28, 2005.}~ /{Billboard}/ went on to cite the cautionary tale of a songwriter who was being kept alive by his AIDS medications, thanks to the royalties from a highly successful song. “No one should let artists give up their rights,” said Andy Fraser of the rock group Free. Other critics, such as John Dvorak of /{PC Magazine}/, called the CC licenses “humbug” and accused them of adding “some artificial paperwork and complexity to the mechanism [of copyright],” while weakening the rights that an author would otherwise enjoy.~{ John C. Dvorak, “Creative Commons Humbug: This Scheme Doesn’t Seem to Benefit the Public,” PC Magazine, July 28, 2005. }~ Still others had cultural scores to settle and criticized “anything advocated by clever, sleek young lawyers.”~{ Researchers at the Economic Observatory of the University of Openness, “Commercial Commons,” on the online journal /{Metamute}/, at http://www.metamute.org/?q=en/Commercial-Commons. }~ +={Creative Commons (CC) licenses:critics of;Israelite, David;Recording Industry Association of America (RIAA);Dvorak, John;Fraser, Andy} + +Putting aside such quibbles and prejudices, the CC licenses seemed a benign enough idea. Given its reliance on copyright law, how could any entertainment lawyer object? 
Yet the real significance of the licenses was only appreciated by those who realized that a Great Value Shift was kicking in. For them, the licenses were a useful legal tool and cultural flag for building a new sharing economy. +={Great Value Shift} + +2~ The Great Value Shift +={Great Value Shift+9;Internet:Great Value Shift, and+9} + +In retrospect, the CC licenses could not have been launched at a more propitious moment. Networked culture was exploding in 2003. Broadband was rapidly supplanting dial-up Internet access, enabling users to navigate the Web and share information at much faster speeds. Prices for personal computers were dropping even as computing speeds and memory capacity were soaring. Sophisticated new software applications were enabling users to collaborate in more powerful, user-friendly ways. The infrastructure for sharing was reaching a flashpoint. + +Put another way, the original promise of the Internet as a gift economy was coming into its own. Originally built as a platform for efficient sharing among academic researchers, the Internet by 2003 was being used by some 600 million people worldwide.~{ Nielsen/Net Ratings estimated 585 million Internet users in 2002; the International Telecommunications Union estimated 665 million. See http://www2.sims.berkeley.edu/research/projects/how-much-info-2003/internet.htm. }~ The open framework for sharing was no longer just a plaything of technophiles and academics; it was now insinuated into most significant corners of the economy and social life. As it scaled and grew new muscles and limbs, the Internet began to radically change the ways in which wealth is generated and allocated. +={Internet:gift economy of+1} + +I call this the Great Value Shift — a deep structural change in how valuable things are created for commerce and culture. The shift is not only a fundamental shift in business strategy and organizational behavior, but in the very definition of wealth. 
On the Internet, wealth is not just financial wealth, nor is it necessarily privately held. Wealth generated through open platforms is often /{socially created value}/ that is shared, evolving, and nonmonetized. It hovers in the air, so to speak, accessible to everyone. +={Internet:socially created value of+1;value:creation of} + +Creative Commons had the good fortune to introduce its licenses just as the Great Value Shift was picking up momentum. The types of distributed innovation first seen in free software were now popping up in every imaginable corner of cyberspace. The social content was not just about listservs and newsgroups, but instant messaging networks, Web logs, podcasts, wikis, social networking sites, collaborative archives, online gaming communities, and much else. +={free software:Great Value Shift, and} + +“What we are seeing now,” wrote Yochai Benkler in his book, /{The Wealth of Networks}/, “is the emergence of more effective collective action practices that are decentralized but do not rely on either the price system or a managerial structure for coordination.” Benkler’s preferred term is “commons-based peer production.” By that, he means systems that are collaborative and non-proprietary, and based on “sharing resources and outputs among widely distributed, loosely connected individuals who cooperate with each other.”~{ Yochai Benkler, /{The Wealth of Networks: How Social Production Transforms Markets and Freedom}/ (New Haven, CT: Yale University Press, 2006), p. 60. }~ +={Benkler, Yochai:The Wealth of Networks+1;commoners:sharing by;commons-based peer production} + +Informal social relationships, working in the unregimented, free space of open platforms, were beginning to change economic production and culture. “Behaviors that were once on the periphery— social motivations, cooperation, friendship, decency — move to the very core of economic life,” Benkler argued.~{ Benkler at the iCommons Summit, Dubrovnik, Croatia, June 15, 2007. 
}~ Money and markets do not necessarily control the circulation of creativity; increasingly, online communities — large numbers of people interacting with one another on open platforms — are the engines that create value. + +The CC licenses were launched at a moment when the new modes of value creation were just gaining a foothold. + +We do not yet have well-accepted theoretical models for understanding this new “socioeconomic space”; the online environments are still so new, and much is still in flux.~{ An excellent overview of these new spaces is Don Tapscott and Anthony D. Williams, /{Wikinomics: How Mass Collaboration Changes Everything}/ (New York: Portfolio, 2006). }~ But it has not escaped the notice of major corporations that online social dynamics can result in some radically more effective models for organizing employees and engaging with customers. A /{BusinessWeek}/ cover story touted “The Power of Us” in June 2005, profiling the ways in which companies like Procter & Gamble use mass collaboration for R&D; Hewlett-Packard had created a virtual stock market among its staff to gather collective estimates that have improved sales forecasts.~{ Robert D. Hof, “The Power of Us: Mass Collaboration on the Internet Is Shaking Up Business,” /{BusinessWeek}/, June 20, 2005, pp. 73–82. }~ The /{Economist}/ has written about the “fortune of the commons” that can result when there are open technical standards, and business professors such as Henry Chesbrough have examined new “open business models.”~{ “The Fortune of the Commons,” Economist, May 8, 2003; Henry Chesbrough, /{Open Business Models: How to Thrive in the New Innovation Landscape}/ (Cambridge, MA: Harvard Business School Press, 2006). }~ +={Chesbrough, Henry;Hewlett-Packard;Procter & Gamble} + +Before looking at the many creative sectors that have adopted the CC licenses — the focus of chapter 6 — it helps to understand the Great Value Shift that open networks have catalyzed. 
In one market after another, open networks have helped new competitors slash all sorts of business costs while enhancing their capacity to innovate and respond to changing consumer demand. Open networks have also given rise to new types of social platforms on the Web, often known as Web 2.0, which are making it economically attractive to serve niche markets. This is the so-called Long Tail. Yet even these sweeping changes in market structure are facing a qualitatively different kind of competition — from the commons sector. It turns out that informal online communities based on trust, reciprocity, and shared social norms can perform a great many tasks more efficiently than markets, and with some measure of social pleasure and fun. +={Long Tail;Web 2.0:Great Value Shift, and} + +2~ The Endangered Economics of Centralized Media +={Centralized Media+7;media:See also Centralized Media} + +The dominant systems of communications in the twentieth century — radio, broadcast and cable television, recorded music, theatrical film — required large amounts of centralized capital, corporate management, and professional control. These media have very different business models and practices, but they all rely upon centralized control of capital and distribution to large, fairly undifferentiated audiences. Each depends upon efficiencies derived from high-volume sales and a limited spectrum of commercial choices. + +Centralized Media also dictate certain economic and social identities for people. There are “sellers,” who are the prime source of expertise, innovation, and production, and there are “consumers,” who passively buy, or don’t buy, what is offered. Sellers mostly determine what choices are offered to buyers, and they tend to have greater market power and information than consumers. Interactions between sellers and consumers are mostly brief and transactional; there is little ongoing conversation or relationship between seller and buyer. 
+ +Much of the strength of the Centralized Media derives from its control of critical “choke points” of product development and distribution. By controlling the technical standards for a product, its retail distribution or its brand identity, a company can maximize its competitive advantages and limit competition. The high concentration of capital needed to communicate through a Centralized Media outlet is itself a useful way to limit competition. No surprise that only large, publicly traded corporations and rich individuals own and control Centralized Media — and that their messages tend to be overtly commercial or commercial-friendly. +={Centralized Media:choke points of|competition, and+4} + +While this paradigm is obviously quite attractive for those investors with a piece of the action, it also entails some very large costs that are not readily evident. Companies have to spend a lot on advertising to build a brand identity that can enhance sales. Their “blockbuster” business model entails large upfront costs in order to reap large financial returns. Centralized Media require expensive systems for finding, recruiting, and developing stars; an elaborate marketing apparatus to find and retain customers; and legal and technological means to identify and prosecute “piracy” of creative works. +={Centralized Media:piracy, and;piracy} + +In a more static environment, this model worked fairly well. But as the Internet revolution proceeded in the 2000s, distributed media started to undercut the economic logic of Centralized Media. Your personal computer, connected to other computers via inexpensive telecommunications and software, can do things more cheaply. 
Distributed online media not only avoid the costly overhead needed by Centralized Media, they can generate dynamic, interactive, and sociable types of communication: /{user-generated content!}/ While this amateur content is wildly variable in quality, it does have this virtue: it is more culturally diverse and authentic than the homogenous, overproduced programming of Centralized Media. And because distributed media are not economically driven to amass large, undifferentiated audiences, the content can be more idiosyncratic, passionate, and, in its own ways, creative. There is no “fifty-seven channels and nothing on” problem. The problem is how to locate what you want from among millions of choices. +={Centralized Media:Internet vs.;Internet:Centralized Media vs.} + +For all these reasons — but mostly because of the economics— conventional media are becoming more vulnerable to the most advanced Internet-based competitors (Amazon, eBay, Google, Yahoo) as well as to new types of nonmarket social production (e.g., Craigslist, Wikipedia, special-interest affinity groups). We may even be approaching a point at which the historic cost structures and risk management strategies of major media companies are no longer sustainable. Some analysts fret about the long-term viability of American newspapers, whose stock value fell by 42 percent, or $23 billion, between 2005 and 2008. Broadcast and cable television have similar fears. They worry, correctly, that Internet venues are siphoning away “eyeballs” by providing more timely and convenient alternatives. While the amateur videos of YouTube may not have the production quality of NBC, broadcast and cable television cannot ignore an upstart platform that in 2006 was attracting more than 100 million video downloads /{per day}/ and had a market valuation of $1.65 billion when bought by Google that year. No wonder Cable News Network co-hosted a presidential debate with YouTube in 2007; it needed to reassert its cultural relevance. 
+={Amazon;eBay;Google;Yahoo;Craigslist;Wikipedia;YouTube} + +Large media companies are struggling to support some huge financial, administrative, and marketing burdens simply to “tread water” and retain some measure of their customary market dominance. This helps explain why Centralized Media are so keenly focused on influencing Congress and the Federal Communications Commission. They want to lock in competitive advantages through regulation. (Consider the fierce battles over media ownership rules, spectrum allocation policies, anticopying technology mandates such as the “broadcast flag,” new copyright and trademark protections, must-carry rules for cable operators, and on and on.) Centralized Media’s great interest in securing legal and regulatory privileges for themselves suggests their relative weakness and decline. For them, it is easier to chase market advantages through political interventions than through innovation, superior performance, and price. +={Centralized Media:ownership rules for;media:ownership of+3} + +2~ The Economic Advantages of Open Media +={Centralized Media:Internet vs.+7;Internet:Centralized Media vs.+7;media:open+7} + +By contrast, a profusion of new ventures are finding that a company can thrive on the open networks of the Internet. Even a startup without brand recognition or regulatory preferences can compete on the merits — price, quality, responsiveness — against entrenched giants. They can leverage user-generated content and the vast reservoir of value previously known as the public domain. The success of thousands of new Internet businesses reflects an epochal shift in the terms of competition — a Great Shift in how value is created. +={Great Value Shift} + +The most significant shifts in the history of capitalism have come when new mechanisms lower the costs of managing risk and serving latent market demand. We are apparently in such a stage of economic transformation today. 
The genius of the Renaissance banks and the Dutch insurance and shipping companies, for example, was to reinvent the structure of markets through new financial and legal instruments that enabled commercial trust and transparency to work on a larger scale. The limited liability corporation was also a powerful innovation for diversifying risk, coordinating people, and deploying capital on a scale that was previously impossible.~{ I am indebted to my friend John Clippinger for this insight, as explained in his book /{A Crowd of One: The Future of Individual Identity}/ (New York: Public Affairs, 2007), chapter 7, “Transforming Trust: Social Commerce in Renaissance Florence,” pp. 97–114. }~ +={markets:restructuring+1} + +In like fashion, the Internet is now facilitating some deep shifts in the cost structures and scale of markets. Innovative online business models are significantly undercutting the (expensive) cost structures of traditional Centralized Media, and in the process sometimes creating entirely new sorts of markets (search engine advertising, discounted travel, specialty niches) and more open, competitive markets. +={Internet:socially created value of+3} + +One of the most intriguing developments is a set of “open business models” that shun closed, proprietary technical standards and content restrictions. Unlike the classic industrial business models of the twentieth century, the new open business models make money by aggressively insinuating themselves into open networks. They are able to identify new trends, mobilize talent, interact with customers, and develop customized products more rapidly than competitors. They are also building ingenious new business models “on top of ” social behaviors of online users. (See chapter 10.) +={open business models} + +MySpace, for example, hosts a social network of more than 100 million “friends” (a claim that, even if inflated by inactive user accounts, is indisputably huge). 
eBay consolidated the world’s garage sales and flea markets into a more efficient market by developing Web-based software that “manages” social trust and reputation and evolves with user interests. Amazon has become a premier online retail Web site by hosting a platform open to all sorts of online vendors and spurred by the recommendations and collective purchase records of buyers. Google devised its famous PageRank search algorithms to aggregate the Web-surfing “wisdom of the crowd,” making online searches vastly more useful. +={Amazon:eBay;Google;MySpace} + +The basic point is that open media platforms are significantly reducing business coordination and communication costs by leveraging people’s natural social behaviors in ways that conventional businesses simply cannot. Open Web platforms allow large and diverse groups to organize themselves and their projects more easily. Individuals have greater self-defined choice and the capacity to express their own market demand; they need not be constrained by the choices presented to them in the market. The Internet has opened up gushing channels of virtual word of mouth, which is a more trusted form of consumer information than advertising. Those companies with excellent products use favorable word of mouth to reduce their marketing and distribution costs. “Smart mobs” can elevate obscure bloggers and Web sites because they regard them as more trustworthy, expert, and authentic (or entertaining) than those of Centralized Media. Many conservatives now trust the Drudge Report and Free Republic more than CBS News, just as many liberals trust DailyKos and Huffington Post more than CBS News. Indeed, the very genre of “objective journalism” — an artifact of the economic necessity of appealing to broad, lowest-commondenominator audiences — is now in jeopardy. 
+={Internet:virtual word of mouth on;media:participatory;Centralized Media:competition, and+3;markets:restructuring+1} + +As people migrate to the Web, advertising revenues for Centralized Media are eroding further, setting off a scramble to devise new advertising vehicles to reach fugitive Internet users. It is a chase that cannot be avoided because that’s where the eyeballs are. Moreover, the value proposition of open networks is too attractive to ignore. But because that value proposition is so radically different from conventional media — a company must revamp its organizational structures, strategies, marketing, etc. —it raises some wrenching choices for Centralized Media: Should they “go native” and let their products loose on open networks? Or would that destroy their entrenched business models for television shows, theatrical films, music CDs, and other content? The vast infrastructure and business practices of Centralized Media cannot be summarily abandoned, but neither can they remain economically tenable over the long haul without significant changes. For now, Centralized Media are attempting an ungainly straddle of both worlds. +={Centralized Media:alternatives vs.} + +2~ Web 2.0: A New Breed of Participatory Media +={media:participatory+17;Centralized Media:alternatives vs.+17;Web 2.0:applications+17} + +At the time, Eric Eldred’s Web repository of public-domain books could be seen as a modest little experiment. In retrospect, it can be seen as a dawning cultural archetype. It betokened the power of the amateur.~{ Dan Hunter and F. Gregory Lastowka, “Amateur-to-Amateur,” /{William and Mary Law Review}/ 46, no. 951 (December 2004). }~ While Centralized Media continue to have greater resources, talent, and political clout, amateurs are finding their voices and new online venues. A significant cultural emancipation is under way. Creative expression need no longer cater to corporate gatekeepers and the imperatives of the mass market. 
A no-name amateur can produce useful and influential work without having to go through New York, Los Angeles, London, or Tokyo. The do-ityourself culture is flourishing and expanding. With little money or marketing, anyone can launch a viral spiral that, with enough luck and panache, can sweep across global culture. +={Eldritch Press} + +It is only now dawning on some media chieftains that the biggest threat to Centralized Media is not piracy or online competitors, but /{nonmarket alternatives}/: you, me, and the online friends that we can attract. Hollywood and record labels might rail against “pirates” and demand stronger copyright protection, but the real longterm threat to their business models is the migration of consumer attention to amateur creativity and social communication. Social production on open networks has become a powerful creative and economic force in its own right. Ordinary people can now find their own voices and develop folk cultures of their own that may or may not use the market. +={piracy;Centralized Media:piracy, and} + +After the tech bubble of 2000–2001 burst, the surviving techies and entrepreneurs developed a remarkable range of cheap, versatile software that took to heart the lessons of free software and open networks. Blogs, wikis, social networking software, peer-to-peer file-sharing and metadata tools began to migrate from the tech fringe to the mainstream. There have been many conceptual frames and buzzwords associated with this new order — “smart mobs” (Howard Rheingold), “the wisdom of crowds” (James Surowiecki), “wikinomics” (Don Tapscott and Anthony D. Williams) — but the catchphrase that has gained the most currency is “Web 2.0,” a term launched by Tim O’Reilly in a canonical 2003 essay.~{ Tim O’Reilly, “What Is Web 2.0: Design Patterns and Business Models for the Next Generation of Software,” O’Reilly Media Web site, September 30, 2005, at http://www.oreilly.com/pub/a/oreilly/tim/news/2005/09/30/what-isweb-20.html. 
}~ +={O’Reilly, Tim;Rheingold, Howard;Surowiecki, James;Tapscott, Don;Williams, Anthony D.;free software:economic effects of;open source software:economic implications of} + +O’Reilly, a prominent publisher of books on open-source software, coined Web 2.0 to describe the fluid social dynamics that occur on open Web platforms — wikis, blogs, social networking Web sites, and other open, collaborative platforms — where people have the freedom to share and reuse work. Web 2.0 amounts to a worldview that celebrates open participation as a way to create valuable collective resources. It regards open technical protocols and content as the basis for this process (whether managed as a commons or a business), and dismisses closed, proprietary regimes as both socially and economically questionable. In essence, Web 2.0 honors socially created value as the basis for value creation, which market players may or may not be able to exploit. +={Web 2.0:coining of term} + +Blogging is more of a social medium than is generally supposed, for example. It is not just the outburst of some ranter in his pajamas, as the stereotype has it, but a social medium that connects people in new ways. Most blogs have a blogroll — a list of admired blogs— which enables the readers of one blog to identify other bloggers engaged in similar conversations. Permalinks — stable Web addresses for blog content — enable people to make reliable Web citations of content, which means that people can coalesce around a shared body of work. And RSS feeds— “Really Simple Syndication” — allow people to “subscribe” to individual blogs and Web sites, enabling them to keep abreast of a sprawling set of communities. +={blogging+1;permalinks;RSS feeds} + +The rise of blog-tracking companies like Technorati and Alexa has also helped blogging become a durable social genre. 
These companies inventory and rank blogs, and help people discover blogs for virtually any subject of interest — cocktail mixing, high-energy physics, needlework design. By 2007, there were an estimated 100 million blogs in existence (although many were inactive or abandoned), making the blogosphere a powerful cultural force in its own right. There was also a flood of online “news aggregators” — Web sites that cherry-pick their own mix of pieces from the wire services, newspapers, Web sites, blogs, and other online sources. With huge audiences, news aggregators like the Drudge Report (1.6 million unique monthly visitors) and the Huffington Post (773,000 visitors) have begun to rival major daily newspapers in reach and influence. + +Another seminal social innovation has been Wikipedia, a strange and wondrous cultural eruption. Founded by Jimmy Wales and Larry Sanger in January 2001, the English-language Wikipedia began to gain serious momentum in the months after the CC licenses were released, and by early 2003 hosted 100,000 articles. (A “wiki” is a special type of Web site that allows anyone who accesses it to add or modify its contents.) After two years, Wikipedia had amassed a collection of 400,000 articles and inspired the launch of affiliated Wikipedias in more than 100 languages. In May 2008, +={Sanger, Larry;Wales, Jimmy;Wikipedia:social movement, as+2} + +Wikipedia featured 10.2 million articles in 255 languages; 2.3 million of the articles were in English. By harnessing the energies of tens of thousands of volunteers to write an infinitely expandable “encyclopedia,” Wikipedia has become the leading symbol for a radically new way of compiling and editing knowledge.~{ Wikipedia statistics from http://en.wikipedia.org/wiki/Wikipedia:About. }~ Remarkably, the Wikimedia Foundation, the umbrella organization that funds Wikipedia and many sister projects, had fewer than twenty paid employees in 2008 and a budget of less than $2 million. 
+ +Wikipedia has also spun off affiliated multilingual, free-content wikis on various subjects. Wikispecies is compiling an inventory of the world’s species, Wikiquote is collecting thousands of memorable quotations, the Wikimedia Commons is providing freely usable media files, and Wikibooks is assembling open-content textbooks. Wiki software has been adopted by dozens of different online communities, giving rise to scores of collaborative Web sites such as Conservapedia (for American political conservatives), Intellipedia (for U.S. intelligence agencies), Wookieepedia (for Star Wars fans), Wikitravel (for travelers), and OpenWetWare (for biological researchers). + +In the months following the launch of the CC licenses, peer-topeer (P2P) file sharing was also expanding rapidly. Long associated with illicit sharing of copyrighted music, P2P software in fact has many entirely legitimate uses in science, education, and diverse creative sectors. One of the key attractions of P2P software is its efficiency. It does not need to route information through centralized servers; information can be rapidly shared by routing digital files directly to participants, computer to computer, or by passing it through key nodes in an on-the-fly manner. Even after the courts shut down Napster in 2002, a variety of other P2P software applications — Grokster, Lime Wire, KaZaA, Gnutella, BitTorrent — continued to facilitate online sharing and collaboration. Some thirty-five companies, including Hollywood studios, are sufficiently impressed with the efficiencies of P2P that they have licensed BitTorrent technology to distribute their video content. 
+={Napster;software:P2P} + +Peer-to-peer file sharing has also unleashed radically new types of knowledge creation: volunteers who join the NASA Clickworkers project to count and classify craters on Mars, “citizen scientists” who help compile an interactive database of butterfly and bird sightings, or geneticists from around the world who submit data to the Human Genome Project and share access to the database. + +Although the tech world and some Internet users had known about various networking tools for years, the general public was largely in the dark until the presidential campaign of Vermont governor Howard Dean in 2002 and 2003. At the time, Dean was considered a long-shot antiwar candidate with little base and little money. Within a few short months, however, thanks to Dean’s outspoken style and his campaign’s skillful use of the Internet, he became the front-runner in a field of twelve candidates. Dean did not use the Internet as a simple publishing tool, but as a way to stimulate decentralized collaboration and thereby organize a diverse community of supporters. The campaign was not just about Dean, but about the participation of 640,000 volunteers who virtually organized themselves through various online tools. The campaign became a dynamic conversation between the candidate and voters — and generated a gusher of more than $50 million, most of it donations of a hundred dollars or less. So much was raised that Dean famously asked his supporters whether he should forgo federal matching funds, and instead raise more money from them. They agreed. The campaign ultimately imploded, of course, after his famous “Dean’s Scream” speech — itself a complex story — but what is notable is how the Dean campaign vividly demonstrated the speed and power of viral networks. 
+={Dean, Howard;Internet:political campaigns on}
The riot of unfiltered expression that has materialized on the Internet is often dismissed as stupid, unreliable, and silly; or praised as brilliant, stylish, and specialized; or simply accepted as idiosyncratic, irregular, and local. It is all of these things, of course, and that is precisely the point. +={Internet:mass participation in+1} + +If print culture honors the ethic of “edit, then publish,” the Internet inverts it: /{anything}/ can be made public . . . and then it is up to users to become their own editors. On the Internet, people do not “consume” content, they become active writers, editors, and critics in their own right. They use search engines, news aggregators, and favorite bloggers to identify what they want — or they create their own content, as desired. They are /{participants}/, not merely informed consumers who choose what some professional editor offers to them. +={blogging} + +The Web 2.0 environment was quite hospitable for the spread of the CC licenses. It enabled people to signal their willingness to share and their enthusiasm for cool niche fare as opposed to massaudience kitsch.Members of online communities could confidently share their work on wikis and collaborative Web sites, knowing that no one could appropriate their content and take it private. Socially, the licenses let people announce their social identity to others and build a countercultural ethos of sharing. The ethos became hipper and more attractive with every new antipiracy measure that Centralized Media instigated. +={Web 2.0:CC licenses, and;Creative Commons (CC) licenses:Web 2.0 environment, and} + +% Creative Commons (CC) licenses not separated from Creative Commons (CC) in index, try fix above this point + +2~ Open Networks and the Long Tail +={open networks+6} + +While technology and economics have been driving forces in shaping the new participatory platforms, much of their appeal has been frankly cultural. 
Amateur content on the Net may be raw and irregular, but it also tends to be more interesting and authentic than the highly produced, homogenized fare of commercial media. Some of it vastly outshines the lowest common denominator of mass media. Again, the cheap connectivity of the Internet has been key. It has made it possible for people with incredibly specialized interests to find one another and organize themselves into niche communities. For closeted homosexuals in repressive countries or isolated fans of the actor Wallace Beery, the Internet has enabled them to find one another and mutually feed their narrow interests. You name it, there are sites for it: the fans of obscure musicians, the collectors of beer cans, Iranian exiles, kite flyers. Freed of the economic imperative of attracting huge audiences with broad fare, niche-driven Internet content is able to connect with people’s personal passions and interests: a powerful foundation not just for social communities, but for durable markets. +={Internet:communication system, as+1} + +This, truly, is one of the more profound effects of networking technologies: the subversion of the “blockbuster” economics of the mass media. It is becoming harder and more expensive for film studios and broadcast networks to amass the huge, cross-demographic audiences that they once could. In the networked environment, it turns out that a diversified set of niche markets can be eminently profitable with lower-volume sales. While Centralized Media require a supply-side “push” of content, the Internet enables a demand-side “pull” of content by users. This radically reduces transaction costs and enhances the economic appeal of niche production. It is easier and cheaper for a company (or single creator) to “pull” niche audiences through word of mouth than it is to pay for expensive “push” advertising campaigns. 
Specialty interests and products that once were dismissed as too marginal or idiosyncratic to be profitable can now flourish in small but robust “pull markets.”~{ David Bollier, “When Push Comes to Pull: The New Economy and Culture of Networking Technology” (Washington, DC: Aspen Institute, 2006), at http://www.aspeninstitute.org/atf/cf/%7BDEB6F227-659B-4EC8-8F84-8 DF23CA704F5%7D/2005InfoTechText.pdf. }~ +={Centralized Media:Internet vs.;Internet:Centralized Media vs.} + +The term associated with this phenomenon is the “Long Tail” — the title of a much-cited article by Chris Anderson in the October 2004 issue of /{Wired}/ magazine, later expanded into a book. Anderson explained the “grand transition” now under way: +={Anderson, Chris+2;Long Tail+3} + +_1 For too long we’ve been suffering the tyranny of lowestcommon-denominator fare, subjected to brain-dead summer blockbusters and manufactured pop. Why? Economics. Many of our assumptions about popular taste are actually artifacts of poor supply-and-demand matching — a market response to inefficient distribution. . . . Hit-driven economics is a creation of an age without enough room to carry everything for everybody. Not enough shelf space for all the CDs, DVDs, and games produced. Not enough screens to show all the available movies. . . .~{ Chris Anderson, “The Long Tail,” /{Wired}/, October 2004, at http://www.wired .com/wired/archive/12.10/tail.html. }~ + +The “Long Tail” refers to the huge potential markets that can be created for low-volume niche books, CD, DVDs, and other products. More than half of Amazon’s book sales, for example, come from books that rank below its top 130,000 titles. The implication is that “the market for books that are not even sold in the average bookstore is larger than the market for those that are,” writes Anderson. 
“In other words, the potential book market may be twice as big as it appears to be, if only we can get over the economics of scarcity.” +={Amazon} + +Unconstrained by the size and tastes of a local customer base or by limited shelf space, online retailers such as Amazon, Netflix (DVDs), Rhapsody (music), and iTunes (music) are showing that the Long Tail can be a very attractive business model. These companies have developed new tools, such as collaborative filtering software and user recommendations, to drive demand for lesser-known titles at the far end of the Long Tail. This is just another instance of using new technologies that leverage people’s natural social dynamics, and in so doing inventing new types of markets. +={Amazon} + + +2~ Another Vehicle for Niche Communities: The Commons +={commons:niche communities as+13;communities:commons, and+13} + +If the Long Tail is a market vehicle for amassing niche communities, the commons is the social analogue. A commons does not revolve around money and market exchange, but around collective participation and shared values. It does not use property rights and contracts in order to generate value; it uses gift exchange and moral commitments to build a community of trust and common purpose. Such communities, it turns out, can generate significant “wealth” — as Richard Stallman demonstrated with free software. +={Stallman, Richard:free software, and;free software:economic effects of} + +Generically speaking, a commons is a governance regime for managing collective resources sustainably and equitably. The commons is generally associated with open fields, forests, and other natural resources that were collectively used by villagers for their subsistence needs. During the “enclosure movement” in medieval times and extending through the eighteenth century, British gentry and entrepreneurs began to privatize the commons and convert its resources into marketable commodities. 
Enclosures essentially dispossessed the commoners and installed a new market regime to manage resources that were previously shared. The commoners, unable to feed themselves or participate in markets, migrated to the industrial cities of England to become the wage slaves and beggars who populate Charles Dickens’s novels. +={commons:enclosure of+2|tragedy of+2;enclosure movement+2} + +Although markets tend to be more efficient than commons, they also tend to focus on that which can be sold and converted into cash. Markets presume that deserts and the public domain have no value because they have no marketable output. Markets also presume that a commons cannot be sustained because inevitably someone will overuse a shared resource — a practice known as “free riding” —and ruin it. This is the famous “tragedy of the commons” notion popularized by biologist Garret Hardin in a 1968 essay, which described how a few farmers will let their sheep overgraze a common pasture and so destroy it. +={Hardin, Garret;free riding;markets:efficiency of} + +The “tragedy of the commons” metaphor has ossified into a truism of neoclassical economics. It takes for granted that shared resources cannot be managed sustainably, and that private property regimes are much better stewards of resources. This prejudice was powerfully rebutted by political scientist Elinor Ostrom in her noted 1990 book /{Governing the Commons}/, which marshaled many empirical examples of natural resource commons that have been managed responsibly for decades or even hundreds of years. Ostrom’s scholarship has since given rise to a great deal of academic study of commons, particularly through the International Association for the Study of the Commons and the Workshop in Political Theory and Policy Analysis at Indiana University. 
It also inspired thinking about the commons by law scholars like Yochai Benkler, Lawrence Lessig, and James Boyle, who saw close parallels with the commons as they watched corporations use copyright law to enclose culture and information. +={Benkler, Yochai;Boyle, James;Ostrom, Elinor, Governing the Commons;Lessig, Lawrence:law in contemporary context, and;commons:types of+2} + +Cultural commons differ significantly from natural resource commons in this key respect: they are not finite, depletable resources like pastures or forests. Online commons tend to grow in value as more people participate, provided there is sufficient governance and common technical standards to enable sharing. Online commons, in short, are less susceptible to the dreaded “tragedy of the commons” and, indeed, tend to be highly generative of value. Their output does not get “used up” the way natural resources do. +={commons:online} + +The burden of Lessig’s 2001 book /{The Future of Ideas}/ was to argue that the Internet constitutes a great, underappreciated commons. It can serve as the infrastructure for tremendous wealth and innovation if its “layers” — the hardware, software, and content— remain sufficiently open and usable by all. The problem, he warned with great prescience, is that policymakers are generally blind to the value of the commons and markets are too eager to reap short-term individual gains. They fail to appreciate that too much private control at any “layer” of the Internet — through proprietary hardware or software, or excessive copyright or patent protection — can stifle personal freedom, market competition, and innovation. Lessig wanted to name the book /{Dot.commons}/, but his publisher rejected it as too obscure. 
+={Lessig, Lawrence:The Future of Ideas} + +One of the key advantages of treating key infrastructure (such as Internet transmission protocols and computer operating systems) as a commons is that people have the freedom to modify and improve them, with resulting benefits for all. Innovation and competition can flourish more readily. At the content layer, much of the appeal of the commons is the creative freedom, above and beyond what the market may enable. Precisely because it is a commons, and not a market, people’s freedoms are not constrained by marketability. A commons is a noncommercial, nongovernmental space that is free from corporate manipulations and government meddling. It offers a qualitatively different type of experience than the marketplace or government power. A commons tends to be more informal, a place where people know you by name, and where your contributions are known and welcomed. A commons based on relationships of trust and reciprocity can undertake actions that a business organization requiring extreme control and predictable performance cannot. +={commons:concept of+1} + +Precisely because a commons is open and not organized to maximize profit, its members are often willing to experiment and innovate; new ideas can emerge from the periphery. Value is created through a process that honors individual self-selection for tasks, passionate engagement, serendipitous discovery, experimental creativity, and peer-based recognition of achievement. The Open Prosthetics Project, for example, invites anyone to contribute to the design of a prosthetic limb and/or the specification of limbs that ought to be designed, even if they don’t know how to do it.~{ http://www.openprosthetics.org. }~ This has generated such unexpected innovations as limbs specifically adapted for rock climbers and an arm designed for fishing. 
Athletes who engage in “extreme sports” — skiing, biking, surfing — have been a rich source of ideas for new products, just as software hackers are among the first to come up with innovative programming ideas. +={commoners:sef-selection of;commons:value proposition of+1;value:creation of+1} + +Part of the value proposition of the commons at the content layer is that it can host a more diverse range of expression — personal, social, and creative — than the market, in part because it does not have the burden of having to sustain costly overhead and sell a product. It has other goals — the personal interests and whims of the commoners — and it can often meet those needs inexpensively. Yet the commons does in fact generate many marketable innovations, thanks to its open accessibility, the social relationships it enables and the free sharing and circulation of work. + +Seeing the success of online commons, Centralized Media have tried to fight back by embracing elements of user participation. They invite audiences to vote in polls (/{American Idol}/), publish lists of “most e-mailed” articles (major newspapers), and direct radio listeners to their Web sites for more information (National Public Radio). /{Time}/ magazine’s choice for the “Person of the Year” in 2006 — “You,” the primary driver of Web sites like MySpace and YouTube — was a landmark moment in media history: with a pinched smile and backhanded assertion of its cultural authority, Centralized Media formally acknowledged its most powerful competitor, Decentralized Media! +={Centralized Media:competition, and+2;commons:online;Time (magazine)} + +Yet for all the celebration of “you” as the master of your own fate in cyberspace, the question that is skirted is whether “you” can indeed retain control of your stuff in a Centralized Media environment. 
The point of conventional business models, after all, is to engineer a proprietary lock-in of customers through technological dependence, binding contract terms, frequent-buyer credits, brand loyalty, etc. That’s how companies have traditionally secured a more durable customer base and preempted competition. + +But the commons is about securing user freedoms, and not necessarily about prevailing in a market. Web 2.0 may or may not protect both concerns. Like the commons, Web 2.0 relies upon user-generated content, network effects, and bottom-up innovation. But Web 2.0 entrepreneurs, at the end of the day, need to make money. Their sites need to adopt business practices that protect revenue streams. Facebook is catering to advertisers, not users, when they sift through masses of users’ personal data in order to sell targeted advertising. MySpace at one point refused to let its users connect to rival Web sites and outside software “widgets.”~{ Rachel Rosmarin, “Why MySpace Blinked,” /{Forbes}/, April 24, 2007. }~ In this sense, Web 2.0 media may be “open,” but they are not necessarily “free,” as in freedom. Web 2.0 entrepreneurs are more likely to focus on protecting their market advantages than advancing user freedoms. The two issues may overlap substantially, but they are not identical. +={commons:concept of;Facebook;MySpace;Web 2.0:purposes of+1} + +Science-fiction writer William Gibson once wrote, “The future is already here; it’s just not well-distributed yet.” That sums up the Great Value Shift circa 2003. The efficiencies and affordances made possible by the Internet were there. They were enabling all sorts of pioneers to build new business models, new creative genres, and new online communities — but these innovations were unevenly distributed. More to the point, their potential was unevenly perceived, especially in many precincts of Washington officialdom and the corporate world. 
The challenge for amateurs venturing onto open platforms was to validate the new sorts of socially created value enabled by the Internet. +={Gibson, William;Great Value Shift;Internet:Great Value Shift, and} + +1~ 6 CREATORS TAKE CHARGE + +/{Rip, remix, burn, mashup — legally. The CC licenses facilitate new Internet genres and business models.}/ + +The first users of CC licenses understood that something different was going on; a different order was taking shape. More than just a legal tool, the CC licenses gave the tech vanguard a way to express their inchoate sense that a new and better world was possible, at least on the Internet. They yearned for a noncommercial sharing economy with a different moral calculus than mass media markets, and for markets that are more open, accountable, and respectful of customers. +={Creative Commons (CC) licenses:first users of+9|books, for+19} + +The early adopters were unusually informed about the politics of technology, skeptical of Big Media, and passionate about the artistic freedoms and social responsibility. They were a locally engaged but globally aware network of tech sophisticates, avant-garde artists, clued-in bloggers, small-/{d}/ democratic activists, and the rebellious of spirit: the perfect core group for branding the Creative Commons and instigating a movement. + +It only made sense that Cory Doctorow — copyfighter, science-fiction writer, tech analyst, co-editor of the popular Boing Boing blog — became the first book author to use a CC license. Doctorow — then a thirty-two-year-old native of Canada, the son of Trotskyite schoolteachers, the European representative for the Electronic Frontier Foundation from 2002 to 2006 — is a singular character on the tech/intellectual property/free culture circuit. He can hold forth with intelligence, wry wit, and bravado on digital rights management, Internet economics, or the goofy gadgets and pop culture artifacts that he regularly showcases on Boing Boing. 
+ +In January 2003, a month after the CC licenses were released, Doctorow published his first novel, /{Down and Out in the Magic Kingdom}/, under an Attribution, NonCommercial, No Derivative Works license (BY-NC-ND). Simultaneously, his progressive-minded publisher, Tor Books of New York City, sold hard copies of the book. “Why am I doing this thing?” Doctorow asked rhetorically: +={Doctorow, Cory:Down and Out in the Magic Kingdom+5} + +_1 Well, it’s a long story, but to shorten it up: first-time novelists have a tough row to hoe. Our publishers don’t have a lot of promotional budget to throw at unknown factors like us. Mostly, we rise and fall based on word-of-mouth. I’m not bad at word-of-mouth. I have a blog, Boing Boing (http://boingboing.net), where I do a /{lot}/ of word-of-mouthing. I compulsively tell friends and strangers about things I like. And telling people about stuff is /{way, way}/ easier if I can just send it to ’em. Way easier.~{ Cory Doctorow, “A Note About This Book,” February 12, 2004, and “A Note About This Book,” January 9, 2003, in /{Down and Out in the Magic Kingdom}/, available at http://www.craphound.com/down. }~ + +A year later, Doctorow announced that his “grand experiment” was a success; in fact, he said, “my career is turning over like a goddamned locomotive engine.” More than thirty thousand people had downloaded the book within a day of its posting. He proceeded to release a collection of short stories and a second novel under a CC license. 
He also rereleased /{Down and Out in the Magic Kingdom}/ under a less restrictive CC license — an Attribution, NonCommercial, ShareAlike license (BY-NC-SA), which allows readers to make their own translations, radio and film adaptations, sequels, and other remixes of the novel, so long as they are made available on the same terms.~{ Anna Weinberg,“Buying the Cow, Though the Milk Is Free: Why Some Publishers are Digitizing Themselves,” June 24, 2005, /{Book Standard}/, June 24, 2005, available at http://www.thebookstandard.com/bookstandard/news/publisher/ article_display.jsp?vnu_content_id=1000968186.}~ + +With some sheepish candor, Doctorow conceded: “I wanted to see if the sky would fall: you see writers are routinely schooled by their peers that maximal copyright is the only thing that stands between us and penury, and so ingrained was this lesson in me that even though I had the intellectual intuition that a ‘some rights reserved’ regime would serve me well, I still couldn’t shake the atavistic fear that I was about to do something very foolish indeed.” + +By June 2006, /{Down and Out in the Magic Kingdom}/ had been downloaded more than seven hundred thousand times. It had gone through six printings, many foreign translations, and two competing online audio adaptations made by fans. “Most people who download the book don’t end up buying it,” Doctorow conceded, “but they wouldn’t have bought it in any event, so I haven’t lost any sales. I’ve just won an audience. A tiny minority of downloaders treats the free e-book as a substitute for the printed book — those are the lost sales. But a much larger minority treats the e-book as an enticement to buy the printed book. They’re gained sales. As long as gained sales outnumber lost sales, I’m ahead of the game. 
After all, distributing nearly a million copies of my book has cost me nothing.”~{ Cory Doctorow, “Giving it Away,” Forbes.com, December 1, 2006, available at http://www.forbes.com/2006/11/30/cory-doctorow-copyright-tech-media _cz_cd_books06_1201doctorow.html. }~ In 2008, Doctorow’s marketing strategy of giving away online books to stimulate sales of physical books paid off in an even bigger way. His novel for teenagers, /{Little Brother}/, about a youthful hacker who takes on the U.S. government after it becomes a police state, spent weeks on the /{New York Times}/ bestseller list for children’s books. + +It is perhaps easier for a sci-fi futurist like Doctorow than a publishing business to take such a wild leap into the unknown. But that, too, is an important insight: artists are more likely to lead the way into the sharing economy than entrenched industries. “I’d rather stake my future on a literature that people care about enough to steal,” said Doctorow, “than devote my life to a form that has no home in the dominant medium of the century.” Book lovers and authors will pioneer the future; corporate publishing will grudgingly follow, or be left behind. + +Over the past few years, a small but growing number of pioneering authors have followed Doctorow’s lead and published books under Creative Commons licenses. While the hard evidence is scarce, many authors who use CC licenses believe that releasing free electronic versions of their books does not hurt, and probably helps, the sales of physical copies of their books. Lessig released his 2004 book, /{Free Culture}/, under an Attribution, NonCommercial license (BY-NC), and scores of authors and established publishers have since released books under CC licenses. 
Among the more notable titles: Yochai Benkler’s /{The Wealth of Networks}/ (Yale University Press, 2006), Kembrew McLeod’s /{Freedom of Expression}/ (Doubleday, 2005), Peter Barnes’s /{Capitalism 3.0}/ (Berrett-Koehler, 2006), and Dan Gillmor’s /{We the Media}/ (O’Reilly Media, 2004). +={Barnes, Peter:Capitalism 3.0;Benkler, Yochai:The Wealth of Networks;Gillmor, Dan:We the Media;Lessig, Lawrence:Free Culture;McLeod, Kembrew:Freedom of Expression} + +In 2006, Paulo Coelho, author of a bestselling book, /{The Alchemist}/, created a “pirate” blog site that invited readers to use BitTorrent and other file-sharing networks to download free copies of his books. After he put the Russian translation of /{The Alchemist}/ online, sales of hardcover copies in Russia went from around 1,000 a year to 100,000, and then to more than 1 million. Coelho attributes the success of foreign translations of his book to their free availability online.~{ Smaran, “Alchemist Author Pirates His Own Book,” TorrentFreak blog, January 24, 2008, at http://torrentfreak.com/alchemist-author-pirates-own-books080124. }~ Experiments such as these were likely influential in the launch of LegalTorrents, a site for the legal peer-to-peer distribution of CC-licensed text, audio, video games, and other content. +={blogging;Coelho, Paulo} + +The CC licenses have been useful, not just for helping individual authors promote their books, but in fueling open-access scholarly publishing. As we will see in chapter 11, the CC licenses help scientists put their “royalty-free literature” on the Internet — a move that enlarges their readership, enhances their reputations, and still enables them to retain copyrights in their works. + +Free culture publishing models are popping up in many unusual quarters these days. 
LibriVox, to take one instance, is a nonprofit digital library of public-domain audio books that are read and recorded by volunteers.~{ Mia Garlick, “LibriVox,” Creative Commons blog, December 5, 2006, at http://creativecommons.org/text/librivox. }~ Since it started in 2005, the group has recorded more than 150 books by classic authors from Dostoyevsky and Descartes to Jane Austen and Abraham Lincoln. All of them are free. Most are in English but many are in German, Spanish, Chinese, and other languages. +={Lessig, Lawrence:Free Culture+1} + +Founder Hugh McGuire said the inspiration for LibriVox was a distributed recording of Lessig’s book /{Free Culture}/ read by bloggers and podcasters, chapter by chapter. “After listening to that, it took me a while to figure out how to record things on my computer (which I finally did, thanks to free software Audacity). Brewster Kahle’s call for ‘Universal Access to all human knowledge’ was another inspiration, and the free hosting provided by archive.org and ibiblio.org meant that LibriVox was possible: there was no worry about bandwidth and storage. So the project was started with an investment of $0, which continues to be our global budget.” LibriVox’s mission, said McGuire, is the “acoustical liberation of books in the public domain.” +={Kahle, Brewster;LibriVox;McGuire, Hugh;blogging} + +Several publishing businesses now revolve around CC licenses. Wikitravel is a collaborative Web site that amasses content about cities and regions around the world; content is licensed under the CC Attribution, ShareAlike license (BY-SA).~{ “Wikitravel Press launches,” Creative Commons blog, August 3, 2007, at http://creativecommons.org/weblog/entry/7596. See also Mia Garlick, “Wikitravel,” Creative Commons blog, June 20, 2006, at http://creativecommons.org/text/wikitravel. }~ In 2007, its founder joined with a travel writer to start Wikitravel Press, which now publishes travel books in a number of languages. 
Like the Wikitravel Web pages, the text in the books can be freely copied and reused. +={Wikitravel Press} + +Another new business using CC licenses is Lulu, a technology company started by Robert Young, the founder of the Linux vendor Red Hat and benefactor for the Center for the Public Domain. Lulu lets individuals publish and distribute their own books, which can be printed on demand or downloaded. Lulu handles all the details of the publishing process but lets people control their content and rights. Hundreds of people have licensed their works under the CC ShareAlike license and Public Domain Dedication, and under the GNU Project’s Free Documentation License.~{ Mia Garlick, “Lulu,” Creative Commons blog, May 17, 2006, at http://creativecommons.org/text/lulu. }~ +={Lulu;Red Hat;Young, Robert;Center for the Public Domain;GNU Project:GNU FDL;public domain:Center for Public Domain} + +As more of culture and commerce move to the Internet, the question facing the book industry now is whether the text of a book is more valuable as a physical object (a codex) or as a digital file (intangible bits that can circulate freely), or some combination of the two. Kevin Kelly, the former editor of /{Wired}/ magazine, once explained: “In a regime of superabundant free copies, copies lose value. They are no longer the basis of wealth. Now relationships, links, connection and sharing are. Value has shifted away from a copy toward the many ways to recall, annotate, personalize, edit, authenticate, display, mark, transfer and engage a work.”~{ Kevin Kelly, “Scan This Book!” /{New York Times Magazine}/, May 14, 2006, p. 43. }~ +={Kelly, Kevin+1} + +What this means in practice, Kelly has pointed out, is that books become more valuable as they become more broadly known and socially circulated — the very functionalities that the Internet facilitates. 
If people can discover a book online and read portions of it, share it with friends, and add annotations and links to related materials, it makes a book more desirable than a hard-copy version that is an inert text on a shelf. As Kelly writes: “When books are digitized, reading becomes a community activity. Bookmarks can be shared with fellow readers. Marginalia can be broadcast. Bibliographies swapped. You might get an alert that your friend Carl has annotated a favorite book of yours. A moment later, his links are yours.”~{ Ibid., p. 45. }~ + +Needless to say, most book publishers and authors’ organizations are not yet prepared to embrace this newfangled value proposition. It seems way too iffy. A “sharing” business model would seemingly cannibalize their current revenues and copyright control with little guarantee of doing better in an open, online milieu. The bigger problem may be the cultural prejudice that an absolute right of control over any possible uses of a book is the best way to make money. +={open business models} + +In general, the publishing trade remains skeptical of the Internet, clueless about how to harness its marketing power, and strangers to CC licenses. And it could be years before mainstream publishing accepts some of the counterintuitive notions that special-interest Internet communities will drive publishing in the future. In a presentation that caused a stir in the book industry, futurist Mike Shatzkin said in May 2007 that this is already happening in general trade publishing: “We’re close to a tipping point, or maybe we’re past it . . . 
where Web-based branding will have more credibility than print, because print, needing more horizontal reach to be viable, won’t deliver the attention of the real experts and megaphones in each field.”~{ Mike Shatzkin, “The End of General Trade Publishing Houses: Death or Rebirth in a Niche-by-Niche World,” presented to the Book Expo America, New York, May 31, 2007, available at http://www.idealog.com/speeches/ endoftrade.htm. }~ +={Shatzkin, Mike} + +2~ DIY Videos and Film +={Internet:videos and films on+12;videos and film+12;World Wide Web:videos and film on+12} + +One of the biggest cultural explosions of the past decade has been amateur video on the Web. The volume of online video has been so great that there are actually many distinct genres of amateur video: short videos on YouTube, video mashups, “machinima” (a combination of video and online gaming images), amateur pornography, and hybrid forms that combine user videos with conventional broadcast and cable television shows. Just as the Great Value Shift has empowered musicians, so it is giving video- and filmmakers new powers to express themselves as they wish, and reach huge audiences via the Internet. This power represents a potentially major threat to the cultural dominance of the television and film industries, as reflected in various schemes by the networks and studios to establish their own online presences. The threat of do-it-yourself (DIY) video and film is big enough that Viacom alleged that YouTube’s copyright infringements of Viacom-owned video should entitle Viacom to $1 billion in damages. The entertainment industry and the Writers Guild of America endured a long, bitter strike in 2007–2008 precisely because the projected revenues from Internet video are so large. 
+={Great Value Shift;Internet:Great Value Shift, and;YouTube+2;Viacom;Writers Guild of America} + +It is too early to know which new video styles will be flash-inthe-pan novelties and which will ripen into popular, and perhaps lucrative, genres. But rarely has a culture seen so many diverse experiments in amateur and indie video expression. One site, Justin.tv, is a free platform for broadcasting and viewing live video. Some people make round-the-clock “life casts” of their daily activities; others have used it to broadcast live from Baghdad, showing war-related events. Yahoo and Reuters have entered into a partnership to host amateur photojournalism by people using their digital cameras and camera phones. Machinima video, the product of the underground gaming community, blends filmmaking with online games to produce computer-generated imagery. As John Seely Brown describes it, “Basically, you can take Second Life or Worlds of Warcraft and have a set of avatars run all over the world, that come together and create their own movie, and then you can ‘YouTube’ the movie.”~{ Cited in David Bollier, /{The Rise of Collective Intelligence: Decentralized Cocreation of Value as a New Paradigm in Commerce and Culture}/ (Washington, DC: Aspen Institute Communications and Society Program, 2007), p. 27. }~ +={Brown, John Seely} + +As amateur video and film proliferate, thanks to inexpensive technologies and Internet access, the CC licenses have obvious value in letting the creator retain a copyright in the video while inviting its duplication and reuse by millions of people online. To industry traditionalists locked into binary options, the free circulation of a work precludes any moneymaking opportunities. But of course, that is precisely what is now being negotiated: how to devise ingenious new schemes to make money from freely circulating video. One option is to own the platform, as YouTube does. 
But there are also competitors such as Revver and blip.tv, which have established their own approaches based on advertising and commercial licensing of works. There are also schemes that use Internet exposure to drive paying customers into theaters and advertisers to buy commercial licenses. For some amateurs, DIY video is simply a way to get noticed and hired by a conventional media company. +={Creative Commons (CC) licenses:video and film, for+8} + +That’s what the Los Angeles–based comedy collective The Lonely Island did to promote themselves to national attention. They posted their comedy shorts and songs to their Web site using Creative Commons licenses. Soon other artists began making remixes of their songs. The remixes in effect served as free marketing, which caught the attention of the Fox Broadcasting Company, which in turn hired them to create a comedy pilot TV episode. In the end, Fox did not pick up the show, but as /{Wired News}/ recounted, “Instead of letting the show wither on a shelf somewhere, the group posted the full video both cut and uncut. The edgy, quirky short— Awesometown — spread like wildfire online and eventually landed all three performers an audition spot for /{Saturday Night Live}/.”~{ Matt Haughey, “From LA’s Awesometown to New York City’s SNL,” /{Wired News}/, October 1, 2005. }~ + +Perhaps the most successful example of leveraging free Internet exposure to reap commercial benefits is the sci-fi parody /{Star Wreck}/. Finnish producer Samuli Torssonen took seven years to shoot a fulllength movie using a Sony DVCAM, computer-generated graphics, and a makeshift studio. Some three hundred people were involved in the project, including some professional actors and many amateurs. When /{Star Wreck}/ was deliberately posted to the Internet in 2005, tagged with a CC-BY-NC-ND license (Attribution, NonCommercial, No Derivatives), it was eventually downloaded 5 million times and became the most-watched Finnish film in history. 
Fans in Russia, China, and Japan soon copied the film, which stimulated broader viewer demand and led to commercial deals to distribute the film. /{Star Wreck}/ became so popular that Universal Pictures, the American studio, signed a deal in 2006 to distribute DVD versions of the film. Torssonen says that the film has earned a 20-to-1 return on investment. “I wouldn’t call free distribution stupid, as some people say, but a success,” he told an audience in 2007.~{ Samuli Torssonen presentation at iCommons Summit 2007, Dubrovnik, Croatia, June 15, 2007. See also www.starwreck.com. }~ +={Star Wreck Studios+1;Torssonen, Samuli} + +The lesson for Stephen Lee, CEO of Star Wreck Studios, is that “you don’t need millions to make a quality movie. You need an active, passionate community.” Lee says the plan for a peer-produced model of “wrecking a movie” is to develop an Internet collaboration, make the film popular through viral marketing, and then license it commercially. Star Wreck Studios is now developing a new movie, /{Iron Sky}/, about a Nazi base on the far side of the moon. +={Lee, Stephen} + +One of the more daring experiments in film production is being pioneered by the Blender Institute, a studio for open-content animation and game projects located in the Amsterdam docklands. Started in August 2007, the Institute employs fourteen full-time people who are obsessed with improving its three-dimensional open-source software, the so-called Blender 3D suite. The software is widely used by a large international user community for modeling, animation, rendering, editing, and other tasks associated with 3D computer-generated animation. +={Blender Institute+2} + +Ton Roosendaal, who directs the Blender Institute, is trying to demonstrate that a small studio can develop a virtuous cycle of economically sustainable creativity using open-source software, Creative Commons licenses, and talented programmers and artists from around the world. 
“We give programmers the freedom to do their best, and what they want to do is improve the technology,” he said. “The market is too hyper-rational and nailed down and filled with limits,” he argues, referring to his peers at major animation studios. “Open source is free of most of these constraints.”~{ Ton Roosendaal remarks at conference, “Economies of the Commons,” De Balie Centre for Culture and Politics, Amsterdam, April 10–12, 2008. }~ +={Roosendaal, Ton} + +In April 2008, the Blender Institute released a ten-minute animated short, /{Big Buck Bunny}/, which features a kind-hearted, fat white bunny who endures the abuse of three stone-throwing rodents until they smash a beautiful butterfly with a rock — at which point the bunny rallies to teach the bullies a lesson.~{ The film can be downloaded at http://www.bigbuckbunny.org/index.php/ download. }~ The film uses cutting-edge computer-generated animation techniques that rival anything produced by Pixar, the Hollywood studio responsible for /{Toy Story}/, /{Cars}/, and /{Ratatouille}/. /{Big Buck Bunny}/ is licensed under a CC Attribution license, which means the digital content can be used by anyone for any purpose so long as credit is given to the Blender Institute. +={Big Buck Bunny (animated short)+1} + +/{Big Buck Bunny}/ was initially distributed to upfront investors as a DVD set that includes extras such as interviews, outtakes, deleted scenes, and the entire database used in making the film. Then, to pique wider interest in sales of the DVD set, priced at thirty-four euros, a trailer was released on the Internet. This resulted in extensive international press coverage and blog exposure. Early signs are promising that Blender will be able to continue to make highquality animation on a fairly modest budget without worries about illegal downloads or a digital rights management system. 
The Blender production model also has the virtue of enabling access to top creative talent and cutting-edge animation technologies as well as efficient distribution to paying audiences on a global scale. + +While CC-licensed films are not common, neither are they rare. Davis Guggenheim, the filmmaker who directed /{An Inconvenient Truth}/, made a short film, /{Teach}/, to encourage talented people to become teachers. The film was released in 2006 under a CC BY-NC-ND license because Guggenheim wanted the film widely available to the public yet also wanted to preserve the integrity of the stories told, hence the NoDerivatives provision. A Spanish short film, /{Lo que tú Quieras Oír}/, became YouTube’s fifth most-viewed video — more than 38 million views. The film’s viral diffusion may have been helped by the CC BY-NC-SA (Attribution, NonCommercial, ShareAlike) license, which allows viewers not only to share the film, but to remix for noncommercial purposes so long as they use the same license. +={Guggenheim, Davis;YouTube} + +In Brazil, director Bruno Vianna released his first full-length film, /{Cafuné}/, under a CC BY-NC-SA license (Attribution, NonCommercial, ShareAlike) and put it on file-sharing networks at the same time that it was exhibited in a handful of theaters.~{ Mia Garlick, CC blog, at http://creativecommons.org/weblog/entry/6048; see also “Cafuné breaking the limits for open business models,” iCommons blog, at http://www.icommons.org/static/2006/11/22/cafune-breakingthe-limits-for-open-business-models. }~ Each release had different endings; downloaders were invited to remix the ending as they wished. The film was financed by the government’s culture ministry as part of a competition for low-budget films, but only about fifty Brazilian films are released to commercial theaters each year. Vianna saw the Internet release as a great way to build an audience for his debut film . . . which is exactly what happened. 
For some weeks, it made it into the list of twenty most-watched films in the country. +={Vianna, Bruno} + +2~ Letting the Music Flow +={music:CC licenses for+18|remixes+18;remix works+18;Creative Commons (CC) licenses:music, for+18} + +Media reform activist Harold Feld offers a succinct overview of why creativity in music — and therefore the business of selling recorded music — has suffered over the past two decades: +={Feld, Harold+2} + +_1 The 1990s saw a number of factors that allowed the major labels to push out independents and dominate the market with their own outrageously priced and poorly produced products: consolidation in the music industry, the whole “studio system” of pumping a few big stars to the exclusion of others, the consolidation in music outlets from mom-andpop record stores to chains like Tower Records and retail giants like Wal-Mart that exclude indies and push the recordings promoted by major labels, and the consolidation of radio — which further killed indie exposure and allowed the labels to artificially pump their selected “hits” through payola. All this created a cozy cartel that enjoyed monopoly profits. +={music:music industry+1} + +_1 As a result, the major labels, the mainstream retailers, and the radio broadcasters grew increasingly out of touch with what listeners actually wanted. But as long as the music cartel controlled what the vast majority of people got to hear, it didn’t matter . . . The music cartel remained the de facto only game in town.~{ Harold Feld, “CD Sales Dead? Not for Indies!” blog post on Public Knowledge Web site, March 27, 2007, at http://www.publicknowledge.org/node/ 890. }~ + +Changing the music industry is obviously a major challenge that is not going to be solved overnight. Still, there is a growing effort led by indie musicians, small record labels, Internet music entrepreneurs, and advocacy groups such as the Future of Music Coalition to address these problems. 
Creative Commons is clearly sympathetic, but has largely focused on a more modest agenda — enabling a new universe of shareable music to arise. Its chief tools for this mission, beyond the CC licenses, are new software platforms for legal music remixes, online commons that legally share music, and new business models that respect the interests of both fans and artists. Ultimately, it is hoped that a global oeuvre of shareable music will emerge. Once this body of music matures, attracting more artists and fans in a self-sustaining viral spiral, the record industry may be forced to give up its dreams of perfect control of how music may circulate and adopt fan-friendly business practices. +={Future of Music Coalition} + +This, at least, is the theory, as Lessig explains it. He calls it the “BMI strategy,” a reference to the strategy that broadcasters and musicians used to fight ASCAP’s monopoly control over radio music in the early 1940s. ASCAP, the American Society of Composers, Authors and Publishers, is a nonprofit organization that collects royalties for musical performances. At the time, ASCAP required artists to have five hits before it would serve as a collection agency for them, a rule that privileged the playing of pop music on the radio at the expense of rhythm and blues, jazz, hillbilly, and ethnic music. Then, over the course of eight years, ASCAP raised its rates by 450 percent between 1931 and 1939 — at which point, ASCAP then proposed /{doubling}/ its rates for 1940. In protest, many radio stations refused to play ASCAP-licensed music. They formed a new performance-rights body, BMI, or Broadcast Music, Inc., which sought to break the ASCAP monopoly by offering free arrangements of public-domain music to radio stations. They also charged lower rates than ASCAP for licensing music and offered better contracts for artists.~{ Donald Clarke, /{The Rise and Fall of Popular Music}/, chapter 11. 
}~ +={ASCAP+1;BMI (Broadcast Music, Inc.)+3;music:ASCAP+1;Lessig, Lawrence:CC licenses, and+2|music, and+2} + +“The Internet is today’s broadcasters,” said Lessig in a 2006 speech. “They are facing the same struggle.”~{ Lessig explained his BMI strategy at a speech, “On Free, and the Differences Between Culture and Code,” at the 23d Chaos Communications Conference (23C3) in Berlin, Germany, December 30, 2006; video can be watched at http://video.google.com/videoplay?docid=7661663613180520595&q=23c3. }~ Just as ASCAP used its monopoly power to control what music could be heard and at what prices, he said, so today’s media corporations want to leverage their control over content to gain control of the business models and technologies of digital environments. When Google bought YouTube, one-third of the purchase price of $1.65 billion was allegedly a financial reserve to deal with any copyright litigation, said Lessig. This is how the incumbent media world is trying to stifle the emergence of free culture. +={Google;YouTube} + +The same questions that once confronted broadcasters are now facing Internet innovators, Lessig argues: “How do we free the future from the dead hand of the past? What do we do to make it so they can’t control how technology evolves?” With copyright terms lasting so long, it is not really feasible to try to use public-domain materials to compete with a commercial cartel. Lessig’s answer is a BMI-inspired solution that uses the CC licenses to create a new body of “free” works that, over time, can begin to compete with popular works. The legendary record producer Jerry Wexler recalled how ASCAP marginalized R & B, country, folk, and ethnic music, but “once the lid was lifted — which happened when BMI entered the picture — the vacuum was filled by all these archetypal musics. 
BMI turned out to be the mechanism that released all those primal American forms of music that fused and became rock-and-roll.”~{ From BMI, Inc., Web site, at http://www.bmi.com/genres/entry/533380. }~ Lessig clearly has similar ambitions for Creative Commons. +={Wexler, Jerry} + +For now, the subculture of CC-licensed music remains something of a fringe movement. It is easy to patronize it as small, amateurish, and quirky. Yet its very existence stands as a challenge to the music industry by showing the feasibility of a more artist- and fan-friendly way of distributing music. Is it visionary to believe that free culture artists will force the major labels to change — just as BMI forced ASCAP to lower prices — and make them more competitive and inclusive? +={ASCAP;music:ASCAP+1;music:music industry} + +Creative Commons’s primary task is practical — to help musicians reach audiences directly and reap more of the financial rewards of their music. So far, a wide range of indie bands, hip-hop artists, and bohemian experimentalists of all stripes have used the licenses. One of the most popular is the Attribution, NonCommercial license, which lets artists share their works while getting credit and retaining commercial rights. A number of marquee songwriters and performers — David Byrne, Gilberto Gil, the Beastie Boys, Chuck D — have also used CC licenses as a gesture of solidarity with free culture artists and as an enlightened marketing strategy. Inviting people to remix your songs is a great way to engage your fan base and sell more records. And tagging your music with a CC license, at least for now, wraps an artist in a mantle of tech sophistication and artistic integrity. +={Beastie Boys;Byrne, David;Chuck D;Gil, Gilberto} + +Guitarist Jake Shapiro was one of the first musicians to show the marketing potential of unleashing free music on the Internet. In 1995, Shapiro put MP3 files of music by his band, Two Ton Shoe, on the group’s Web site. 
Within a few years, Two Ton Shoe was one of the most-downloaded bands on the Internet, developing fan bases in Italy, Brazil, Russia, and South Korea. One day Shapiro received a phone call out of the blue from a South Korean concert promoter. He wanted to know if the band would fly over to Seoul to perform four concerts. It turned out that fans in South Korea, where fast broadband connections are the norm, had discovered Two Ton Shoe through file sharing. A local CD retailer kept getting requests for the band’s music, which led him to contact a concert promoter. In August 2005, Shapiro and his buddies arrived in Seoul as conquering rock stars, selling out all four of their concerts. “The kids who showed up knew all the words to the songs,” Shapiro recalled. A year later, the band signed a deal to distribute a double CD to East Asia.~{ Shapiro described his experiences at the “Identity Mashup Conference,” June 19–21, 2006, hosted by the Berkman Center for Internet and Society at Harvard Law School, at http://blogs.law.harvard.edu/mediaberkman/2006/ 06/28/id-mashup-2006-day-two-the-commons-open-apis-meshups-andmashups. His band’s Web site is at http://www.twotonshoe.com. }~ +={Shapiro, Jake;Two Ton Shoe} + +While such stories of viral marketing success are not common, neither are they rare. Lots of bands now promote themselves, and find admiring (paying) fans, by posting their music, for free, on Web sites and file-sharing sites. Perhaps the most scrutinized example was Radiohead’s decision to release its album /{In Rainbows}/ for free online, while inviting fans to pay whatever they wanted. (The band did not release any numbers, but considered the move a success. They later released the album through conventional distribution channels as well.)~{ Jon Pareles, “Pay What You Want for This Article,” /{New York Times}/, December 9, 2007. 
}~ +={Radiohead} + +Just as previous generations of fans came together around FM radio or live performance venues, the Internet is the new gathering place for discovering interesting, fresh, and authentic talent. The lesson that the record industry hasn’t quite learned is that music is not just a commodity but a /{social experience}/ — and social experiences lose their appeal if overly controlled and commercialized. If the music marketplace does not provide a place for fans to congregate and share in a somewhat open, unregimented way — if the commodity ethic overwhelms everything else — the music dies. Or more accurately, it migrates underground, outside the marketplace, to sustain itself. This is why so much of the best new music is happening on the fringes of the stagnant commercial mainstream. +={music:social experience, as+4} + +It is also why the Creative Commons licenses have acquired such cachet. They have come to be associated with musicians who honor the integrity of music making. They symbolize the collective nature of creativity and the importance of communing freely with one’s fans. Nimrod Lev, a prominent Israeli musician and supporter of the CC licenses, received considerable press coverage in his country for a speech that lamented the “cunning arrangement” (in Israeli slang, /{combina}/) by which the music industry has betrayed people’s love of music, making it “only a matter of business and commerce.” Said Lev: +={music:music industry+1;Lev, Nimrod+2} + +_1 The music industry treats its consumer as a consumer of sex, not of love, the love of music. Just like everything else: a vacuum without values or meaning. But it is still love that everyone wants and seeks. . . . The music vendors knew then [a generation ago] what they have forgotten today, namely that we must have cultural heroes: artists that are not cloned in a manner out to get our money. 
There was an added value with a meaning: someone who spoke to our hearts in difficult moments, and with that someone, we would walk hand in hand for a while. We had loyalty and love, and it all meant something.~{ Nimrod Lev, “The Combina Industry,” November 16, 2004, at http://law .haifa.ac.il/techlaw/new/try/eng/nimrod.htm. }~ + +At the risk of sounding naïve, Lev said he wanted to stand up for the importance of “authenticity and empathy and my own truth” in making music. It is a complaint that echoes throughout the artistic community globally. A few years ago, Patti Smith, the punk rocker renowned for her artistic integrity, decried the “loss of our cultural voice” as the radio industry consolidated and as music television became a dominant force. She grieved for the scarcity of places for her to “feel connected” to a larger musical community of artists and fans.~{ Patti Smith at a panel at the National Conference for Media Reform, St. Louis, sponsored by Free Press, May 14, 2005. }~ +={Smith, Patti} + +The classic example of music as social experience — music as a vehicle for a community of shared values — is the Grateful Dead. The band famously invited its fans to record all of its concerts and even provided them with an authorized “tapers’ section” in which to place their microphones and equipment. Fans were also allowed to circulate their homemade tapes so long as the music was shared, and not sold. This had the effect of building a large and committed fan base, which avidly archived, edited, and traded Grateful Dead cassettes. One reason that the Dead’s “customer base” has been so lucrative and durable over several decades is that the fans were not treated as mere customers or potential pirates, but as a community of shared values. 
The music belonged to the fans as much as to the band, even though Deadheads were only too happy to pay to attend concerts and buy the officially released CDs and t-shirts.~{ A fascinating collision of the Grateful Dead’s sharing ethic and the copyright business model occurred in 2005, when the Internet Archive placed a huge cache of fan recordings online, available for free download. When Grateful Dead Merchandising objected, Deadheads accused the band’s representatives of betraying the band’s long-established sharing ethic. Paradoxically, the band’s merchandisers may also have jeopardized the band’s commercial appeal by prohibiting the downloads. As music critic Jon Pareles put it, “The Dead had created an anarchy of trust, going not by statute but by instinct and turning fans into co-conspirators, spreading their music and buying tickets, T-shirts and official CDs to show their loyalty. The new approach . . . removes what could crassly be called brand value from the Dead’s legacy by reducing them to one more band with products to sell. Will the logic of copyright law be more profitable, in the end, than the logic of sharing? That’s the Dead’s latest improvisational experiment.” Jon Pareles, “The Dead’s Gamble: Free Music for Sale,” /{New York Times}/, December 3, 2005. }~ +={Grateful Dead+1} + +While the Grateful Dead may be an outlier case, it exemplifies the sharing ethic that the Internet is facilitating: the formation of communities of amateurs that flourish by sharing and celebrating music. Artists can make some money through CD sales, but much more through performances, merchandising, endorsements, and sales to films, television, and advertisers. If established singers and bands are reluctant to make a transition to this new business model, hungry newcomers are not. + +The Mountain Goats, an indie rock group, authorized the Internet Archive to host their live shows on the Web because they realized the videos seed market demand for their music. 
The group’s front man, John Darnielle, said, “I am totally in favor of tape trading, and file sharing never did anything wrong by me. People got into The Mountain Goats after downloading my stuff.”~{ Creative Commons blog, “Musicians Large and Small on Internet Downloading,” by Matt Haughey, July 26, 2004. }~ In 2001, two newcomers working out of a basement produced a cover version of Tears for Fears’ “Mad World,” which two years later went to the top of the British pop charts.~{ http://news.bbc.co.uk/l/hi/entertainment/3352667.stm. }~ In a world where amateur creativity can easily migrate to the commercial mainstream, tagging works with a NonCommercial CC license is a valuable option. By requiring uses that fall outside the scope of the license to pay as usual, it can help artists get visibility while retaining their potential to earn money. A larger restructuring of the music industry, alas, will take longer to achieve. +={Darnielle, John;Mountain Goats} + +2~ Music as Remix +={Creative Commons (CC) licenses:music, for+18;music:remixes+18;remix works+18} + +If any segment of the music world really understands the social dynamics of musical creativity, it is hip-hop artists. As Joanna Demers documents in her book about “transformative appropriation” in music, /{Steal This Music}/, hip-hop was born as a remix genre in the 1970s and 1980s.~{ Joanna Demers, /{Steal This Music: How Intellectual Property Law Affects Musical Creativity}/ (Athens: University of Georgia Press, 2006). }~ In defiance of copyright law, which considers unauthorized borrowing as presumptively illegal, hip-hop artists used turntable scratching and digital sampling to transform existing songs into something new, which in time grew into a lucrative market segment. Hip-hop illustrates how the commons and the market need to freely interact, without undue restrictions, in order for both to flourish. 
It works because sampling is not a simple matter of “theft” but a mode of creativity, a way of carrying on a cultural conversation. Sampling is a way of paying tribute to musical heroes, mocking rivals, alluding to an historical moment, or simply experimenting with an arresting sound. When the rap group Run-DMC used Aerosmith’s “Walk This Way” as the basis for a remix, it was not only a salute to the group’s musical influence and a new turn of the creative wheel, it revived Aerosmith’s sagging career (or, in economist’s terms, it “created new value”). +={Demers, Joanna:Steal This Music+1;music:hip-hop+1} + +The problem, of course, is that most remix culture (and the value it creates) is illegal. By the late 1980s, in fact, the freedom of the commons that gave birth to hip-hop was coming under siege. Musicians and record labels were routinely invoking copyright law to demand permission and payments for the tiniest samples of music. Only wealthy artists could afford to clear the rights of familiar songs, and basement amateurs (who had given rise to the genre in the first place) were being marginalized. When George Clinton’s group Funkadelic succeeded in its lawsuit against the rap group N.W.A. for using a nearly inaudible sample of a three-note, two-second clip from “Get Off Your Ass and Jam” — the infamous /{Bridgeport v. Dimension Films}/ decision, in 2004 — it became clear that the commons of hip-hop music was being enclosed.~{ This story is told by Demers in Steal This Music. The court ruling is /{Bridgeport v. Dimension Films}/, 383 F. 3d 390 (6th Circ. 2004). }~ Critics like Siva Vaidhyanathan and Kembrew McLeod believe that the legal crusade against sampling has significantly harmed the creative vitality of hip-hop. Something is clearly amiss when one of the most critically acclaimed albums of 2005 — /{The Grey Album}/, a remix collection by DJ Danger Mouse — cannot be legally released. 
/{The Grey Album}/ artfully combined music from the Beatles’s /{White Album}/ with lyrics from Jay-Z’s /{Black Album}/, resulting in “the most popular album in rock history that virtually no one paid for,” according to /{Entertainment Weekly}/.~{ DJ Danger Mouse’s remix received considerable press attention. A good overview is by Chuck Klosterman, “The DJ Auteur,” /{New York Times Magazine}/, June 18, 2006, pp. 40–45. }~ +={Bridgeport v. Dimension Films;Clinton, George;Funkadelic;McLeod, Kembrew;Vaidhyanathan, Siva;DJ Danger Mouse} + +The impetus for a solution to the sampling problem started with Negativland, an irreverent “sound collage” band known as much for its zany culture jamming as for its anticopyright manifestos. (One of its CDs includes a polemical booklet about fair use along with a whoopee cushion with a © symbol printed on it.) Negativland gained notoriety in the 1990s for its protracted legal battle with the band U2 and Island Records over Negativland’s release of a parody song called “U2.” Island Records claimed it was an infringement of copyright and trademark law, among other things. Negativland claimed that no one should be able to own the letter U and the numeral 2, and cited the fair use doctrine as protecting its song and title. The case was eventually settled.~{ See Negativland’s book, /{Fair Use: The Story of the Letter U and the Numeral 2}/ (Concord, CA: Seeland, 1995). }~ +={Negativland+1} + +As an experienced sampler of music, Negativland and collagist People Like Us (aka Vicki Bennett) asked Creative Commons if it would develop and offer a music sampling license. Don Joyce of Negativland explained: +={Joyce, Don} + +_1 This would be legally acknowledging the now obvious state of modern audio/visual creativity in which quoting, sampling, direct referencing, copying and collaging have become a major part of modern inspiration. 
[A sampling option would] stop legally suppressing it and start culturally encouraging it — because it’s here to stay. That’s our idea for encouraging a more democratic media for all of us, from corporations to the individual.~{ Glenn Otis Brown, “Mmm . . . Free Samples (Innovation la),” Creative Commons blog, March 11, 2003, at http://creativecommons.org/weblog/entry/ 3631. }~ + +With legal help from Cooley Godward Kronish and Wilson, Sonsini, Goodrich & Rosati, Creative Commons did just that. During its consultations with the remix community, Creative Commons learned that Gilberto Gil, the renowned /{tropicalismo}/ musician and at the time the Brazilian minister of culture, had been thinking along similar lines, and so it received valuable suggestions and support from him. +={Cooley Godward Kronish;Wilson, Sonsini, Goodrich & Rosati;Gil, Gilberto} + +In 2005, Creative Commons issued the Sampling license as a way to let people take pieces of a work for any purpose except advertising.~{ Creative Commons Web site, at http://creativecommons.org/about/sampling. See also Ethan Smith, “Can Copyright Be Saved?” /{Wall Street Journal}/, October 20, 2003. }~ It also prohibited copying and distribution of the entire work.~[* A “Sampling Plus” license was also issued to allow noncommercial copying and distribution of an entire work, which means it could be distributed via file-sharing networks. Finally, a “NonCommercial Sampling Plus” license was devised to let people sample and transform pieces of a work, and copy and distribute the entire work, so long as it was for noncommercial purposes.]~ For example, an artist could take a snippet of music, a clip of film, or a piece of a photograph, and use the sample in a new creation. Since its release, the Sampling license has been criticized on philosophical grounds by some commoners who say it does not truly enhance people’s freedom because it prohibits copying and distribution of the entire work. 
This concern reached serious enough proportions that in 2007 Creative Commons “retired” the license; I’ll revisit this controversy in chapter 9. + +The CC Sampling license only whetted the imagination of people who wanted to find new ways to sample, share, and transform music. Neeru Paharia, then the assistant director of the Creative Commons, came up with the idea of developing ccMixter, a software platform for remixing music on the Web.~{ See http://wiki.creativecommons.org/ccMixter. Interview with Mike Linksvayer, February 7, 2007, and Neeru Paharia, April 13, 2007. }~ Paharia realized one day that “this whole remixing and sharing ecology is about getting feedback on who’s using your work and how it’s evolving. That’s almost half the pleasure.”~{ Interview with Neeru Paharia, April 13, 2007. }~ So the organization developed a Web site that would allow people to upload music that could be sampled and remixed. The site has about five thousand registered users, which is not terribly large, but it is an enthusiastic and active community of remix artists that acts as a great proof of concept while promoting the CC licenses. There are other, much larger remix sites on the Internet, such as Sony’s ACIDplanet, but such sites are faux commons. They retain ownership in the sounds and remixes that users make, and no derivative or commercial versions are allowed. +={Paharia, Neeru} + +One feature of viral spirals is their propensity to call forth a jumble of new projects and unexpected partners. The CC licenses have done just that for music. ccMixter has joined with Opsound to offer a joint “sound pool” of clips licensed under an Attribution ShareAlike license. 
It also supports Freesound, a repository of more than twenty thousand CC-licensed samples ranging from waterfalls to crickets to music.~{ Neeru Paharia, “Opsound’s Sal Randolph,” Creative Commons blog, October 1, 2005, at http://creativecommons.org/audio/opsound; Mike Linksvayer, “Freesound,” Creative Commons blog, October 1, 2005, at http://creative commons.org/audio/freesound; Matt Haughey, “Free Online Music Booms as SoundClick Offers Creative Commons Licenses,” Creative Commons blog, August 11, 2004. }~ + +Runoff Records, Inc., a record label, discovered a remix artist who teaches physics and calculus and goes by the name of Minus Kelvin. Runoff heard a podcast of Kelvin’s CC-licensed music, and signed him up, along with another ccMixter contributor, to do music for three seasons of the television show /{America’s Next Top Model}/.~{ Neeru Paharia, “Minus Kelvin Discovered on ccMixter,” Creative Commons blog, May 17, 2005, at http://creativecommons.org/weblog/archive/2005/5. }~ A few months later, two ccMixter fans based in Poland and Holland started an online record label, DiSfish, that gives 5 percent of all sale proceeds to CC, another 5 percent to charity, with the remainder split between the label and the artist. All music on the label is licensed under CC.~{ Cezary Ostrowski from Poland and Marco Raaphorst from Holland met online at ccMixter and decided to go into business together. They started an online label called DiSfish. }~ + +The CC licenses are not just the province of daring remix artists and other experimentalists. Disappointed by its CD sales through traditional channels, the Philharmonia Baroque Orchestra released its performance of Handel’s 1736 opera, /{Atalanta}/, exclusively through the online record label Magnatune, using a CC license. 
Conductor Nicholas McGegan said the Internet “has potentially given the industry a tremendous shot in the arm,” letting orchestras reach “new audiences, including ones that are unlikely to hear you in person.”~{ Mia Garlick, “Classical Music Goes Digital (& CC),” May 3, 2006, at http://creativecommons.org/weblog/entry/5883. }~ A company that specializes in Catalan music collaborated with the Catalonian government to release two CDs full of CC-licensed music.~{ The Enderrock Group, a company that specializes in Catalan music and publishes three popular music magazines, released the two CDs, /{Música Lliure and Música Lliure II}/, free within the page of its magazines. See Margot Kaminski, “Enderrock,” Creative Commons Web site, January 17, 2007, at http://cre ativecommons.org/audio/enderrock. }~ A group of Gamelan musicians from central Java who perform in North Carolina decided to release their recordings under a CC license.~{ The group, Gamelan Nyai Saraswait, was blogged about by Matt Haughey on February 1, 2003, at http://creativecommons.org/weblog/entry/3599. }~ +={McGegan, Nicholas} + +Big-name artists have gotten into the licenses as well. DJ Vadim created a splash when he released all the original solo, individual instrumental, and a cappella studio tracks of his album /{The Sound Catcher}/ under an Attribution, NonCommercial license, so that remixers could have at it.~{ Victor Stone, “DJ Vadim Releases Album Tracks Under CC,” August 20, 2007, at http://creativecommons.org/weblog/entry/7619. }~ In 2004, /{Wired}/ magazine released a CD with sixteen tracks by the likes of David Byrne, Gilberto Gil, and the Beastie Boys. “By contributing a track to /{The Wired CD}/., these musicians acknowledge that for an art form to thrive, it needs to be open, fluid and alive,” wrote /{Wired}/. “These artists — and soon, perhaps, many more like them — would rather have people share their work than steal it.”~{ Thomas Goetz, “Sample the Future,” /{Wired}/, November 2004, pp. 
181–83. }~ +={Byrne, David;Gil, Gilberto+1;DJ Vadim;Beastie Boys} + +Soon thereafter, Byrne and Gil went so far as to host a gala benefit concert for Creative Commons in New York City. In a fitting fusion of styles, Gil sang a Brazilian arrangement of Cole Porter’s cowboy song, “Don’t Fence Me In.” The crowd of 1,500 was high on the transcultural symbolism, said Glenn Brown: “Musical superstars from North and South, jamming together, building earlier works into new creations, in real time. Lawyers on the sidelines and in the audience, where they belong. The big Creative Commons logo smiling overhead.”~{ Glenn Otis Brown, “WIRED Concert and CD: A Study in Collaboration,” September 24, 2004, available at http://creativecommons.org/weblog/entry/ 4415. }~ The description captures the CC enterprise to a fault: the fusion of some clap-your-hands populism and hardheaded legal tools, inflected with an idealistic call to action to build a better world. +={Brown, Glenn Otis;Porter, Cole} + +By 2008 the power of open networks had persuaded the major record labels to abandon digital rights management of music CDs, and more major artists were beginning to venture forth with their own direct distribution plans, bypassing the standard record label deals. Prince, Madonna, and others found it more lucrative to run their own business affairs and deal with concert venues and merchandisers. In a major experiment that suggests a new business model for major music acts, Nine Inch Nails released its album /{Ghosts I-IV}/ under a Creative Commons NonCommercial ShareAlike license, and posted audio files of the album on its official Web site, inviting free downloads. It did not do advertising or promotion. Despite the free distribution — or because of it — the group made money by selling 2,500 copies of an “Ultra-Deluxe Limited Edition” of the album for $300; the edition sold out in less than three days. There were also nonlimited sales of a “deluxe edition” for $75 and a $10 CD. 
The scheme showed how free access to the music can be used to drive sales for something that remains scarce, such as a “special edition” CD or a live performance. One week after the album’s release, the Nine Inch Nails’ Web site reported that the group had made over $1.6 million from over 750,000 purchase and download transactions. Considering that an artist generally makes only $1.60 on the sale of a $15.99 CD, Nine Inch Nails made a great deal more money from a “free” album distribution than it otherwise would have made through a standard record deal.~{ See, e.g., Wikipedia entry, “Ghosts I-IV,” at http://en.wikipedia.org/wiki/ Ghosts_I-IV. }~ +={Nine Inch Nails} + +It is too early to know if Lessig’s “BMI strategy” will in fact catalyze a structural transformation in the entertainment industries. But Lessig apparently feels that it is the only feasible strategy. As he said in a 2006 speech, intensified hacking to break systems of proprietary control will not work; new campaigns to win progressive legislation won’t succeed within the next twenty years; and litigation is “a long-term losing strategy,” as the /{Eldred}/ case demonstrated. For Lessig and much of the free culture community, the long-term project of building one’s own open, commons-friendly infrastructure is the only enduring solution. +={BMI (Broadcast Music, Inc.);Eldred v. Reno/Eldred v. Ashcroft:effects of;Lessig, Lawrence:Eldred v. Reno, and|music, and+1} + +In the music industry, the early signs seem to support this approach. When digital guru Don Tapscott surveyed the events of 2006, he concluded that “the losers built digital music stores and the winners built vibrant communities based on music. The losers built walled gardens while the winners built public squares. 
The losers were busy guarding their intellectual property while the winners were busy getting everyone’s attention.” In a penetrating analysis in 2007, music industry blogger Gerd Leonhard wrote: “In music, it’s always been about interaction, about sharing, about engaging — not Sell-Sell-Sell right from the start. Stop the sharing and you kill the music business — it’s that simple. When the fan/user/listener stops engaging with the music, it’s all over.”~{ Gerd Leonhard, “Open Letter to the Independent Music Industry: Music 2.0 and the Future of Music,” July 1, 2007, at http://www.gerdleonhard.net/ 2007/07/gerd-leonhards.html. }~ +={Leonhard, Gerd;Tapscott, Don} + +Serious change is in the air when the producer/consumer dichotomy is no longer the only paradigm, and a vast network of ordinary people and talented creators are becoming active participants in making their own culture. They are sharing and co-creating. Markets are no longer so separate from social communities; indeed, the two are blurring into each other. Although we may live in a complicated interregnum between Centralized Media and distributed media, the future is likely to favor those creators and businesses who build on open platforms. As Dan Hunter and F. Gregory Lastowka write: “It is clear that two parallel spheres of information production exist today. One is a traditional, copyright-based and profit-driven model that is struggling with technological change. The second is a newly enabled, decentralized amateur production sphere, in which individual authors or small groups freely release their work.”~{ Dan Hunter and F. Gregory Lastowka, “Amateur-to-Amateur,” /{William and Mary Law Review}/ 46, no. 951 (December 2004), pp. 1029–30. }~ +={Hunter, Dan+1;Lastowka, F. 
Gregory} + +Hunter and Lastowka liken copyright law today to the Roman Empire in decline: “It is meaningless to ask whether the unitary might of imperial Rome was preferable to the distributed, messy agglomeration of tribes and states that eventually emerged after Rome fell. It was not better, just different.” That is certainly a debatable conclusion, depending upon one’s cultural tastes and sense of history. But the Rome metaphor does capture the fragmentation and democratization of creativity that is now under way. And that, in fact, is something of the point of the CC licenses: to make access and use of culture more open and egalitarian. For all his commitment to law and the CC licenses, Lessig ultimately throws his lot in with social practice: “Remember, it’s the /{activity}/ that the licenses make possible that matters, not the licenses themselves. The point is to change the existing discourse by growing a new discourse.”~{ Interview with Lawrence Lessig, September 14, 2006. }~ +={copyright law:decline of;Creative Commons (CC) licenses:social practice, and;Lessig, Lawrence:CC licenses, and} + +1~ 7 THE MACHINE AND THE MOVEMENT +={Creative Commons (CC):social movement, as+10} + +/{An infrastructure of code gives rise to a movement for free culture.}/ + +When the CC licenses were first launched, many regarded them as a boring legal license that may or may not really matter. The real surprise was how the CC licenses became a focal object for organizing a movement. As more users began to adopt the licenses in 2003 and 2004, they ceased being just a set of legal permissions and became a cool social brand. The CC licenses and logo became symbols of resistance against the highly controlled, heavily marketed, Big Brother worldview that Hollywood and the record industry seem to embody. The CC licenses offered a way to talk about one’s legal and creative rights in the Internet age, and to cite to a positive alternative — the sharing economy. 
With no paid advertising to speak of, the CC logo came to symbolize an ethic and identity, one that stood for artistic integrity, democratic transparency, and innovation. + +Glenn Otis Brown recalls how people spontaneously took up the license to express their anger at the media establishment and their yearning for a more wholesome alternative: “If you’re frustrated with the way the world works now, frustrated with the way the media is becoming more democratized but all these laws aren’t really facilitating that,” said Brown, “you can just cast a little virtual vote for a different sort of copyright system by putting the ‘Some Rights Reserved’ tag on your Web page. But also, practically, you can help create pools of content that people can work with and make it so much easier to participate.” Without really planning it, the Creative Commons became much more than a system of free licenses for sharing. It became a symbol for a movement. Communities of social practice began to organize themselves around the CC project. +={Brown, Glenn Otis:CC licensing, and+3;Creative Commons (CC) licenses:social practice, and} + +“Inside of the organization, we always talked about how we really had /{two}/ organizations,” said Brown. “One was Creative Commons, the /{movement}/; and one was Creative Commons, the /{machine}/.”~{ Interview with Glenn Otis Brown, June 9, 2006. }~ The machine was about meeting utilitarian needs through licenses and software; the movement was about motivating people and transforming culture. Just as the GPL had given rise to the free software community and a hacker political philosophy (which in turn inspired the Creative Commons’s organizers), so the CC licenses were spontaneously igniting different pockets of the culture: Web designers, bloggers, musicians, book authors, videographers, filmmakers, and amateurs of all stripes. The viral spiral was proceeding apace. 
+={Brown, Glenn Otis:CC as movement and “machine,”, and+3;Creative Commons (CC):growth of+3|“machine”, as+3;General Public License (GPL):free software, and+1;hackers:political philosophy} + +The tension between the machine and the movement has been an animating force in the evolution of the Creative Commons. “You want to have something that’s actually useful to people,” said Brown, “but you also have to get people excited about it, and build up your constituency.”~{ Ibid. }~ Some CC initiatives have had strong symbolic resonances but little practical value, while other initiatives were quite useful but not very sexy. For example, embedding CC metadata into software applications and Web services is complicated and technical — but highly effective in extending the practices of free culture. On the other hand, the Creative Commons’s release of specialty licenses for music sampling, developing nations, and a CC version of the General Public License for software (as discussed below) were discretionary moves of some utility that were probably more important as gestures of solidarity to allies. + +This has been a recurrent motif for the organization — pragmatic, improvisational outreach to distinct constituencies as part of a larger attempt to build a movement. There has always been a corresponding pull, however, “not to put ‘the machine’ at risk by incorporating the new licenses into every last one of our software tools,” said Brown. The integrity of “the machine” ultimately needs to be respected. + +Even as the machine was getting built, Lessig was taking steps to stoke up a movement. In 2004, Lessig published his third book in five years, /{Free Culture}/. 
The book described, as the subtitle put it, “how big media uses technology and the law to lock down culture and control creativity.” Lessig’s earlier books, /{Code}/ and /{The Future of Ideas}/, had critiqued the alarming trends in copyright law, explained the importance of the commons, and set forth a philosophical rationale for what became the CC licenses. Now /{Free Culture}/ provided a wide-ranging survey of how incumbent industries with old business models — for recorded music, film, broadcasting, cable television — were (and are) curbing traditional creative freedoms and technological innovations. Drawing explicitly on the ideas of freedom developed by Richard Stallman in the 1980s, and upon legal history, politics, and colorful stories, Lessig argued that industry protectionism poses a profound harm to creators, business, and democratic culture — and that action needed to be taken. +={Lessig, Lawrence:Free Culture+2;Lessig, Lawrence:Code and Other Laws of Cyberspace|The Future of Ideas|CC licenses, and|law in contemporary context, and;Stallman, Richard:influence of|freedom, and} + +Although /{Free Culture}/ repeats many of the fundamental arguments made in his earlier books, Lessig’s arguments this time did not sound like a law professor’s or academic’s, but more like an activist trying to rally a social movement. “This movement must begin in the streets,” he writes. “It must recruit a significant number of parents, teachers, librarians, creators, authors, musicians, filmmakers, scientists — all to tell their story in their own words, and to tell their neighbors why this battle is so important. . . . We will not reclaim a free culture by individual action alone. It will take important reforms of laws. We have a long way to go before the politicians will listen to these ideas and implement these reforms. But that also means that we have time to build awareness around the changes that we need.”~{ Lawrence Lessig, /{Free Culture}/ (New York: Penguin, 2004), pp. 
275, 287. }~ The preeminent challenge for this would-be movement, Lessig wrote, is “rebuilding freedoms previously presumed” and “rebuilding free culture.” + +Lessig had reason to think that his analysis and exhortations would find receptive ears. He was now a leading voice on copyright and Internet issues, and well known through his earlier books, public speaking, and /{Eldred}/ advocacy. The launch of the Creative Commons was thrusting him into the spotlight again. Adoption of the CC licenses was steadily growing in 2003 and 2004 based on the most comprehensive sources at the time, search engines. Yahoo was reporting in September 2004 that there were 4.7 million links to CC licenses on the Web. This number shot up to 14 million only six months later, and by August 2005 it had grown to 53 million.~{ CC license statistics, on CC wiki page, at http://wiki.creativecommons.org/License_statistics. }~ These numbers offer only a crude estimate of actual license usage, but they nonetheless indicated a consistent trend. Usage was also being propelled by new types of Web 2.0 sites featuring user-generated content. For example, Flickr, the photo-sharing site, had 4.1 million photos tagged with CC licenses at the end of 2004, a number that has soared to an estimated 75 million by 2008. +={Lessig, Lawrence:CC licenses, and;Yahoo;Web 2.0:CC licenses, and;Creative Commons (CC) licenses:Web 2.0 environment, and} + +The decisive choice, four years earlier, to build a suite of licenses that could propagate themselves via open networks was bearing fruit. + +2~ Building the CC Machine +={Creative Commons (CC):“machine”, as+22} + +It was a pleasant surprise for the organization to learn that a great deal of individual usage of the CC licenses was fairly spontaneous. Persuading large companies and respected institutions to use the CC licenses was a more difficult proposition. 
Lessig therefore spent a fair amount of time trying to get prominent institutions to adopt the licenses and give them some validation. Among the early converts were MIT, Rice University, Stanford Law School, and Sun Microsystems, supplemented by some relatively new organizations such as Brewster Kahle’s Internet Archive and the Public Library of Science, a publisher of open-access journals. + +Personal diplomacy can accomplish only so much, however, and in any case the Internet itself needed to be leveraged to disseminate the licenses and educate the public. One challenge, for example, was to introduce the CC licenses — which are not, after all, a self-evident need for most people — in a clear, compelling way. Most authors and artists have little idea what licenses they may want to choose, and their implications for how they might be able to sell or share works in the future. People needed a quick and easy way to make intelligent choices. It fell to Lisa Rein, the first technical director at CC, in late 2001, to develop a license-generating interface for the Web site. The quandary she faced was how to maximize user choice in selecting licenses while minimizing complexity. +={Rein, Lisa} + +The Web interface for the licenses has steadily improved over the years, but in a sense, those improvements have been offset by a growing complexity and number of CC licenses. Some critics have complained that the whole CC scheme can be a bit daunting. Yes, the licenses can ensure certain freedoms without your having to hire an attorney, which is clearly an improvement over relying on the fair use doctrine. But that does not mean that anyone can immediately understand the implications of using a NonCommercial or ShareAlike license for a given work. 
Any lurker on a CC listserv soon encounters head-scratching questions like “Can I use a BY-NC photo from Flickr on my blog if the blog is hosted by a company whose terms of service require me to grant them a worldwide, nonexclusive license to use any work hosted by their service, including for commercial use?” +={Creative Commons (CC) licenses:complexity of|fair use, and;fair use doctrine:CC licenses, and} + +By far the more important vehicle for promoting usage of the CC licenses has been software code. Lessig and the CC team realized that if the licenses could become an embedded element of leading search engines, Web publishing tools, and Web 2.0 platforms, it could powerfully promote license use. Integrating the code into existing Web sites and software can pose some serious technical challenges, however. Figuring out how to integrate the CC licenses with popular software applications, Web services, and digital file formats has fallen chiefly to Nathan Yergler, the chief technology officer of Creative Commons. Over the years, he and other CC developers have come up with a variety of applications to help make software infrastructures more friendly. One program that was developed, ccHost, is a content management system that has licensing and remix tracking built into its core. JsWidget is a simple javascript widget that developers can easily integrate into their sites to enable users to choose a license without leaving the site. Creative Commons has made it a standard practice to coordinate its work with technology volunteers, startup companies, and nonprofits with a stake in digitally enabling open licensing. It does this work through a CC development wiki, the cc-devel mailing list, Internet Relay Chat, World Wide Web Consortium working groups, and participation in Google’s annual “Summer of Code” program for student programmers. 
+={code:CC licenses, and+7;Creative Commons (CC) licenses:software code, and+7|Web 2.0 environment, and;Web 2.0:CC licenses, and;Google;software:CC licenses, and+7} + +Lessig and top CC staff have worked hard at convincing executives at major software enterprises to incorporate the CC licenses into a software application or Web site. One early triumph came when the makers of Movable Type, a blogging platform, agreed to make it easy for users to tack a CC license onto their blogs. Two months later, the O’Reilly empire of software blogs adopted the CC licenses. Then programmer Dave Winer embedded the licenses in his new Web log software in 2003. Blogs may not be core infrastructure for the Internet, but they are plentiful and popular, and have given Creative Commons enormous visibility and a high adoption curve. +={Lessig, Lawrence:CC licenses, and+6;O’Reilly, Tim;blogging;Winer, Dave} + +It had always been Lessig’s ambition that the major search engines would be reengineered to help people find CC-tagged content. To help prove that it could be done, Creative Commons built its own jerry-rigged search engine that retrieved content tagged with CC metadata. Lessig and Brown, meanwhile, made numerous diplomatic overtures to Google and Yahoo executives and software engineers. After two years of off-and-on conversations, both search engine companies agreed in 2005 to incorporate changes into their advanced searches so that users could locate CC-licensed content. (The Google advanced search does not use the Creative Commons name, but simply asks users if they want content that is “free to use or share,” among other options.) The search engine exposure was a serious breakthrough for Creative Commons’s visibility and legitimacy. +={Brown, Glenn Otis+2;Google;Yahoo} + +After a few years, the CC licenses were integrated into a number of other software platforms. 
It became possible to search for CC-licensed images (Flickr), video programs (blip.tv), music (Owl), and old Web content (Internet Archive, SpinXpress). With these search tools, Internet users had a practical way to locate blues tunes that could be remixed, photos of the Eiffel Tower that could be modified and sold, and articles about flower arrangements that could be legally republished. Advertisers, publishers, and other companies could search for images, songs, and text that could be licensed for commercial use. + +Lessig and Brown worked hard to get other major Web and software companies to make it easy for users to tag content with CC licenses. The ultimate goal was to make it easy for users to automate their preferences. Joi Ito, a Japanese venture capitalist and democratic reformer who became the chair of the Creative Commons’s board of directors in 2006, put it this way: “Every input device that you have, whether it’s a camera phone, a digital camera or PowerPoint software, should allow you to automatically set it to the CC license that you want. And the minute you take that picture, you’ve already expressed how you would want that picture to be used.” +={Ito, Joichi} + +Creative Commons also urged open-source software communities to incorporate CC-made software into their applications so that users can more easily tag content with the licenses or find licensed works. Firefox, for example, has integrated a Creative Commons search function into the drop-down menu of its browser search interface. It also has a plug-in module called MozCC that scans for any CC metadata as you browse Web pages, and then reports on the browser status bar how content is licensed. CC licenses have been integrated into other software as well, such as Songbird, a free software media player, and Inkscape, a free vector-graphics program similar to Adobe Illustrator. 
+={open source software:CC licenses, and+2} + +Application by application, Web site by Web site, the Creative Commons board and staff have tried to insinuate the licenses into as many software applications and Web services as they could, in a kind of behind-the-scenes enactment of Lessig’s book /{Code}/. If code is law, then let’s write it ourselves! The diffusion of the licenses has tended to occur through personal connections of Lessig, CC board members, and friendly tech entrepreneurs and programmers. Joi Ito used his contacts at Sony to persuade it to develop a video remix Web site in Japan that uses CC licenses as the default choice. For Sony, the licenses help the company avoid any whiff of legal impropriety because users must stipulate whether their video remixes may be shared or not. +={code:as law;law:code as;Lessig, Lawrence:Code and Other Laws of Cyberspace;Ito, Joichi} + +In 2006, Microsoft went so far as to come out with a plug-in module for its Word program, enabling writers to tag their text documents with CC licenses. At the time, many CC fans grumbled at the hypocrisy of Microsoft, the five-hundred-pound gorilla of proprietary software, embracing the Creative Commons, even in such a modest way. But for Lessig and CC board members, any business that chooses to advance the reach of free culture — in this case, by accessing the 400 million users of Microsoft Office — is welcomed. While this ecumenical tolerance has made the Creative Commons a big-tent movement with an eclectic assortment of players, it has also provoked bitter complaints in free software and Wikipedia circles that the Creative Commons promotes a fuzzy, incoherent vision of “freedom” in the digital world (an issue to which I return in chapter 9). +={Microsoft:CC licenses, and} + +One vexing problem that CC developers confronted was how to digitally tag stand-alone files as CC-licensed work if they are not on the Web. 
How could one tag an MP3 file, for example, to show that the music is under a CC license? One problem with just inserting a CC tag onto the MP3 file is that anyone could fraudulently mark the file as CC-licensed. To prevent scams, Neeru Paharia, then CC assistant director, and other developers came up with a solution that requires any stand-alone digital files that are embedded with CC licenses to include a URL (Uniform Resource Locator) that links to a Web page verifying the assertions made on the file. +={Paharia, Neeru} + +The practice of embedding CC license information on digital files has been called /{digital rights expression}/ — a kind of benign analogue to digital rights management. The purpose is to embed information about the copyright status of a work /{in}/ the digital file. Unlike DRM, the goal is not to try to build an infrastructure for enforcing those rights or controlling how people may use a work. “Instead of using technology to ensure that the consumer can’t do anything with it,” said Mike Linksvayer, CC vice president and former chief technology officer, “we’re trying to use technology to ensure that people can find a CC-licensed work. If they’re looking, for instance, for music that can be remixed, then this information will help a search engine locate that information.”~{ Interview with Mike Linksvayer, February 7, 2007. }~ +={Linksvayer, Mike;digital rights expression;digital rights management (DRM)} + +Perhaps the neatest self-promotional trick that the Creative Commons has devised is to rely upon companies whose very business plans revolve around CC licenses. We will examine “open business” enterprises in chapter 10, but for now it is worth noting that a number of innovative companies use the licenses as a core element of their business strategy. 
These enterprises include Flickr (photo sharing), Magnatune (an online record label), Jamendo (a Luxembourg-based music site), and Revver (a video-sharing site that shares advertising revenues with creators). + +Infrastructure grows old and occasionally needs to be updated and improved. The CC licenses have been no exception. As users have incorporated them into one medium after another, the unwitting omissions and infelicitous legal language of some parts of the licenses needed revisiting. After many months of discussions with many parts of the CC world, the Creative Commons issued a new set of 2.0 licenses in May 2004.~{ Glenn Otis Brown, “Announcing (and explaining) our new 2.0 licenses,” CC blog, May 25, 2004, at http://creativecommons.org/weblog/entry/4216. }~ They did not differ substantially from the original ones, and in fact the changes would probably bore most nonlawyers. For example, version 2.0 included a provision that allows a licensor to require licensees to provide a link back to the licensor’s work. The 2.0 licenses also clarify many complicated license options affecting music rights, and make clear that licensors make no warranties of title, merchantability, or fitness for use. Perhaps the biggest change in version 2.0 was the elimination of the choice of Attribution licenses. Since nearly 98 percent of all licensors chose Attribution, the Creative Commons decided to drop licenses without the Attribution requirement, thereby reducing the number of CC licenses from eleven to six. +={Creative Commons (CC) licenses:version 2.0 of} + +Another set of major revisions to the licenses was taken up for discussion in 2006, and agreed upon in February 2007.~{ Mia Garlick, “Version 3.0 Launched,” CC blog, http://creativecommons.org/weblog/entry/7249. }~ Once again, the layperson would care little for the debates leading to the changes, but considerable, sometimes heated discussion went into the revisions. 
In general, the 3.0 tweaks sought to make the licenses clearer, more useful, and more enforceable. The issue of “moral rights” under copyright law — an issue in many European countries — is explicitly addressed, as are the complications of the CC licenses and collecting societies. New legal language was introduced to ensure that people who remix works under other licenses, such as the GNU Free Documentation License (FDL), would be able to also use CC-licensed materials in the same work — an important provision for preventing free culture from devolving into “autistic islands” of legally incompatible material. Besides helping align the CC world with Wikipedia (which uses the GNU FDL license), the 3.0 revisions also made harmonizing legal changes to take account of MIT and the Debian software development community. +={GNU Project:GNU FDL;copyright law:moral rights, and;Creative Commons (CC) licenses:version 3.0 of} + +By getting the CC licenses integrated into so many types of software and Web services, and even leveraging market players to embrace the sharing ethic, Creative Commons has managed to kill at least three birds with one stone. It has enlarged the universe of shareable Internet content. It has educated people to consider how copyright law affects them personally. And it has given visibility to its larger vision of free culture. +={copyright law:CC licenses, and;Creative Commons (CC) licenses:copyright law, and;Internet:communication system, as+1;Creative Commons (CC):growth of+2} + +In one sense, the CC “machine” composed of the licenses, the CC-developed software, and the CC-friendly protocol was the engine for change. In another sense, the influence that Creative Commons has acquired derives from the social communities that gradually began to use its infrastructure. The social practice infused power into the “machine” even as the machine expanded the social practice. 
A virtuous cycle took hold, as the CC community used its self-devised legal and technological infrastructure to advance its shared cultural agenda. +={Creative Commons (CC):influence of} + +Driving this cycle was an ever-growing staff and new managers working out of offices in downtown San Francisco. Although Lessig has been the chief executive officer and chairman of the board of Creative Commons for most of its existence, most day-to-day operating responsibilities fell to executive director Glenn Otis Brown until his departure in 2005, and then to general counsel Mia Garlick, who left in 2007. (Both took jobs at Google.) Key executives at Creative Commons in 2008 included Mike Linksvayer, vice president; Eric Steuer, creative director; Diane Peters, general counsel; Nathan Yergler, chief technology officer; and Jennifer Yip, operations manager. The annual budget, which was $750,000 in 2003, had grown to $3.6 million in 2008 (a sum that included the Science Commons project). Much of this funding came from foundations such as the John D. and Catherine T. MacArthur Foundation, the William and Flora Hewlett Foundation, the Rockefeller Foundation, and Omidyar Network. +={Linksvayer, Mike;Brown, Glenn Otis:executive director, as;Peters, Diane;Steuer, Eric;Yergler, Nathan;Yip, Jennifer;John D. and Catherine T. MacArthur Foundation;Garlick, Mia;Lessig, Lawrence:CC licenses, and+2;Creative Commons (CC):funding of;Omidyar Network;Rockefeller Foundation;William and Flora Hewlett Foundation} + +Once the CC machine had secured its footing, Lessig and the CC staff paid close attention to the movement — the social communities that find utility and meaning through Creative Commons — and to developing new software and projects that these early adopters would welcome. In 2006, the organization hit upon the idea of hosting a series of “salons” in major cities. The gatherings have become a big success, and are now replicated in cities throughout the world. 
Artists talk about how they use CC licenses; entrepreneurs explain how their business models work; remix artists perform their work. The events, free and open to the public, combine testimonials about free culture, personal networking, entrepreneurial idea-mongering, live performances, and partying. The CC crowd seems to enjoy partying; they do it well. Every December, there are gala anniversary parties in groovy San Francisco hot spots. There have been virtual parties in the immersive online world, Second Life. Because CC users tend to include some of the most adventurous artistic talent and eclectic innovators around — people who know where the truly cool night spots are — CC parties tend to be lively, good times. The parties in Rio and Dubrovnik, at the iCommons Summits, were memorable international happenings, for example — occasions, as one self-styled Lothario boasted to me, “where a guy could dance with a woman from every continent of the world in a single evening.” +={Creative Commons (CC):social movement, as+2|CC Salons} + +Add to the mix tech-oriented college students, another key sector of free culture activism, and there is even more youthful energy. Hundreds of college students participate in a nationwide student organization, FreeCulture.org, later renamed Students for Free Culture. The group got its start in 2004 when some students at Swarthmore College began investigating the reliability of Diebold electronic voting machines; the company invoked copyright law in an attempt to keep the problems secret, leading to a public confrontation that Diebold lost. Nelson Pavlosky and Luke Smith, who were also inspired by Lessig’s advocacy, co-founded the group, which has since spawned over thirty quasi-autonomous chapters on campuses across the United States and a few foreign nations. The organization tries to be a grassroots force on Internet, digital technology, and copyright issues. 
It has mounted protests against CDs with digital rights management, for example, and hosted film remixing contests and exhibits of CC-licensed art at NYU and Harvard. Students for Free Culture also organized a “no-profit record company/recording collective,” the Antenna Alliance, which gave bands free recording space and distributed their CC-licensed music to college radio stations. +={Pavlosky, Nelson;Smith, Luke;Students for Free Culture} + +We have looked at the machine and many parts of the movement, but not at one of the most significant forces fueling Creative Commons — the dozens of national projects to adapt the licenses to legal systems around the world. The long-term reverberations of this movement — which includes activists in Brazil, Croatia, South Africa, Egypt, Peru, Scotland, and dozens of other countries — are only beginning to be felt. + +1~ 8 FREE CULTURE GOES GLOBAL +={Creative Commons International+79} + +/{The commoners mount a transnational mobilization to build their own digital commons.}/ + +It is a measure of Lessig’s ambition for Creative Commons that only five months after the release of the licenses, in April 2003, he instigated a move to take the idea global. Glenn Brown remembers objecting, “I don’t know how we’re going to get this done! Larry was like, ‘We have no other choice. We /{have}/ to do this. This needs to be an international organization.’”~{ Interview with Glenn Otis Brown, June 9, 2006. }~ +={Boyle, James:CC International, and+1;Lessig, Lawrence:CC International, and+1} + +Professor James Boyle, a board member, was aghast. “That’s the stupidest thing I’ve ever heard,” he said upon hearing the idea. “I was practically foaming at the mouth,” he recalled, noting that it was “just insane” to try to adapt the licenses to the mind-boggling complexities of copyright laws in scores of nations.~{ Interview with James Boyle, August 15, 2006. 
}~ But Lessig, determined to make the Creative Commons an international project, proceeded to hire Christiane Asschenfeldt (now Christiane Henckel von Donnersmarck), a Berlin-based copyright lawyer whom he had met the previous summer at an iLaw (Internet Law) conference in Cambridge, Massachusetts. He charged her with helping project leaders in different countries adapt the licenses (or, in computerese, “port” them) to their respective national legal codes. +={Asschenfeldt, Christiane+1;copyright law:international} + +Asschenfeldt set about inventing a system for gathering teams of volunteers, usually associated with a law school or technology institute, to become CC affiliates. Once an affiliate institution and project lead are chosen, the project lead produces a first draft of the licenses, which then undergoes public discussion, rewriting, and a final review by the new international arm of Creative Commons, CC International.~{ The procedures for porting a CC license to another jurisdiction are outlined in a document, “Welcome to Creative Commons International,” undated, at http://wiki.creativecommons.org/Worldwide_Overview. }~ (Confusingly, this project was originally called “iCommons,” a name that in 2006 was reassigned to a new CC spinoff group that convenes the international free culture movement.) +={free culture:international} + +In a pre-Internet context, the whole idea of creating a new international license architecture and network of legal experts might seem ridiculously unrealistic. But by 2003 there were enough examples of “distributed intelligence” popping up that it no longer seemed so crazy to think that a passionate corps of dispersed volunteers could collaborate as catalysts for change. In any case, following the /{Eldred}/ defeat, Lessig and Brown came to believe, as discussed earlier, that the Creative Commons needed to be both a machine and a movement. 
+={Brown, Glenn Otis:CC as movement and “machine,”, and;Creative Commons (CC):“machine”, as|social movement, as;Eldred v. Reno/Eldred v. Ashcroft:effects of;Lessig, Lawrence:Eldred v. Reno, and;Lessig, Lawrence:CC International, and+1} + +Going international with the licenses offered an appealing way to grow both simultaneously without forcing unpleasant trade-offs between the two, at least initially. Drafting the licenses for a country, for example, helps convene top lawyers committed to the idea of legal sharing and collaboration while also mobilizing diverse constituencies who are the potential leaders of a movement. + +According to Jonathan Zittrain, an early collaborator on the project and a board member, Creative Commons at the international level is more of a “persuasive, communicative enterprise than a legal licensing one.”~{ Interview with Jonathan Zittrain, September 28, 2006. }~ It is a vehicle for starting a process for engaging public-spirited lawyers, law scholars, and all manner of creators. The licenses do have specific legal meanings in their respective legal jurisdictions, of course, or are believed to have legal application. (Only three courts, in the Netherlands and Spain, have ever ruled on the legal status of the CC licenses. In two instances the courts enforced the licenses; in the other case, in which the defendant lost, the validity of the licenses was not at issue.)~{ The most famous court case involving the CC licenses is /{A. Curry v. Audax/Weekend}/, in which Adam Curry sued the publishers of a Dutch tabloid magazine and two senior editors for using four photos of his family on his Flickr account that had been licensed under a BY-NC-SA license. See http://creativecommons.org/weblog/entry/5944 and http://creativecommons.org/weblog/entry/5823. A District Court of Amsterdam upheld Curry’s usage of the CC licenses in a March 9, 2006, decision; see http://mirrors.creativecommons.org/judgements/Curry-Audax-English.pdf. 
There have been two Spanish cases involving CC licenses. In both cases, a collecting society, the Sociedad General de Autores y Editores (SGAE), sued cafés for playing “free music” licensed under CC licenses; SGAE claimed that it was owed royalties for the public performance of music because artists cannot legally apply a CC license to their work (or even release it online) without the consent of their collecting society. In both instances, the cases turned on evidentiary issues, not on the enforceability of CC licenses. See http://creativecommons.org/weblog/entry/5830 and http://creativecommons.org/weblog/entry/7228. }~ Apart from their legal meaning, the licenses’ most important function may be as a social signaling device. They let people announce, “I participate in and celebrate the sharing economy.” The internationalization of the CC licenses has also been a way of “localizing” the free culture movement. +={Zittrain, Jonathan} + +The first nation to port the CC licenses was Japan. This was partly an outgrowth of a five-month sabbatical that Lessig had spent in Tokyo, from late 2002 through early 2003. There were already stirrings of dissatisfaction with copyright law in Japan. Koichiro Hayashi, a professor who had once worked for the telecom giant NTT, had once proposed a so-called d-mark system to allow copyright owners to forfeit the statutory term of copyright protection and voluntarily declare a shorter term for their works. In the spring of 2003, a team of Japanese lawyers associated with a technology research institute, the Global Communications Center (GLOCOM), working with CC International in Berlin, set about porting the licenses to Japanese law. 
+={Creative Commons International:Japan+2;Global Communications Center (GLOCOM);Hayashi, Koichiro} + +Yuko Noguchi, a former Lessig student and lawyer who later became the legal project lead, explained that the CC licenses are a culturally attractive way for Japanese to address the structural problems of copyright law. Japan is a country that prizes harmony and dislikes confrontation. The licenses offer a way to promote legal sharing without forcing bitter public policy conflicts with major content industries.~{ Interview with Yuko Noguchi, September 12, 2007. }~ (Partly for such reasons, CC Japan shifted its affiliation to the University of Tokyo in 2006.) In a culture that enjoys the sharing of comics, animation, haiku, and other works, the CC Japan licenses, launched in January 2004, have been used by a diverse range of artists and companies. +={Noguchi, Yuko} + +During his sojourn in Japan, Lessig had a fateful meeting with Joichi Ito, who in many ways embodies the tech sophistication, democratic zeal, and cosmopolitan style of the international Creative Commons movement. Widely known as Joi (pronounced “Joey”), Ito, forty-two, was born in Japan and educated in the United States. Disaffected with formal education in the U.S., where he studied computer science and physics, he dropped out and began his highly unusual career in Japan as an activist, entrepreneur, and venture capitalist. He has worked as a nightclub disc jockey, and brought industrial music and the rave scene to Japan, but he has also become a talented venture capitalist and early stage investor in such companies as Six Apart, Technorati, Flickr, SocialText, Dopplr, and Rupture. Lessig and Ito became close friends; Ito later joined the Creative Commons board. He was appointed chairman of the board in 2007 and then, in 2008, he became chief executive officer when Lessig left to start a congressional reform project. Duke law professor James Boyle, a board member, replaced Ito as chairman. 
+={Ito, Joichi+1;Boyle, James:as chairman} + +Once it went public, the very idea of Creative Commons attracted many other people like Ito to its ranks: educated, tech-savvy, culturally fluent, activist-minded. In fact, following the American launch of Creative Commons, volunteers from many countries began to approach the organization, asking if they could port the licenses to their own legal systems. Finland became the second nation to adopt the licenses, in May 2004, followed a month later by Germany. In Europe, the early adopters included Denmark, Hungary, Scotland, Slovenia, Sweden, and Malta. In South America, CC licenses were introduced in Argentina, Chile, and Peru. In Asia, Malaysia and China ported the licenses, as did Australia. Israel was the first Middle Eastern country to port the licenses. + +As each jurisdiction introduces its licenses, it typically hosts a gala public event to celebrate and publicize free culture. News media and government officials are invited. There are panel discussions about copyright law and digital culture; performances by musicians who use the licenses; and endorsements by prominent universities, cultural institutions, and authors. Lessig has made it a practice to fly in and deliver an inspirational speech. Few international launches of CC licenses have been more spectacular or consequential than the one staged by Brazil in March 2004. +={Lessig, Lawrence:public speaker, as|CC International, and} + +2~ Brazil, the First Free Culture Nation +={Brazil:free culture in+23;Creative Commons International:Brazil+23} + +Luiz Inácio Lula da Silva had just been elected president of Brazil, and he was eager to stake out a new set of development policies to allow his nation to plot its own economic and cultural future. His government, reflecting his electoral mandate, resented the coercive effects of international copyright law and patent law. 
To tackle some of these issues on the copyright front, President Lula appointed Gilberto Gil, the renowned singer-songwriter, as his minister of culture. +={Lula da Silva, Luiz Inácio;Gil, Gilberto+11} + +Gil became a revered cultural figure when he helped launch a new musical style, /{tropicalismo}/, in the late 1960s, giving Brazil a fresh, international cachet. The music blended national styles of music with pop culture and was inflected with political and moral themes. As one commentator put it, /{tropicalismo}/ was “a very ’60s attempt to capture the chaotic, swirling feel of Brazil’s perennially uneven modernization, its jumble of wealth and poverty, of rural and urban, of local and global. . . . They cut and pasted styles with an abandon that, amid today’s sample-happy music scene, sounds up-to-the-minute.”~{ Wikipedia entry, “Tropicalismo,” at http://en.wikipedia.org/wiki/Tropicalismo. }~ The military dictatorship then running the government considered /{tropicalismo}/ sufficiently threatening that it imprisoned Gil for several months before forcing him into exile, in London. Gil continued writing and recording music, however, and eventually returned to Brazil.~{ For a history of Gil, see his personal Web site at http://www.gilbertogil.com.br/index.php?language=en; the Wikipedia entry on him at http://en.wikipedia.org/wiki/Gilberto_Gil; and Larry Rohter, “Gilberto Gil Hears the Future, Some Rights Reserved,” /{New York Times}/, March 11, 2007. }~ + +This history matters, because when Gil was appointed culture minister, he brought with him a rare political sophistication and public veneration. His moral stature and joyous humanity allowed him to transcend politics as conventionally practiced. “Gil wears shoulder-length dreadlocks and is apt to show up at his ministerial offices dressed in the simple white linens that identify him as a follower of the Afro-Brazilian religion /{candomblé}/,” wrote American journalist Julian Dibbell in 2004.
“Slouching in and out of the elegant Barcelona chairs that furnish his office, taking the occasional sip from a cup of pinkish herbal tea, he looks — and talks — less like an elder statesman than the post-hippie, multiculturalist, Taoist intellectual he is.”~{ Julian Dibbell, “We Pledge Allegiance to the Penguin,” /{Wired}/, November 2004, at http://www.wired.com/wired/archive/12.11/linux_pr.html. }~ +={Dibbell, Julian+1} + +As luck had it, Dibbell — author of the article on cyber-rape that had enticed Lessig to investigate digital culture in the first place (see chapter 3) — was living in Rio at the time. He was friendly with Hermano Vianna, a prominent intellectual who knew Gil and was deeply into the music scene and digital technology. Between Dibbell and Vianna, a flurry of introductions was made, and within months Larry Lessig, John Perry Barlow, and Harvard law professor William Fisher were sitting with Gil, Vianna, and Dibbell in Gil’s Rio de Janeiro penthouse across from the beach.~{ Ibid. }~ Lessig’s mission was to pitch the Creative Commons licenses to Gil, and in particular, get Gil’s thoughts about a new CC Sampling license that would let musicians authorize sampling of their songs. +={Fisher, William (Terry);Vianna, Hermano+1;Lessig, Lawrence:CC International, and+3;Barlow, John Perry:CC International, and} + +“Gil knew that sampling was a central driving power for contemporary creativity well before digital instruments came along,” recalled Vianna. "/{Tropicalismo}/ was all about sampling different ideas and different cultures. /{Tropicalismo}/ was about juxtapositions, not fusions, and in this sense was heir to a long tradition of Brazilian modern thought and art that began with the cultural anthropology of the early modernists, in the 1920s and 1930s, and can be traced back to all debates about Brazilian identity in the 20th century."~{ E-mail from Hermano Vianna, January 8, 2007. }~ + +Lessig did not need to argue his case.
Gil immediately understood what Creative Commons was trying to accomplish culturally and politically. He was enthusiastic about CC licenses, the proposed Sampling license, and the prospect of using his ministry to advance a vision of free culture. + +By further coincidence, Ronaldo Lemos da Silva, then a Brazilian law student who has been described as a “Lessig of the Southern Hemisphere,” had just completed his studies at Harvard Law School. He was well acquainted with Creative Commons and was considering his future when friends at the Fundação Getulio Vargas (FGV), a Rio de Janeiro university, urged him to join them in founding a new law school. The school would host a new Center for Technology and Society to study law and technology from the perspective of developing nations like Brazil. Lemos accepted, and the center soon became the host for CC Brazil and myriad free culture projects. +={Lemos da Silva, Ronaldo} + +This alignment of intellectual firepower, artistic authority, and political clout was extraordinary — and a major coup for Creative Commons. The culture minister of the world’s fifth-largest country and tenth-largest economy — whose own forty-year career was based on a remix sensibility — became a spirited champion of the CC licenses and free culture. Unlike most culture ministers, who treat culture chiefly as an aesthetic amenity, Gil took the economic and technological bases of creativity seriously. He wanted to show how creativity can be a tool for political and cultural emancipation, and how government can foster that goal. It turned out that Brazil, with its mix of African, Portuguese, and indigenous cultures and its colorful mix of vernacular traditions, was a perfect laboratory for such experimentation. 
+ +One of the first collaborations between Creative Commons and the Brazilian government involved the release of a special CC-GPL license in December 2003.~{ Creative Commons press release, “Brazilian Government First to Adopt New ‘CC-GPL,’ ” December 2, 2003. }~ This license adapted the General Public License for software by translating it into Portuguese and putting it into the CC’s customary “three layers” — a plain-language version, a lawyers’ version compatible with the national copyright law, and a machine-readable metadata expression of the license. The CC-GPL license, released in conjunction with the Free Software Foundation, was an important international event because it gave the imprimatur of a major world government to free software and the social ethic of sharing and reuse. Brazil has since become a champion of GNU/Linux and free software in government agencies and the judiciary. It regards free software and open standards as part of a larger fight for a “development agenda” at the World Intellectual Property Organization and the World Trade Organization. In a related vein, Brazil has famously challenged patent and trade policies that made HIV/AIDS drugs prohibitively expensive for thousands of sick Brazilians. +={free software:international licensing, and+1;GNU/Linux:Brazil, in;World Trade Organization;World Intellectual Property Organization;open networks:international} + +When the full set of CC Brazil licenses was finally launched — at the Fifth International Free Software Forum, in Porto Alegre on June 4, 2004 — it was a major national event. Brazilian celebrities, government officials, and an enthusiastic crowd of nearly two thousand people showed up. Gil, flying in from a cabinet meeting in Brasília, arrived late.
When he walked into the auditorium, the panel discussion under way immediately stopped, and there was a spontaneous standing ovation.~{ A ten-minute video of the CC Brazil opening can be seen at http://support.creativecommons.org/videos#brasil. }~ “It was like a boxer entering the arena for a heavyweight match,” recalled Glenn Otis Brown. “He had security guards on both sides of him as he walked up the middle aisle. There were flashbulbs, and admirers trailing him, and this wave of people in the audience cresting as he walked by.”~{ Interview with Glenn Otis Brown, August 10, 2006. }~ +={Brown, Glenn Otis:CC International, and+1} + +Gil originally planned to release three of his songs under the new CC Sampling license — dubbed the “Recombo” license — but his record label, Warner Bros., balked. He eventually released one song, “Oslodum,” that he had recorded for an indie label. “One way to think about it,” said Brown, “is that now, anybody in the world can jam with Gilberto Gil.”~{ Film about CC Brazil launch, at http://support.creativecommons.org/videos#brasil. }~ + +As culture minister, Gil released all materials from his agency under a CC license, and persuaded the Ministry of Education as well as Radiobrás, the government media agency, to do the same. He also initiated the Cultural Points (Pontos de Cultura) program, which has given small grants to scores of community centers in poor neighborhoods so that residents can learn how to produce their own music and video works. Since industry concentration and payola make it virtually impossible for newcomers to get radio play and commercially distribute their CDs, according to many observers, the project has been valuable in allowing a fresh wave of grassroots music to “go public” and reach new audiences. + +For developing countries, the real challenge is finding ways to tap the latent creativity of the “informal” economy operating on the periphery of formal market systems.
Brazil is rich with such creative communities, as exemplified by the flourishing /{tecnobrega}/ music scene in the northeast and north regions of Brazil. Ronaldo Lemos says that /{tecnobrega}/ — “a romantic sound with a techno-beat and electronica sound”~{ Interview with Ronaldo Lemos da Silva, September 15, 2006. }~ — arose on the fringes of the mainstream music marketplace through “sound system parties” attended by thousands of people every weekend. Local artists produce and sell about four hundred new CDs every year, but both the production and distribution take place outside the traditional music industry. The CDs can’t be found in retail stores but are sold entirely by street vendors for only $1.50. The CDs serve as advertising for the weekend parties. The music is “born free” in the sense that the /{tecnobrega}/ scene doesn’t consider copyrights as part of its business model and does not enforce copyrights on their CDs; it invites and authorizes people to share and reuse the content.~{ The /{tecnobrega}/ scene is described by Ronaldo Lemos in “From Legal Commons to Social Commons: Developing Countries and the Cultural Industry in the 21st Century,” http://icommons.org/banco/from-legal-commons-to-social-commons-brazil-and-the-cultural-industry-1. }~ (The /{tecnobrega}/ business model is discussed at greater length in chapter 10.) +={Lemos da Silva, Ronaldo+6} + +Lemos believes the CC licenses are an important tool for helping grassroots creativity in Brazil to “go legitimate.” He explains, “Creative Commons provides a simple, non-bureaucratic structure for intellectual property that might help to integrate the massive marginal culture that is arising in the peripheries, with the ‘official,’ ‘formal’ structures of the Brazilian economy.”~{ Ibid. }~ Freed of the blockbuster imperatives of the current music market, the CC licenses allow creativity in the informal “social commons” to flow — yet not be appropriated by commercial vendors.
People can experiment, generate new works, and learn what resonates with music fans. All of this is a predicate for building new types of open markets, says Lemos. /{Tecnobrega}/ is just one of many open-business models that use the free circulation of music to make money. + +Since its launch in June 2004, Lemos and the CC Brazil office have instigated a number of projects to demonstrate how sharing and collaboration can spur economic and cultural development. They have promoted free software and open business models for music and film and started collaborations with allies in other developing nations. Nigerian filmmakers inspired the People’s Cinema in Brazil, a project to help people use audio-video technology to produce their own films and develop audiences for them. The /{culture-livre}/ (free culture) project, a joint effort of Creative Commons in Brazil and South Africa, is using the ccMixter software to encourage young musicians to mix traditional African instruments with contemporary sensibilities, and launch their careers.~{ http://www.ccmixter.co.za. }~ + +In Brazil, there are open-publishing projects for scientific journals;~{ http://www.scielo.br. }~ a Web site that brings together a repository of short films;~{ http://www.portacurtas.com.br. }~ and Overmundo, a popular site for cultural commentary by Internet users.~{ http://www.overmundo.com.br. }~ TramaVirtual, an open-platform record label that lets musicians upload their music and fans download it for free, now features more than thirty-five thousand artists.~{ http://tramavirtual.uol.com.br. }~ (By contrast, the largest commercial label in Brazil, Sony-BMG, released only twelve CDs of Brazilian music in 2006, according to Lemos.) + +“Cultural production is becoming increasingly disconnected from traditional media forms,” said Lemos, because mass media institutions “are failing to provide the adequate incentives for culture to be produced and circulated. . . .
Cultural production is migrating to civil society and/or the peripheries, which more or less already operate in a ‘social commons’ environment, and do not depend on intellectual property within their business models.”~{ Ronaldo Lemos, “From Legal Commons to Social Commons: Developing Countries and the Cultural Industry in the 21st Century,” http://icommons.org/banco/from-legal-commons-to-social-commons-brazil-and-the-cultural-industry-1. }~ + +As more people have adopted legal modes of copying and sharing under CC licenses, it is changing the social and political climate for copyright reform. Now that CC Brazil can cite all sorts of successful free culture ventures, it can more persuasively advocate for a Brazilian version of the fair use doctrine and press for greater photocopying privileges in educational settings (which are legally quite restrictive). +={free culture:international+2} + +Although the CC licenses are now familiar to many Brazilians, they have encountered some resistance, mostly from lawyers. “Among all other audiences — musicians, artists, writers — they were extremely well received,” said Lemos. When he presented the CC licenses to an audience of three hundred lawyers, however, he recalls that a famous law professor publicly scoffed: “You’re saying this because you’re young, foolish, and communist.” Three years later, Lemos discovered that the professor was using his intellectual property textbook in her class. + +As a unique global ambassador of creative sharing, Gilberto Gil did a lot to take the CC licenses to other nations and international forums such as the World Intellectual Property Organization. The day before his 2004 benefit concert for the Creative Commons in New York City with David Byrne, Gil delivered a powerful speech explaining the political implications of free culture: +={Byrne, David;Gil, Gilberto+3;World Intellectual Property Organization} + +_1 A global movement has risen up in affirmation of digital culture.
This movement bears the banners of free software and digital inclusion, as well as the banner of the endless expansion of the circulation of information and creation, and it is the perfect model for a Latin-American developmental cultural policy (other developments are possible) of the most anti-xenophobic, anti-authoritarian, anti-bureaucratizing, anti-centralizing, and for the very reason, profoundly democratic and transformative sort.~{ Gil remarks at New York University, September 19, 2004, at http://www.nyu.edu/fas/NewsEvents/Events/Minister_Gil_speech.pdf. }~ + +The Brazilian government was making digital culture “one of its strategic public policies,” Gil said, because “the most important political battle that is being fought today in the technological, economic, social and cultural fields has to do with free software and with the method digital freedom has put in place for the production of shared knowledge. This battle may even signify a change in subjectivity, with critical consequences for the very concept of civilization we shall be using in the near future.”~{ Ibid. }~ + +To advance this new paradigm, Gil, who left his post as culture minister in 2008, called for the rise of “new creative /{mestizo}/ [hybrid] industries” that break with the entrenched habits of the past. Such businesses “have to be flexible and dynamic; they have to be negotiated and re-negotiated, so that they may contemplate the richness, the complexity, the dynamism and the speed of reality itself and of society itself, without becoming impositions.”~{ Ibid. }~ + +2~ National Variations of a Global Idea + +When it comes to free culture, Brazil is clearly a special case. But citizens in more than seventy nations have stepped forward to build a CC presence in their societies. Each has shown its own distinctive interests.
+ +Tomislav Medak, a philosopher by training and a copyfighter by circumstance, runs the Multimedia Institute in Zagreb, Croatia, a cultural center that consists mostly of a performance space, a lounge, and a café. The organization survives on donations from the likes of George Soros’s Open Society Institute, but it thrives because it is the gathering place for an avant-garde corps of electronic musicmakers, publishers, performers, and hackers. Mainstream Croats would probably describe the community as a bunch of “cyberSerbian-gay-Communists,” said Medak, which he concedes is not inaccurate.~{ Interview with Tomislav Medak, CC Croatia, June 25, 2006. }~ But the institute is not just a coalition of minority interests; it is also a broad-spectrum champion of cultural freedom. It sees free software, civil liberties, and artists’ rights as core elements of a democratic society that it would like to build. +={Creative Commons International:Croatia+1;Croatia:CC licenses in+1;Medak, Tomislav+2;Open Society Institute;Soros, George} + +The Multimedia Institute was understandably excited when it learned about Creative Commons and Lessig’s vision of free culture. With help from some lawyer friends, the institute in January 2004 ported the CC licenses to Croatian law, primarily as a way to empower artists and counteract the dominance of corporate media and expansive copyright laws. “We are a country where the IP framework is very young, and most of the policies are protection-driven. Most policies are dictated by official institutions that just translate international documents into local legislation,” Medak said.~{ Ibid. }~ This commercial/copyright regime tends to stifle the interests of emerging artists, amateurs, consumers and local culture. +={Lessig, Lawrence:CC International, and} + +“In the post-socialist period,” said Medak, “our society has been hugely depleted of the public domain, or commons.
The privatization process and the colonizing of cultural spaces have been blatant over the last couple of years, especially in Zagreb. So the Creative Commons has fit into a larger effort to try to recapture some of those public needs that were available, at least ideologically, in socialist societies. Now they are for real.”~{ Ibid. }~ Medak has since gone on to become a leader of iCommons and the host of the international iCommons Summit in 2007, which brought several hundred commoners from fifty nations to Dubrovnik. + +In Scotland, government and other public-sector institutions have been huge fans of the CC licenses. In fact, museums, archives, and educational repositories have been the primary advocates of the CC Scotland licenses, says Andrés Guadamuz, a law professor at the Research Centre for Studies in Intellectual Property and Technology Law at the University of Edinburgh. “People who want to try to share information in the public sector are turning to Creative Commons because they realize that here is a license that is already made.”~{ Interview with Andrés Guadamuz of CC Scotland, December 19, 2006. }~ +={Guadamuz, Andrés;Scotland:CC licenses in;Creative Commons International:Scotland} + +The BBC was a pioneer in making its archived television and radio programs available to the public for free. In 2003, inspired by the CC licenses, the BBC drafted its own “Creative Archive” license as a way to open up its vast collection of taxpayer-financed television and radio programs.~{ See http://news.bbc.co.uk/2/hi/help/4527506.stm, and interview with Paula Le Dieu, joint director of the BBC Creative Archive project, May 28, 2004, at http://digital-lifestyles.info/2004/05/28/exclusive-providing-the-fuel-for-a-creative-nation-an-interview-with-paula-le-dieu-joint-director-on-the-bbc-creative-archive. }~ The license was later adopted by Channel 4, the Open University, the British Film Institute, and the Museum, Libraries and Archives Council.
Although the Creative Archive license has similar goals as the CC licenses, it contains several significant differences: it restricts use of video programs to United Kingdom citizens only, and it prohibits use of materials for political or charitable campaigns and for any derogatory purposes. +={BBC} + +The CC licenses have proven useful, also, to the British Museum and National Archives. In 2004, these and other British educational institutions were pondering how they should make their publicly funded digital resources available for reuse. A special government panel, the Common Information Environment, recommended usage of the CC licenses because they were already international in scope. The panel liked that the licenses allow Web links in licensed materials, which could help users avoid the complications of formal registration. The panel also cited the virtues of “human readable deeds” and machine-readable metadata.~{ Intrallect Ltd and AHRC Research Centre for Studies in Intellectual Property and Technology Law, University of Edinburgh, “The Common Information Environment and Creative Commons,” October 10, 2005, at http://www.intrallect.com/index.php/intrallect/content/download/632/2631/file/CIE_CC_Final_Report.pdf. }~ + +As it happened, a team of Scottish legal scholars led by a private attorney, Jonathan Mitchell, successfully ported the licenses and released them a few months later, in December 2005. The Scottish effort had been initiated a year earlier when Mitchell and his colleagues objected that the U.K. CC licenses then being drafted were too rooted in English law and not sufficiently attuned to Scottish law. Since the introduction of the CC Scotland licenses, public-sector institutions have enthusiastically embraced them. Museums use the licenses on MP3 files that contain audio tours, for example, as well as on Web pages, exhibition materials, and photographs of artworks.
Interestingly, in England and Wales, individual artists and creative communities seem to be more active than public-sector institutions in using the licenses. +={Scotland:CC licenses in;Creative Commons International:Scotland;Mitchell, Jonathan} + +The use of CC licenses for government information and publicly funded materials is inspiring similar efforts in other countries. Governments are coming to realize that they are one of the primary stewards of intellectual property, and that the wide dissemination of their work — statistics, research, reports, legislation, judicial decisions — can stimulate economic innovation, scientific progress, education, and cultural development. Unfortunately, as Anne Fitzgerald, Brian Fitzgerald, and Jessica Coates of Australia have pointed out, “putting all such material into the public domain runs the risk that material which is essentially a public and national asset will be appropriated by the private sector, without any benefit to either the government or the taxpayers.”~{ iCommons annual report, 2007, http://www.icommons.org/annual07. }~ For example, the private sector may incorporate the public-domain material into a value-added proprietary model and find other means to take the information private. The classic instance of this is West Publishing’s dominance in the republishing of U.S. federal court decisions. Open-content licenses offer a solution by ensuring that taxpayer-financed works will be available to and benefit the general public. +={Coates, Jessica;Fitzgerald, Anne;Fitzgerald, Brian;West Publishing} + +In the United States, the National Institutes of Health has pursued a version of this policy by requiring that federally funded research be placed in an open-access archive or journal within twelve months of its commercial publication.
The European Commission announced in 2007 that it plans to build a major open-access digital repository for publicly funded research.~{ Michael Geist, “Push for Open Access to Research,” BBC News, February 28, 2007, at http://news.bbc.co.uk/go/pr/fr/~/2/hi/technology/6404429. }~ In Mexico, the Sistema Internet de la Presidencia, or Presidency Internet System (SIP), decided in 2006 to adopt CC licenses for all content generated by the Mexican presidency on the Internet — chiefly the president’s various Web sites, Internet radio station, and documents.~{ Creative Commons blog, Alex Roberts, March 8, 2006, at http://creativecommons.org/text/sip. }~ In Italy, CC Italy is exploring legislation to open up national and local government archives. It also wants new contract terms for those who develop publicly funded information so that it will automatically be available in the future.~{ Interview with Juan Carlos de Martin, CC Italy, July 17, 2007. }~ +={Creative Commons International:Italy|Mexico;Italy:CC licenses in;Mexico:CC licenses in} + +2~ Laboratories of Free Culture +={Creative Commons International:laboratories for free culture, as+7} + +In 2005, about two years after the launch of CC International, twenty-one jurisdictions around the world had adopted the licenses. (A legal jurisdiction is not necessarily the same as a nation because places like Scotland, Puerto Rico, and Catalonia — which have their own CC licenses — are not separate nations.) Under a new director of CC International, copyright attorney Catharina Maracke, who took over the license-porting project in 2006, the pace of license adoption has continued. By August 2008, forty-seven jurisdictions had ported the CC licenses, and a few dozen more had their projects under way. The CC affiliates have now reached a sufficient critical mass that they represent a new sort of international constituency for the sharing economy.
The CC network of legal scholars, public institutions, artistic sectors, and Internet users is not just a motivated global community of talent, but a new sort of transnational cultural movement: a digital republic of commoners. +={Maracke, Catharina} + +To be sure, some nations have more institutional backing than others, and some have more enthusiastic and active projects than others. CC Poland reported in 2006 that its biggest challenge was “a complete lack of financial and organizational support, in particular from our partner organization.”~{ iCommons ’06 conference booklet, p. 77. }~ (This was remedied in 2008 when CC Poland entered into a partnership with an interdisciplinary center at the University of Warsaw and with a law firm.) CC affiliates in smaller developing countries with fewer resources — especially in Africa — often have to beg and scrape to pull together resources to supplement the work of volunteers. + +Not surprisingly, the American CC licenses — a version of which was spun off as a generic license, as opposed to jurisdiction-specific licenses — are the most used. In a pioneering study of license usage in January 2007, Giorgos Cheliotis of Singapore Management University and his co-authors conservatively estimated that there were 60 million distinct items of CC content on the Internet — a sum that rose to 90 million by the end of 2007. Over 80 percent of these items use a license that is not jurisdiction-specific; the remaining 20 percent are spread among the thirty-three nations included in the study.~{ Giorgos Cheliotis, Warren Chik, Ankit Guglani, and Giri Kumar Tayi, “Taking Stock of the Creative Commons Experiment: Monitoring the Use of Creative Commons Licenses and Evaluating Its Implications for the Future of Creative Commons and for Copyright Law,” paper presented at 35th Research Conference on Communication, Information and Internet Policy (TPRC), September 28–30, 2007. Paper dated August 15, 2007.
}~ The highest volume of license usage per capita can be found in European nations — particularly Spain, Germany, Belgium, France, Italy, and Croatia — which were among the earliest adopters of the licenses. In absolute terms, the heaviest usage can be seen in Spain, Germany, France, South Korea, Italy, and Japan.~{ Cheliotis, “Taking Stock,” pp. 20–22. }~ Overall, however, CC usage outside of the United States is still fairly new, and usage and growth rates vary immensely from country to country. +={Cheliotis, Giorgos+1} + +As a fledgling network, the international CC community is a rudimentary platform for change. Its members are still groping toward a shared understanding of their work and devising new systems of communication and collaboration. But a great deal of cross-border collaboration is occurring. A variety of free culture advocates have constituted themselves as the Asia Commons and met in Bangkok to collaborate on issues of free software, citizen access to government information, and industry antipiracy propaganda. CC Italy has invited leaders of neighboring countries — France, Switzerland, Austria, Croatia, and Slovenia — to share their experiences and work together. A CC Latin America project started /{Scripta}/, a new Spanish-language journal based in Ecuador, to discuss free software and free culture issues affecting the continent. +={Creative Commons International:cross-border collaboration+1} + +CC leaders in Finland, France, and Australia have published books about their licensing projects.~{ The French book is Danièle Bourcier and Mélanie Dulong de Rosnay, eds., /{International Commons at the Digital Age}/ (Paris: Romillat, 2004), at http://fr.creativecommons.org/icommons_book.htm. The Finnish book is Herkko Hietanen et al., /{Community Created Content: Law, Business and Policy}/ (Turre Publishing, 2007), at http://www.turre.com/images/stories/books/webkirja_koko_optimoitu2.pdf.
The Australian book is Brian Fitzgerald, /{Open Content Licensing: Cultivating the Creative Commons}/ (Sydney: Sydney University Press, 2007). }~ CC Brazil and CC South Africa have collaborated on a project about copyright and developing nations. CC Canada is working with partners to develop an online, globally searchable database of Canadian works in the Canadian public domain. CC Salons have been held in Amsterdam, Toronto, Berlin, Beijing, London, Warsaw, Seoul, Taipei, and Johannesburg. + +In the Netherlands, CC project lead Paul Keller engineered a breakthrough that may overcome the persistent objections of European collecting societies to CC-licensed content. Collecting societies in Europe generally insist that any musician that they represent transfer all of their copyrights to the collective. This means that professional musicians cannot distribute their works under a CC license. Artists who are already using CC licenses cannot join the collecting societies in order to receive royalties for commercial uses of their works. In this manner, collecting societies in many European nations have effectively prevented many musicians from using the CC licenses. +={Keller, Paul;collecting societies+1:see also ASCAP} + +In 2007, however, CC Netherlands negotiated a one-year pilot program with two Dutch collecting societies, Buma and Stemra, to let artists use CC NonCommercial licenses for parts of their repertoire.~{ Creative Commons Netherlands press release, “Buma/Stemra and Creative Commons Netherlands Launch a Pilot,” August 23, 2007; e-mail by Paul Keller, CC Netherlands, to CC International listserv, August 23, 2007. }~ As a result, artists will have greater choice in the release of their works and the ability to easily manage their rights via a Web site. Other European CC affiliates hope that this Dutch experiment will break the long stalemate on this issue and persuade their collecting societies to be more flexible. 
+ +2~ The Developing Nations License +={Creative Commons International:Developing Nations license+9} + +One of the boldest experiments in the CC world was the creation of the Developing Nations license, launched in September 2004. A year earlier, Lessig had approached James Love, the director of Knowledge Ecology International (previously the Consumer Project on Technology), to ask him to craft a CC license that might help developing countries. Love proposed that the CC offer a “rider” at the end of its existing licenses so that people using the licenses could exempt developing nations from, say, the NonCommercial or NoDerivatives license restrictions. So, for example, if a textbook author wanted to let developing nations copy her book for either commercial or noncommercial purposes, she could add a rider authorizing this practice. +={Love, James+3;Lessig, Lawrence:CC International, and} + +Love was trying to do for books and journal articles what is already possible for drugs — the legalization of a commercial market for generic equivalents. Love had seen how generic drugs could reach people only because for-profit companies were able to produce and sell the drugs; nonprofit or philanthropic distribution is just not powerful enough. But the market for generic drugs is possible only because of laws that authorize companies to make legal knockoffs of proprietary drugs once the patent terms expire. Love hoped to do the same via a Developing Nations license for copyrighted works: “It would create an opportunity for the publishing equivalent of generic drug manufacturers who make ‘generic’ books. In developing countries, you have whole libraries full of photocopied books. You would not have libraries there if people didn’t engage in these practices.”~{ Interview with James P. Love, June 13, 2006. }~ + +In the end, Creative Commons offered the Developing Nations license as a separate license, not a rider. 
It had simple terms: “You must attribute the work in the manner specified by the author or licensor (but not in any way that suggests that they endorse you or your use of the work)” — and the license was valid only in non–high income nations, as determined by United Nations’ statistics. Although the release of the license got considerable press coverage, actual usage of the license was extremely small. The most prominent use was totally unexpected — for architectural designs. Architecture for Humanity, a California nonprofit, used the license for its designs of low-cost housing and health centers. The organization wanted to give away its architectural plans to poor countries while not letting its competitors in the U.S. use them for free.~{ Creative Commons blog, Kathryn Frankel, “Commoners: Architecture for Humanity,” June 30, 2006, at http://creativecommons.org/education/architecture. }~ +={United Nations} + +The expected uses of the Developing Nations license never materialized. In 2006, Love said, “The license is there, but people who might be willing to use it are not really aware of it.” He worried that the license “hasn’t really been explained in a way that would be obvious to them,” and ventured that there may be “a need for a re-marketing campaign.” By this time, however, the license had attracted the ire of Richard Stallman for its limitations on “freedom.”~{ See Lessig on Creative Commons blog, December 7, 2005, at http://creativecommons.org/weblog/archive/2005/12/page/3. }~ It prohibited copying of a work in certain circumstances (in high-income countries) even for noncommercial purposes, and so authorized only a partial grant of freedom, not a universal one. “Well, the whole point was /{not}/ to be universal,” said Love. “The license is for people that are unwilling to share with high-income countries, but are willing to share with developing countries. 
So it actually expands the commons, but only in developing countries.”~{ Interview with James Love, June 13, 2006. }~ +={Lessig, Lawrence:CC International, and+1;Stallman, Richard:freedom, and+2} + +The controversy that grew up around the Developing Nations license illuminates the different approaches to movement building that Lessig and Stallman represent. Lessig’s advocacy for free culture has been an exploratory journey in pragmatic idealism; Stallman’s advocacy for free software has been more of a crusade of true believers in a core philosophy. For Stallman, the principles of “freedom” are unitary and clear, and so the path forward is fairly self-evident and unassailable. For Lessig, the principles of freedom are more situational and evolving and subject to the consensus of key creative communities. The flexibility has enabled a broad-spectrum movement to emerge, but it does not have the ideological coherence of, say, the free software movement. +={Stallman, Richard:free software, and;free software:international licensing, and;Lessig, Lawrence:freedom, and} + +Several factors converged to make it attractive for Creative Commons to revoke the Developing Nations license. Some people in the open-access publishing movement disliked the license because it did not comply with its stated standards of openness. In addition, Richard Stallman’s increasingly strident objections to Creative Commons licenses were starting to alarm some segments of the “free world.” What if Internet content became Balkanized through a series of incompatible licenses, and the movement were riven with sectarian strife? Stallman objected not only to the Developing Nations license, but to attempts by Creative Commons to get Wikipedia to make its content, licensed under the GNU Free Documentation license, compatible with the CC licenses. By 2007 this dispute had been simmering for four years (see pages 212–217). 
+={Wikipedia:CC licenses, and;Stallman, Richard:influence of} + +Finally, many CC staff members regarded the Developing Nations and Sampling licenses as misbegotten experiments. Fewer than 0.01 percent of uses of CC licenses at the time involved the Developing Nations license, and the Sampling license was used by a relatively small community of remix artists and musicians. If eliminating two little-used niche licenses could neutralize objections from the open access and free software movements and achieve a greater philosophical and political solidarity in the “free world,” many CC partisans regarded a rescission of the licenses as a modest sacrifice, if not a net gain. +={remix works+1;music:remixes;Creative Commons (CC) licenses:music, for} + +In June 2007, Creative Commons announced that it was officially retiring the two licenses.~{ Creative Commons “retired licenses page,” at http://creativecommons.org/ retiredlicenses. }~ In a formal statement, Lessig explained, “These licenses do not meet the minimum standards of the Open Access movement. Because this movement is so important to the spread of science and knowledge, we no longer believe it correct to promote a standalone version of this license.”~{ Lawrence Lessig, “Retiring standalone DevNations and One Sampling License,” message to CC International listserv, June 4, 2007. }~ The Creative Commons also revoked the Sampling license because it “only permits the remix of the licensed work, not the freedom to share it.” (Two other sampling licenses that permit noncommercial sharing— SamplingPlus and NonCommercial SamplingPlus — were retained.) +={Lessig, Lawrence:CC International, and} + +Anyone could still use the Sampling or Developing Nations license if they wished; they still exist, after all. It’s just that the Creative Commons no longer supports them. While the actual impact of the license revocations was minor, it did have major symbolic and political significance in the commons world. 
It signaled that the Creative Commons was capitulating to objections by free software advocates and the concerns of open access publishing activists. + +2~ The iCommons Network +={iCommons+13;Creative Commons International:iCommons network+13} + +As an international network of CC affiliates grew, it naturally spawned new pockets of activism. Lessig explained: “Once a country gets launched, it becomes a cell of activism. Sometimes it is very traditional — Creative Commons Korea is made up of a bunch of federal judges — and sometimes it is very radical — Creative Commons Croatia is made up of a bunch of real activists who want to change copyright. Creative Commons Poland, too, is a bunch of really smart law graduates. But then there is the artist community, on the other side, many of whom want to blow up copyright; they just think it is ridiculous. +={Croatia:CC licenses in;Korea:CC licenses in;Creative Commons International:Croatia|Korea|Poland;Poland:CC licenses in;Lessig, Lawrence:CC International, and+1|iCommons, and+1} + +“So the opportunity and problem we faced at that point,” said Lessig, “was, ‘Well, what are we going to do with these activists?’ Because Creative Commons wanted to facilitate activism, of course, but it wasn’t as if we could bring activism into our core because it would make it more suspect.”~{ Interview with Lawrence Lessig, March 20, 2006. }~ + +The first steps toward organizing this protocommunity of activists came in March 2005, when eighty people from the various international licensing projects convened in Boston to talk about their shared challenges.~{ http://icommons.org/isummit05. }~ It quickly became clear that everyone wanted a forum in which to learn from one another, coordinate their work, and see themselves as something larger . . . perhaps a new sort of movement. 
+={Creative Commons (CC):social movement, as+1} + +Here again was the tension between “the movement” and “the machine.” As neutral stewards of the licenses, the CC affiliates could not become full-throated advocates of a new international free culture movement. Their mission was preserving the integrity and utility of the licenses for all users, not advocacy. To avoid this problem, the Creative Commons, with an infusion of seed money and CC leaders, in 2006 started a new nonprofit organization, iCommons. +={Creative Commons (CC):“machine”, as} + +iCommons, a registered charity in the United Kingdom, is led by Heather Ford, a South African who met Lessig at Stanford and went back to her country to evangelize the Creative Commons licenses. Working out of Johannesburg, Ford is the activist counterpart to her Berlin licensing colleagues. She is a gregarious, spirited organizer who keeps tabs on activist gambits in dozens of nations and pulls together annual iCommons “summits.” +={Ford, Heather} + +The iCommons conferences are something of a staging area for a new type of global citizenship in the digital “free world.” The first conference, in Rio de Janeiro in June 2006, attracted more than three hundred commoners from fifty nations.~{ http://icommons.org/isummit06. }~ The second one, in Dubrovnik, Croatia, drew a somewhat larger and still more diverse crowd, and a third was held in Sapporo, Japan, in 2008. The free and open-source software community and the Creative Commons network are two of the largest, most influential blocs participating in iCommons, although Wikipedians represent a growing sector. But there are many other factions. There are musicians from the indie music, netlabels, and the remix scene. Filmmakers trying to reform fair use legal norms and video artists who are into mashups. Bloggers and citizen-journalists and social-networking fans. Gamers and participants in immersive environments like Second Life and World of Warcraft. 
Open business entrepreneurs who regard free software and CC licenses as key elements of their competitive, profit-making strategies. +={Wikipedia:iCommons, and} + +From Japan, there were anime artists who are into remixes. From South Africa, print-on-demand research publishers. A bare-chested Brazilian guitarist traded thoughts about copyright law with a Zagreb performer. An Amsterdam hacker with a punk t-shirt shared a smoke with an American academic. From India, there was Lawrence Liang, founder of the Alternative Law Forum, a leading intellectual about copyright law and economic and social inequality. From Syria, there was Anas Tawileh, who is working to produce the Arab Commons, a directory of Arabic works released under any of the CC licenses. He hopes it will counteract “the weak representation of the Arabic language on the Internet, the shallow nature of Arabic content currently available and the consumption rather than the production of knowledge.” From the United States, there was Michael Smolens, an entrepreneur who started dotSUB, a captioning system to make any film available in any language. +={Liang, Lawrence;Tawileh, Anas;Smolens, Michael} + +The convergence of so many players in the nascent sharing economy, assembled in the flesh, was a bracing glimpse into a new kind of cosmopolitan, democratic sensibility. The program organizers stated their aspirations this way: “How do we help one another to build a commons that nurtures local communities while respecting the needs of others? How can we move towards the growth of a ‘Global Commons Community’?”~{ iCommons Summit ’06 program. }~ + +Although most international commoners seem to be culturally progressive and politically engaged, they cannot be situated along a left-right ideological spectrum. This is because commoners tend to be more pragmatic and improvisational than ideological. 
They are focused on building specific projects to facilitate sharing and creativity, based on open-source principles. Their enthusiasm is for cool software, effective legal interventions, and activist innovations, not sectarian debate. + +It is not as if politics has been banished. For example, some critics have questioned the “elite” origins and governance structure of iCommons, which was hatched by CC board members and leaders. David Berry, a free culture advocate who teaches at the University of Sussex, complained on a listserv that iCommons was “creating a corporate machine rather than a democratic one.”~{ David Berry, “The iCommons Lab Report,” sent to UK FreeCulture listserv, November 9, 2006. }~ He cited ambiguity in the powers of the organization, the murky process by which the iCommons code of conduct was adopted, and the board’s selection of community council members. Still other critics have grumbled at the Creative Commons’s collaboration with Microsoft in developing a licensing feature within the Word application. +={Berry, David;Microsoft:CC licenses, and} + +When pressed at the 2006 iCommons Summit to develop more formal organizational structure, Lessig begged off for the time being, saying that “trust and faith in each other” was a better approach than rigid rules and system. “We need a recognition that we have a common purpose. Don’t tell me that I need to tell you what that is, because we’ll never agree, but we do have a common purpose.”~{ Becky Hogge, “What Moves a Movement,” OpenDemocracy.org, June 27, 2006, at www.opendemocracy.net/media-commons/movement_3686.jsp. }~ This provoked Tom Chance, a free software and free culture advocate, to complain that “Lessig’s call to base the organization on ‘trust and faith in each other’ is too idealistic and undemocratic.” +={Chance, Tom;Lessig, Lawrence:iCommons, and} + +The encounter nicely captures the quandaries of leadership and governance in the networked environment. 
How can the effectiveness and clarity of leadership be combined with networked participation and the legitimacy that it provides? How should an organization draw philosophical boundaries to define itself while remaining open to new ideas? How should participation in online collectives be structured to generate collective wisdom and legitimacy and avoid collective stupidity and bureaucratic paralysis? In this case, iCommons diversified its governance in late 2007. It invited the Free Software Foundation Europe, Computer Professionals for Social Responsibility, and Instituto Overmundo, a Brazilian nonprofit dedicated to cultural diversity, to join Creative Commons as full-fledged partners in managing the organization. Despite its broadened leadership, iCommons remains more of a convener of annual forums and discussion host than the democratically sanctioned voice of an international movement. + +This is not surprising. The international commons community is still a fledgling enterprise trying to forge an identity and agenda. The resources for many CC affiliates are quite modest and the bonds of cooperation remain rudimentary. That said, the international explosion of free culture projects, above and beyond the CC licenses themselves, is nothing short of remarkable. It represents a “vast, transnational mobilization in favor of digital freedom,” as Gilberto Gil put it. In the early stages of the viral spiral, no one could have imagined that a corps of passionate, self-selected volunteers cooperating through the Internet could accomplish so much. And it continues, unabated. +={Gil, Gilberto;free culture:international} + +1~ 9 THE MANY FACES OF THE COMMONS + +/{As the “free world” grows and diversifies, so does debate over how to build the commons.}/ + +As the Creative Commons insinuated itself into one creative sector after another, and throughout dozens of nations, the variety of licenses proliferated. 
By one count in 2006, there were once eighteen distinct CC licenses, not counting version changes.~[* The eighteen licenses once offered include the core six licenses; a nonattribution version of five of those six licenses (now retired); three sampling licenses (one of which has been retired); the Developing Nations license (now retired); and a public domain dedication (which is otherwise not possible under copyright statutes). There was also a “Music Sharing license,” which was just another name for the Attribution-NonCommercial-No Derivatives license, and a “Founders’ Copyright,” which is not a license but a contract between an author and Creative Commons to place a particular work in the public domain after fourteen years (or twenty-eight years, if the author opts for a fourteen-year extension)]~ In the meantime, other parties were offering their own licenses. While the Creative Commons licenses had become the most-used licenses on the Internet, many people were choosing to use Free Software Foundation licenses for text (the GNU Free Documentation License, or FDL), the European Art Libre license, and special licenses that various institutions have devised for the arts, music, and educational works. +={Creative Commons (CC) licenses:types of+6} + +In theory, a proliferation of licenses is not a bad thing. By the lights of free-market economics and complexity theory, in fact, the best way to identify the most useful licenses is to introduce a variety of them and then let them compete for supremacy. Let natural selection in an ecosystem of licenses cull the losers and elevate the most useful ones. + +Unfortunately, this libertarian vision of diverse licenses competing for supremacy in the cultural ecosystem can run up against a harsh reality of the Internet. Too many disparate licenses may make it /{harder}/ for people to share content in an easy, interoperable way. 
It is not the proliferation of licenses per se that is problematic, it is the absence of a mechanism to enable differently licensed works to “play together” so that they can commingle and be used to produce new things. If bodies of works released under a CC license cannot be combined with works licensed under other licenses, it defeats one of the key value propositions of the Internet, easy interoperability and facile sharing and reuse. Despite its best intentions, license proliferation has the effect of “fencing off the commons,” because the different license terms keep different bodies of work in separate ghettos. + +Incompatibility is a problem both within the suite of CC licenses and between CC licenses and other licenses. Within the CC suite of licenses, for example, a work licensed under the Attribution-NonCommercial-ShareAlike license (BY-NC-SA) cannot legally be combined with a work licensed under the Attribution-No Derivatives license (BY-ND) or an Attribution-NonCommercial (BY-NC). The former license requires that any derivative works be licensed under the same license, period. +={Creative Commons (CC) licenses:incompatibility of+3} + +Some observers are not disturbed by the internal incompatibilities of the CC suite of licenses. They regard the different licenses as tools for various communities to build their own “subeconomies” of content, based on their own distinct needs and priorities. A scientist may not want his research articles altered or combined with other material. A musician may want to promote noncommercial usage on the Internet but retain commercial rights so that he can benefit from any CD sales. Not all creative sectors want to distribute their work in the same ways. + +The incompatibility between CC-licensed work and other free-content licenses is arguably more problematic. At a conference in Spain in the summer of 2005, Lessig recalls having a “Homer Simpson moment” — /{D’oh!}/ — when he realized where license proliferation was heading. 
The incompatibility of licenses, and therefore bodies of content, could lead to an irretrievably fragmented universe of content. Lessig saw license proliferation as analogous to the Balkanization of technical standards that once plagued mainframe computing. IBM computers couldn’t communicate with DEC, which couldn’t communicate with Data General.~{ Ibid. }~ “The legal framework of the licensing world is basically a pre-Internet framework,” said Lessig in 2007. “We don’t have interoperability at the layer of legal infrastructure.”~{ Interview with Lawrence Lessig, October 23, 2007. }~ +={Lessig, Lawrence:CC licenses, and+1} + +_1 In my view [said Lessig], there’s a critical need for the free culture movement to achieve interoperability. And until it achieves interoperability, there’s a huge problem — because we’re creating these kinds of autistic islands of freedom. Basically, the stuff produced in the Wikimedia world is free, but can only be used in the Wikimedia world; the stuff created in the Creative Commons world is free, but can only be used in the Creative Commons world — and never the two will meet. That’s very destructive, because what we want is a kind of invisible platform of freedom that everybody can then build on. It’s been my objective from the very beginning to find the way to assure that we would get that platform.~{ Ibid. }~ + +A critic might call it “the revenge of choice” — the inevitable outcome of a neoliberal philosophy that privileges individualism and choice, rather than a collective concern for the commons. This is the view of Niva Elkin-Koren, a law professor at the University of Haifa (which coincidentally is the host of CC Israel). Elkin-Koren argues that the Creative Commons is replicating and reinforcing property rights discourse and failing to advance the cause of copyright reform. 
Because the Creative Commons is plagued by an “ideological fuzziness” that does not adequately set forth a philosophical vision of freedom or the commons, Elkin-Koren believes the CC project threatens to “spread and strengthen the proprietary regime in information.”~{ Niva Elkin-Koren, “Exploring Creative Commons: A Skeptical View of a Worthy Pursuit,” chapter XIV in Lucie Guibault and P. Bernt Hugenholtz, editors, /{The Future of the Public Domain: Identifying the Commons in Information Law}/ (Alphen aan den Rijn, Netherlands: Kluwer Law International BV, 2006). }~ +={Elkin-Koren, Niva;Creative Commons (CC) licenses:critics of+5} + +This critique was at the heart of one of the most serious internecine squabbles in the movement, the struggle to make Wikipedia content — licensed under the Free Software Foundation’s GNU Free Documentation License — compatible with CC-licensed content. The failure to find a solution, after four years of negotiation, threatened to keep two great bodies of Internet content from legally commingling and cause further fragmentation of open content. +={Wikipedia:CC licenses, and;GNU Project:GNU FDL;Free Software Foundation} + +There are other controversies. Anticapitalist leftists periodically take the Creative Commons to task for being too politically respectable. Friendly voices from underdeveloped nations of the Southern Hemisphere have raised alarms that the public domain is just another excuse for corporate exploitation of their resources. Others from the South argue that the informal, social commons inhabited by poor people — the “nonlegal commons” — deserve respect, too. And then there are copyright traditionalists, who believe that a redoubled effort to fortify the fair use doctrine should be a top priority. +={Creative Commons (CC) licenses:public domain, and;public domain:CC licenses, and} + +For the most part, the general public is oblivious to these internecine disputes. 
Who cares about the relative merits of using a GNU Free Documentation License for Wikipedia entries instead of a Creative Commons license? The layperson may not understand the long-term implications of vesting individual authors with the choice of how to share a work (in the style of the Creative Commons) as opposed to vesting communities of practice with those rights (in the style of the Free Software Foundation’s General Public License). Yet tech sophisticates realize that, in the context of the Internet, uninformed choices today can have serious practical consequences tomorrow. The terms of a license or the design of a software application or digital appliance can prevent people from sharing or reusing works. Bodies of content may become legally incompatible. Consumer freedoms to innovate and distribute may be limited. And then there are second-order questions that have great symbolic importance within the movement, such as, Whose vision of “freedom” in digital spaces shall we endorse? What is philosophically desirable and consistent? +={Wikipedia:CC licenses, and;free culture:sharing ethic of;Internet:future of} + +For a movement that aspires to simplify copyright law, the free culture movement has gotten embroiled in knotty debates that might give lawyers headaches. It is not easy to tell if the disputants are persnickety zealots who have spent too much time in front of their screens or latter-day Jeffersons, Madisons, and Hamiltons— brilliant thinkers who are astute enough to understand the longterm implications of some difficult issues and passionate enough to take a stand. One person’s arcana can be another person’s foundational principle, and one person’s quest for intellectual clarity is another person’s distraction from the messy challenges of building a movement. + +That is the basic problem of the crazy-quilt network that constitutes the free world. 
There are, in fact, so many divergent, sometimes competing, sometimes congruent agendas that it can be difficult to orchestrate them into a single, harmonious song. For better or worse, the passions that animate culture jammers, copyright reformers, hackers, law scholars, artists, scientists, and countless others in seventy-plus countries are widely divergent. Although the intramovement disagreements may sometimes seem gratuitous, sectarian, and overblown, they are, in fact, understandable. The commoners tend to see their projects as part of a larger, ennobling enterprise— the construction of a new democratic polity and cultural ecology. It makes sense to fret about the technical, legal, and philosophical details when so much is potentially at stake. + +2~ Individual Choice Versus the Commons +={commons:individual choice vs.+8} + +It turns out that overcoming license incompatibilities is not such an easy task. Any attempt to bridge differences immediately runs into mind-bending legal complexities. Crafting new licensing language can trigger philosophical disagreements, some of which may be proxies for turf issues and personal control. One of the major philosophical disagreements involves the one raised by Elkin-Koren: the merits of individual choice versus the commons. Should individuals be allowed to choose how their work may circulate in the wider world, or is such legal partitioning of culture an affront to the value proposition of the commons and its sharing ethic? Why should the choices of individual creators be privileged over the creative needs of the general culture? +={Elkin-Koren, Niva;Creative Commons (CC) licenses:incompatibility of+4} + +The question is a divisive one. 
The answer that you give, Yochai Benkler of Harvard Law School told me, “depends on whether you think that what you’re doing is building a political movement or whether you’re building a commons that has narrower appeal, but is potentially, more functionally unitary.”~{ Interview with Yochai Benkler, February 7, 2006. }~ A movement is about building a “big tent,” he said — a vision that accommodates many different types of people with different preferences. If you are building a movement, then you will use terminologies that are attractive to a very broad range of liberal and illiberal conceptions of choice, he said. +={Benkler, Yochai:social movements, on;Creative Commons (CC):social movement, as} + +But a commons — of the sort that Richard Stallman’s GPL enables for software code — requires that its members honor a community’s social and moral priorities. A commons does not cater to individual preferences; its first priority is to advance the shared goals and relationships of the community. A commons is not oblivious to the self-interest of individuals. It just fulfills that self-interest in a different way. A commons does not confer benefits through individual negotiations or transactions, but instead through an individual’s good-faith participation in an ongoing, collective process. There is no individual quid pro quo, in other words. A person’s contributions accrue to the collective — and benefits flow from belonging to that collective. This is not an exotic or communistic model; it more or less resembles a scientist’s relationship with his research discipline. In the style of a gift economy, a scientist’s articles and lectures are gifts to the discipline; in return, he enjoys privileged access to his colleagues and their research. 
+={Stallman, Richard:GPL, and;General Public License (GPL):free software, and+3;Internet:gift economy} + +It is worth noting that a commons does not necessarily preclude making money from the fruit of the commons; it’s just that any commercial activity cannot interfere with the integrity of social relationships within the commons. In the case of GPL’d software, for example, Red Hat is able to sell its own versions of GNU/Linux only because it does not “take private” any code or inhibit sharing within the commons. The source code is always available to everyone. By contrast, scientists who patent knowledge that they glean from their participation in a scientific community may be seen as “stealing” community knowledge for private gain. The quest for individual profit may also induce ethical corner-cutting, which undermines the integrity of research in the commons. +={Red Hat;GNU/Linux:Red Hat, and;code:free access to;commoners:sharing by} + +Ironically, the Creative Commons is not itself a commons, nor do its licenses necessarily produce a commons in the strict sense of the term. The licenses are /{tools}/ for creating commons. But the tools do not require the creation of a commons (unlike the GPL). In this sense, a commons of CC-licensed content may be a “lesser” type of commons because it may have restrictions on what content may be shared, and how. The choices of individual authors, not the preexisting claims of the community, are considered paramount. +={Creative Commons (CC) licenses:tools for creating commons, as+3} + +Is one type of commons superior to the others? Does one offer a superior vision of “freedom”? This philosophical issue has been a recurrent source of tension between the Free Software Foundation, the steward of the GPL, and the Creative Commons, whose licenses cater to individual choice. 
+ +Strictly speaking, a commons essentially offers a binary choice, explained Benkler: “You’re in the commons or you’re out of the commons.” By broadening that binary choice, the CC licenses make the commons a more complicated and ambiguous enterprise. This is precisely what some critics like Stallman have found objectionable about certain CC licenses. They don’t necessarily help forge a community of shared values and commitments. Or as two British critics, David Berry and Giles Moss, have put it, the CC licenses create “commons without commonality.”~{ David Berry and Giles Moss, “On the ‘Creative Commons’: A Critique of the Commons without Commonality,” Free Software Magazine, July 15, 2005, at http://www.freesoftwaremagazine.com/articles/commons_without_commonality. }~ +={Benkler, Yochai:social movements, on;Berry, David;Moss, Giles;Stallman, Richard:criticisms by} + +Inviting authors to choose how their work may circulate can result in different types of “commons economies” that may or may not be interoperable. ShareAlike content is isolated from NoDerivatives content; NonCommercial content cannot be used for commercial purposes without explicit permission; and so on. CC-licensed works may themselves be incompatible with content licensed under other licenses, such as the GNU Free Documentation License. + +2~ Freedom, the Commons, and Movement Building +={free culture+9} + +The slightly confused layperson may ask, Why does all of this matter? The answer may depend on your commitment to the commons as a different (better?) way of creating value. Do you believe in individual freedom and choice, as conceived by contemporary liberal societies? Or do you believe in the /{different type of freedom}/ that comes through participation in a community of shared values? +={commons:individual choice vs.+8} + +Does this state the choice too starkly, as an either/or proposition? Some believe that it does. Perhaps a broader taxonomy of commons is possible. 
Perhaps a commons can accommodate some measure of individual choice. Or is that an oxymoron? + +These are pivotal questions. The answers point toward different visions of free culture and different strategic ideas about movement building. Is it enough to put forward a demanding, utopian ideal of the commons, and hope that it will attract a corps of true believers willing to toil away in the face of general indifference or hostility? This is essentially what Stallman has done. Or is it better to build a “coalition of the reasonable,” so that a more accessible, practical vision can gain widespread social acceptance and political traction in a relatively short period of time? This is the vision that drives Larry Lessig and his allies. +={Stallman, Richard:criticisms by|freedom, and;Lessig, Lawrence:freedom, and;free culture:differing visions of+3} + +Some critics accuse Creative Commons of betraying the full potential of the commons because its licenses empower individual authors to decide how “shareable” their works can be. The licenses do not place the needs of the general culture or the commons first, as a matter of universal policy, and some licenses restrict how a work may be used. The lamentable result, say critics like Niva Elkin-Koren, is a segmented body of culture that encourages people to think of cultural works as property. People internalize the norms, such as “This is /{my work}/ and /{I’ll}/ decide how it shall be used by others.” +={Elkin-Koren, Niva;commoners:sharing by+1;Creative Commons (CC) licenses:critics of+2} + +This can be seen in the actual choices that CC licensors tend to use. Some 67 percent of CC-licensed works do not allow commercial usage.~{ Based on Yahoo queries, June 13, 2006, at http://wiki.creativecommons.org/License_Statistics. }~ Arguments go back and forth about whether the NC restriction enhances or shrinks freedom. 
Many musicians and writers want to promote their works on the Internet while retaining the possibility of commercial gain, however remote; this would seem a strike for freedom. Yet critics note that the NC license is often used indiscriminately, even when commercial sales are a remote possibility. This precludes even modest commercial reuses of a work, such as reposting of content on a blog with advertising.~{ Eric Muller, “The Case for Free Use: Reasons Not to Use a Creative Commons–NC License,” at http://freedomdefined.org/Licenses/NC. }~ + +The larger point of criticism is that the Creative Commons licenses do not “draw a line in the sand” about what types of freedoms are inherent to the commons. In the interest of building a broad movement, Creative Commons does not insist upon a clear standard of freedom or prescribe how a commons should be structured. + +“While ideological diversity may be crucial for the successes of a social movement,” observed Elkin-Koren, “it may impair attempts to make creative works more accessible. The lack of a core perception of freedom in information, may lead to ideological fuzziness. This could interfere with the goal of offering a workable and sustainable alternative to copyright.”~{ Niva Elkin-Koren, “Exploring Creative Commons: A Skeptical View of a Worthy Pursuit,” chapter 14 in Lucie Guibault and P. Bernt Hugenholtz, editors, /{The Future of the Public Domain: Identifying the Commons in Information Law}/ (The Netherlands: Kluwer Law International BV, 2006), p. 326. }~ In an essay that offers “a skeptical view of a worthy pursuit,” Elkin-Koren says that the CC regime encourages narrow calculations of self-interest and the same attitudes toward property and individual transactions as the market economy; it does not promote a coherent vision of “freedom” that fortifies the commons as such. 
+={Elkin-Koren, Niva+2;Creative Commons (CC):social movement, as} + +“The normative message that we communicate by using Creative Commons licenses is the strategy of choice,” Elkin-Koren told me. “You’re the owner, you’re the author, and therefore, you are entitled to govern your work. . . . No one tells you that maybe it’s wrong; maybe you should allow people to use your work.” By using the CC licenses, she continued, we internalize these norms. “We are teaching ourselves and others that our works are simply commodities, and like every other commodity, everyone has to acquire a license in order to use it.”~{ Interview with Niva Elkin-Koren, January 30, 2007. }~ +={authorship:control, and} + +But champions of the Creative Commons licenses celebrate their approach as a pragmatic and effective way to break free from the stifling “all rights reserved” ethic of copyright law. Historically, of course, not much else has been successful in challenging copyright norms — which is precisely why Lessig and others find the CC strategy attractive. “If I believed that there was a different discourse that had political purchase in someplace other than tiny corners of law faculty commons rooms, I’d be willing to undertake it,” said Lessig. He concedes that his viewpoint may be affected by his living in the United States instead of Israel (where Elkin-Koren lives) but, in the end, he considers the Creative Commons as “just my judgment about what’s going to be effective.”~{ Interview with Lawrence Lessig, October 23, 2007. }~ +={Lessig, Lawrence:CC licenses, and} + +2~ The Splintering of the Free World? +={Creative Commons (CC) licenses:critics of+20} + +At one point, the philosophical disagreements between the Creative Commons and its critics did not matter so much. There was enough shared purpose and common history that everyone could agree to disagree. And since the project was still young, the stakes were not so high. 
But then it became clear that the CC licenses would be quite popular indeed. When the Creative Commons issued its Developing Nations and Sampling licenses in 2003, it brought Richard Stallman’s simmering dissatisfaction with the organization to a boil, threatening a serious schism. Pointing to the “four freedoms” that define the free software movement, Stallman criticized the new CC licenses as “not free” because they do not allow universal copying of a work. +={Stallman, Richard:criticisms by+2|freedom, and+2;Creative Commons (CC) licenses:popularity of;Creative Commons International:Developing Nations license} + +Stallman objected to the Sampling license because, while it allowed a remix of a licensed work, it did not allow the freedom to share it. The Developing Nations license was objectionable because its freedoms to copy are limited to people in the developing world, and do not extend to everyone. Stallman also disliked the fact that the CC tag that licensors affix to their works did not specify /{which}/ license they were using. With no clear standard of “freedom” and now a mix of licenses that included two “non-free” licenses, Stallman regarded the CC tag as meaningless and the organization itself problematic. + +“I used to support Creative Commons,” said Stallman on his blog in July 2005, “but then it adopted some additional licenses which do not give everyone that minimum freedom, and now I no longer endorse it as an activity. I agree with Mako Hill that they are taking the wrong approach by not insisting on any specific freedoms for the public.”~{ Richard Stallman, “Fireworks in Montreal,” at http://www.fsf.org/blogs/rms/entry-20050920.html. 
}~ + +Mako Hill is a brilliant young hacker and Stallman acolyte who wrote a 2005 essay, “Towards a Standard of Freedom: Creative Commons and the Free Software Movement,”~{ Benjamin Mako Hill, “Towards a Standard of Freedom: Creative Commons and the Free Software Movement,” /{Advogato}/, July 29, 2005, at http://www.advogato.org/article/851.html. }~ a piece that shares Elkin-Koren’s complaint about the CC’s “ideological fuzziness.” Then enrolled in a graduate program at the MIT Media Lab, Hill has written a number of essays on the philosophy and social values of free software. (When he was an undergraduate at Hampshire College, I was an outside advisor for his senior thesis and remain friends with him.) +={Elkin-Koren, Niva;Hill, Benjamin Mako+2;free culture:differing visions of+31;free software:social movement, as+31} + +In his “Freedom’s Standard” essay, Hill wrote: “[D]espite CC’s stated desire to learn from and build upon the example of the free software movement, CC sets no defined limits and promises no freedoms, no rights, and no fixed qualities. Free software’s success is built on an ethical position. CC sets no such standard.” While CC prides itself on its more open-minded “some rights reserved” standard, Hill says that a real movement for freedom must make a bolder commitment to the rights of the audience and other creators— namely, that “essential rights are unreservable.”~{ Interview with Benjamin Mako Hill, June 1, 2007. }~ + +By this, Hill means that certain essential freedoms should not be restricted by copyright law or any license. The problem with the CC licenses, argued Hill, is that they cannot commit to any "/{defined}/ spirit of sharing" (emphasis in original). This is not the way to build a transformative, sustainable movement, said Hill.~{ Ibid. See also Hill, “Freedom’s Standard Advanced?” /{Mute}/, November 23, 2005, at http://www.metamute.org/en/node/5597. }~ + +But what, then, about the choice of authors? 
Doesn’t that freedom count for anything? CC partisans have responded. Joi Ito, the chair of the Creative Commons, wrote in 2007, “CC is about providing choice. FSF is mostly about getting people to make /{their}/ choice. I realize it’s not THAT clear-cut, but I think the point of CC is to provide a platform for choice. . . . I realize that we are headed in the same general free culture direction and many of us debate what choices should be allowed, but I think we are more ‘tolerant’ and support more diverse views than the FSF.”~{ Joichi Ito, message on iCommons listserv, June 1, 2007. }~ +={Ito, Joichi} + +Lessig has argued many times that, just as the free software community decided for itself how its content ought to be distributed, so other artistic sectors — musicians, photographers, filmmakers, etc. — must make such decisions themselves. If they can’t have certain choices, then they will have little interest in joining a movement for free culture, said Lessig at the 23rd Chaos Communication Congress in Berlin. “We don’t have the standing to tell photographers or musicians what ‘freedom’ is.” Why should the Free Software Foundation, or any other group, be able to dictate to an artistic community how their works should circulate? +={Lessig, Lawrence:freedom, and;Free Software Foundation} + +Elkin-Koren is not so sure we can segment the world according to creative sectors and let each determine how works shall circulate. “I don’t think we can separate the different sectors, as if we work in different sectors,” she told me. “We all work in the production of information. My ideas on copyright are really affected by the art that I use and the music that I listen to. . . . Information is essential not only for creating something functional or for selling a work of art, but for our citizenship and for our ability to participate in society. So it’s not as if we can say, ‘Well, this sector can decide for themselves.’”~{ Interview with Niva Elkin-Koren, January 30, 2007. 
}~ +={Elkin-Koren, Niva} + +As Wikipedia began to take off in popularity, what might have been an unpleasant philosophical rift grew into a more serious fissure with potentially significant consequences. All Wikipedia content is licensed under the Free Software Foundation’s GNU Free Documentation License, or FDL,~{ Wikipedia entry on GNU Free Documentation license, at http://en.wikipedia.org/wiki/GNU_Free_Documentation_License. }~ largely because the CC licenses did not exist when Wikipedia was launched in 2001. The FDL, originally intended for the documentation manuals that explicate software applications, is essentially the same as the CC ShareAlike license (any derivative works must also be released under the same license granting the freedom to share). But using the FDL can get cumbersome, especially as more video, audio, and photos are incorporated into a text; each artifact would require that the license be posted on it. As more content is shared, the potential for misuse of the content, and lawsuits over violations of licensing agreements, would grow.~{ Michael Fitzgerald, “Copyleft Hits a Snag,” /{Technology Review}/, December 21, 2005. }~ +={Free Documentation License+10;GNU Project+10;Wikipedia:GNU FDL, and+10|CC licenses, and+10} + +Unfortunately, as a legal matter, the FDL is incompatible with the CC licenses. This means that all content on Wikipedia and its sister Wikimedia projects (Wikispecies, Wikiquote, Wikinews, among other projects) cannot legally be combined with works licensed under CC licenses. Angered by the two “non-free” CC licenses, Stallman dug in his heels and defended Wikipedia’s use of the FDL. He also made it clear that he would remain a critic of Creative Commons unless it revoked or changed its licenses to conform with the Free Software Foundation’s standards of “freedom.” +={Free Software Foundation;Stallman, Richard:criticisms by+5|Wikipedia, and+5} + +Thus began a four-year search for a resolution. 
Lessig recalled, “We started to think about a way that Wikimedia could migrate to a license that we would then deem as compatible to a Creative Commons license. That took two years of negotiation, basically.” One proposed solution was for Wikimedia projects to offer both licenses, the FDL and CC BY-SA, for the same work. However, it was determined that derivative works licensed under one license would still be incompatible with dual-licensed works, resulting in “project bleed” (new works would migrate away from the existing corpus of works). Another approach was for a “one-way compatibility” of licenses, so that people creating works under the FDL could use CC-licensed content. +={Lessig, Lawrence:CC licenses, and+4|freedom, and+4} + +But Lessig realized that these solutions dealt only with the issue at hand; the real challenge was finding a more systemic solution. As various players engaged with the FDL/CC controversy, it grew from a licensing squabble into an intertribal confrontation. It became a symbol for everything that Stallman found politically unacceptable about the Creative Commons’s vision of freedom. + +From 2005 to 2007, the issue roiled many factions within the free culture/free software communities. The debate and invective flew back and forth in various venues, and there were proposals, negotiations, and political maneuvers. MIT computer scientist (and CC board member) Hal Abelson rejoined the FSF board. Lessig and other CC staff entered into talks with the FSF general counsel, Eben Moglen. Wikipedia co-founder Jimmy Wales joined the Creative Commons board. Yet Stallman continued to resist, and the Wikimedia board would not approve any proposed solutions. 
+={Abelson, Hal:CC board, on|Free Software Foundation, and;Moglen, Eben;Wales, Jimmy} + +The stalemate was broken on June 4, 2007, when Lessig made a surprise announcement that the Creative Commons was “retiring” the Developing Nations and Sampling licenses.~{ Lessig post to CC International listserv, June 4, 2007. More about the CC’s retired licenses can be seen at http://creativecommons.org/retiredlicenses. }~ One reason was a lack of interest in the licenses: only 0.01 percent of CC licensors were using each license. But, without alluding to the Free Software Foundation or Stallman, Lessig also noted that the two licenses did not ensure a minimal freedom to share a work noncommercially— a standard met by all other CC licenses. In addition, Lessig pointed out to me, some publishers were beginning to see the Developing Nations license as a subterfuge to avoid meeting open-access publishing standards. +={Free Software Foundation;Creative Commons International:Developing Nations license} + +For Creative Commons, the revocation of the two licenses was at least a shrewd political move; it also affirmed a stricter standard of “freedom” in the ability to use digital materials. In return for sacrificing two little-used licenses, the organization gained Stallman’s eventual support for a deal that would let the FDL be treated as compatible with the CC ShareAlike license. This was a major triumph because it could avoid the contorted, legalistic solutions that had been previously proposed and rejected. It was also a breakthrough because it averted a major rift between two growing bodies of open content and avoided a slow drift into a wider Balkanization of content across the Internet. “I kind of thought that no matter what we did, Richard would find a reason to object,” recalled Lessig, “but he didn’t. He stuck to his principles, so I give credit to him.”~{ Interview with Lawrence Lessig, October 23, 2007. 
}~ +={open networks:license incompatibility, and} + +The debates about “freedom” produced several specific results. In November 2006, when Creative Commons released an updated legal version of its licenses, version 3.0, it formally recognized other licenses as legally compatible with the ShareAlike license if they have the same purpose, meaning, and effect, and if the other license recognizes the CC license. The move should help avoid future strife over interoperability. + +A few months later, the Creative Commons also adopted a “Free Cultural Works” definition and seal as a way to recognize works that are “free,” as understood by the Free Software Foundation. The definition declares that works with either the CC Attribution or Attribution-ShareAlike licenses should be considered “free” because they give people the freedom to modify works without any discrimination against specific uses or users. The definition and seal /{exclude}/ the CC NonCommercial and NoDerivatives licenses, however, because those licenses do not allow this sort of freedom. The purpose of the seal is not to denigrate use of the NC and ND licenses, but to educate users about the less restrictive licenses and to assert a philosophical solidarity with the free software community. +={Free Software Foundation} + +As part of this larger effort, the Creative Commons also issued a draft statement in April 2008 declaring the special importance of the ShareAlike license in the free culture movement and the organization’s intentions in its stewardship of the license. The statement amounted to a diplomatic peace treaty, to be finalized in the months ahead. +={free culture:sharing ethic of+1} + +By May 2008 the details of the agreement to make Wikipedia’s entries, licensed under the FDL, legally compatible with materials licensed under the CC ShareAlike license had not been consummated. 
But it was expected that the legal technicalities would be ironed out, and two great bodies of open content would no longer be legally off-limits to each other. + +2~ Criticism from the Left and from the South + +As the Creative Commons has grown in popularity, a longer line has formed to take issue with some of its fundamental strategies. One line of criticism comes from anticapitalist ideologues, another from scholars of the underdeveloped nations of the South. + +British academics Berry and Moss apparently hanker for a more bracing revolution in culture; they object to the commodification of culture in any form and to the role that copyright law plays in this drama. To them, Lessig is distressingly centrist. He is “always very keen to disassociate himself and the Creative Commons from the (diabolical) insinuation that he is (God forbid!) anti-market, anticapitalist, or communist,” Berry and Moss complain.~{ David Berry and Giles Moss, “On the ‘Creative Commons’: A Critique of the Commons Without Commonality,” /{Free Software Magazine}/, July 15, 2005, at http://www.freesoftwaremagazine.com/articles/commons_without_commonality }~ The gist of their objection: Why is Lessig collaborating with media corporations and neoclassical economists when there is a larger, more profound revolution that needs to be fought? A new social ethic and political struggle are needed, they write, “not lawyers exercising their legal vernacular and skills on complicated licenses, court cases and precedents.” +={Berry, David;Moss, Giles;Lessig, Lawrence:CC licenses, and} + +Dense diatribes against the antirevolutionary character of Creative Commons can be heard in various hacker venues and cultural blogs and Web sites. 
The argument tends to go along the lines sketched here by Anna Nimus of Berlin, Germany: +={Nimus, Anna+1} + +_1 Creative Commons preserves Romanticism’s ideas of originality, creativity and property rights, and similarly considers “free culture” to be a separate sphere existing in splendid isolation from the world of material production. Ever since the 18th century, the ideas of “creativity” and “originality” have been inextricably linked to an anti-commons of knowledge. Creative Commons is no exception. There’s no doubt that Creative Commons can shed light on some of the issues in the continuing struggle against intellectual property. But it is insufficient at best, and, at its worst, it’s just another attempt by the apologists of property to confuse the discourse, poison the well, and crowd out any revolutionary analysis.~{ Anna Nimus, “Copyright, Copyleft and the Creative Anti-Commons,” at http://subsol.c3.hu/subsol_2/contributors0/nimustext.html. }~ + +To ensure that her revolutionary analysis gets out, Nimus released her piece under a self-styled “Anticopyright” notation, with the added phrase, “All rights dispersed.” + +A more penetrating brand of criticism has come from the South, which fears that the West’s newfound enthusiasm for the commons may not necessarily benefit the people of developing nations; indeed, it could simply legitimate new thefts of their shared resources. In an important 2004 law review article, “The Romance of the Public Domain,” law professors Anupam Chander and Madhavi Sunder argue that “public domain advocates seem to accept that because a resource is open to all by force of law, that resource will indeed be exploited by all. In practice, however, differing circumstances — including knowledge, wealth, power and ability — render some better able than others to exploit a commons. We describe this popular scholarly conception of the commons as ‘romantic.’ . . . 
It is celebratory, even euphoric, about the emancipatory potential of the commons. But it is also naïve, idealistic and removed from reality.”~{ Anupam Chander and Madhavi Sunder, “The Romance of the Public Domain,” California Law Review 92, no. 1131 (2004), p. 1341. }~ +={Chander, Anupam+2;Sunder, Madhavi+2;free culture:international+2;public domain+3:commons, and+3} + +If genes, seeds, indigenous medicines, agricultural innovations, artistic designs, music, and the various ecological and cultural resources of the South are not treated as private property, but instead as elements of the public domain, then anyone can exploit them freely. This can lead to serious injustices, as powerful corporations swoop in to exploit resources that are available to all in the public domain. + +Chander and Sunder write: “By presuming that leaving information and ideas in the public domain enhances ‘semiotic democracy’ — a world in which all people, not just the powerful, have the ability to make cultural meanings — law turns a blind eye to the fact that for centuries the public domain has been a source for exploiting the labor and bodies of the disempowered — namely, people of color, the poor, women and people from the global South.”~{ Ibid., p. 1343. }~ Chander and Sunder argue that the binary logic of copyright law — something is either private property or in the public domain — “masks the ways in which the commons often functions more in the interests of traditional property owners than in the interests of commoners.” +={democracy:semiotic} + +This critique makes clear why the distinction between the public domain and the commons matters. The public domain is an open-access regime available to all; it has no property rights or governance rules. The commons, however, is a legal regime for ensuring that the fruits of collective efforts remain under the control of that collective. 
The GPL, the CC licenses, databases of traditional knowledge, and sui generis national statutes for protecting biological diversity all represent innovative legal strategies for protecting the commons. The powerful can exploit and overwhelm the public domain, but they are not likely to overwhelm a commons that has a legal regime to protect a collective’s shared resources. +={commons:protection of;General Public License (GPL):commons, and} + +A more radical and profound critique of the commons came in an open letter to “inhabitants of the ‘legal’ Commons” from “Denizens of Non Legal Commons, and those who travel to and from them.” The three-page letter, drafted by Shuddhabrata Sengupta, a filmmaker and writer with the Raqs Media Collective in New Delhi, is a plea for recognizing the informal sharing economy that flourishes beneath the oblivious gaze of mainstream society, and certainly beyond the reach of property rights and law. +={Sengupta, Shuddhabrata} + +“Greetings!” the letter opens. “This missive arrives at your threshold from the proverbial Asiatic street, located in the shadow of an improvised bazaar, where all manner of oriental pirates and other dodgy characters gather to trade in what many amongst you consider to be stolen goods.” To this /{other}/ commons, stolen goods are really “borrowed,” because nothing is really “owned” — and therefore nothing can be “stolen.” This is the realm of “the great circulating public library of the Asiatic street.” The letter continues: + +_1 We appreciate and admire the determination with which you nurture your garden of licenses. The proliferation and variety of flowering contracts and clauses in your hothouses is astounding. But we find the paradox of a space that is called a commons and yet so fenced in, and in so many ways, somewhat intriguing. The number of times we had to ask for permission, and the number of security check posts we had to negotiate to enter even a corner of your commons was impressive. . . . 
Sometimes we found that when people spoke of “Common Property” it was hard to know where the commons ended and where property began . . . + +_1 Strangely, the capacity to name something as “mine,” even if in order to “share” it, requires a degree of attainments that is not in itself evenly distributed. Not everyone comes into the world with the confidence that anything is “theirs” to share. This means that the “commons,” in your parlance, consists of an arrangement wherein only those who are in the magic circle of confident owners effectively get a share in that which is essentially, still a configuration of different bits of fenced in property. What they do is basically effect a series of swaps, based on a mutual understanding of their exclusive property rights. So I give you something of what I own, in exchange for which, I get something of what you own. The good or item in question never exits the circuit of property, even, paradoxically, when it is shared. Goods that are not owned, or those that have been taken outside the circuit of ownership, effectively cannot be shared, or even circulated.~{ “A Letter to the Commons, from the participants of the ‘Shades of the Commons Workshop,’ ” in /{In the Shade of the Commons: Towards a Culture of Open Networks}/ (Amsterdam, Netherlands: Waag Society, 2006), at http://www3.fis.utoronto.ca/research/iprp/cracin/publications/pdfs/final/werbin_InTheShade.pdf. }~ + +The letter invites a deeper consideration of how humans form commons. However ingenious and useful the jerry-rigged legal mechanisms of the GPL and Creative Commons, the disembodied voice of the Non Legal Commons speaks, as if through the sewer grate, to remind us that the commons is about much more than law and civil society. It is part of the human condition. Yet the chaotic Asiatic street is not likely to yield conventional economic development without the rule of law, civil institutions, and some forms of legal property. 
The question posed by the informal commons remains a necessary one to ponder: What balance of commons and property rights, and in what forms, is best for a society? + +2~ Fair Use and the Creative Commons +={Creative Commons (CC) licenses:fair use, and+15;fair use doctrine:CC licenses, and+15} + +Walk through the blossoming schools of commons thought and it quickly becomes clear that the commons is no monolithic ideal but a many-splendored mosaic of perspectives. To the befuddlement of conventional observers, the perspectives are not necessarily adversarial or mutually exclusive. More often than not, they are fractal— interesting variations of familiar commons themes. In our fascination with newfangled commons, it is easy to overlook a more traditionally minded defender of the commons: the champion of fair use. It is all well and good to promote works that are “born free” under CC licenses, say these friendly critics. But the hard fact of the matter is that for the foreseeable future, creators will still need access to copyrighted content — and this requires a strong fair use doctrine and aggressive public education. + +It is a compelling argument, but in fact only an indirect criticism of Creative Commons. For filmmakers who need to use film clips from existing films and musicians who want to use a riff from another performer, the fair use doctrine is indeed more important than any CC license. Peter Jaszi, the law professor at American University’s Washington School of Law, believes that even with growing bodies of CC-licensed content, “teachers, filmmakers, editors, freelance critics and others need to do things with proprietary content.” As a practical matter, they need a strong, clear set of fair use guidelines. +={Jaszi, Peter+2} + +Jaszi and his colleague Pat Aufderheide, a communications professor who runs the Center for Social Media at American University, have dedicated themselves to clarifying the scope and certainty of fair use. 
They have launched a major fair use project to get specific creative communities to define their “best practices in fair use.” If filmmakers, for example, can articulate their own artistic needs and professional interests in copying and sharing, then the courts are more likely to take those standards into consideration when they rule what is protected under the fair use doctrine.~{ Center for Social Media, at http://www.centerforsocialmedia.org/fairuse. See also Pat Aufderheide and Peter Jaszi, “Fair Use and Best Practices: Surprising Success,” /{Intellectual Property Today}/, October 2007, at http://www.iptoday .com/articles/2007-10-aufderheide.asp; and Peter Jaszi, “Copyright, Fair Use and Motion Pictures,” /{Utah Law Review}/ 3, no. 715 (2007), and which also appeared in R. Kolker, ed., /{Oxford Handbook of Film and Media Studies}/ (2007), at http://www.centerforsocialmedia.org/files/pdf/fairuse_motionpictures.pdf. }~ A set of respectable standards for a given field can help stabilize and expand the application of fair use. +={Aufderheide, Pat+1} + +Inspired in part by a professional code developed by news broadcasters, some of the nation’s most respected filmmakers prepared the Documentary Filmmakers’ Statement of Best Practices in Fair Use, which was released in November 2005. The guidelines have since been embraced by the film industry, television programmers, and insurance companies (who insure against copyright violations) as a default definition about what constitutes fair use in documentary filmmaking.~{ Aufderheide and Jaszi, /{Intellectual Property Today}/, October 2007, at http:// www.iptoday.com/articles/2007-10-aufderheide.asp. }~ Aufderheide and Jaszi are currently exploring fair use projects for other fields, such as teaching, as a way to make fair use a more reliable legal tool for sharing and reuse of works. 
+ +Lessig has been highly supportive of the fair use project and, indeed, he oversees his own fair use law clinic at Stanford Law School, which litigates cases frequently. “It’s not as if I don’t think fair use is important,” said Lessig, “but I do think that if the movement focuses on fair use, we don’t attract the people we need. . . . From my perspective, long-term success in changing the fundamental perspectives around copyright depends on something like Creative Commons as opposed to legal action, and even quasi-legal action, like the Fair Use Project.” +={Lessig, Lawrence:fair use, on+5} + +For Lessig, fair use is deeply flawed as the basis for building a political movement to reform copyright law. He argues that its advocates are dogged by the (unfair) perception that they are “just a bunch of people who want to get stuff for free, without paying for it. . . . It’s too easy to dismiss that movement.” Lessig recalled the time that the head of a major record label snorted, “Fair use is the last refuge of the scoundrel.” Fair use defenders obviously take issue with this characterization, but the accusation nonetheless pushes fair use champions into a rhetorical corner from which it is difficult to escape. + +A more appealing alternative, Lessig argues, is to use the credibility of copyright ownership to argue the point in a different way. He cited the successful campaign by European software engineers in the 1980s to fight attempts to expand patent protection for software. Their campaign did not resemble “a bunch of peer-to-peer downloaders who are saying, ‘Yeah, I want my music for free,’” said Lessig. 
“It was a bunch of people who are the /{beneficiaries}/ of patent rights saying, ‘Look, we /{don’t want}/ these rights.’ That creates a kind of credibility.” From a moral and political standpoint, Lessig argued, a movement based on copyright owners declaring that they want to forfeit certain rights in order to /{share}/ and promote creativity, has greater credibility than a campaign seeking to “balance” the public’s rights against private copyright privileges. + +“I imagine a world where there are one hundred million Creative Commons–licensed artists out there, creating works according to Creative Commons views,” he said. Then, when Hollywood pressures Congress for stronger copyright protections, he said, “there would be all these people out there who are creating according to a radically different model. [Hollywood’s] claims about extremism would just not be true for a large number of creators.” Instead of a copyright debate that pits “creators” against “pirates,” Lessig said, “I want to create this world where there is a third category of people who are creators, but who create according to different values, values that emphasize the importance of sharing and building upon the past.”~{ Interview with Lawrence Lessig, October 23, 2007. }~ +={piracy} + +In the larger scheme of things, the tensions between the fair use and free culture advocates are not mutually exclusive. In the end, the two approaches complement each other with different contributions. Both seek to promote sharing and reuse, but the former works within the traditional framework of copyright law; the latter is trying to build a whole new body of culture and discourse. There is a kind of gentleman’s agreement between the fair use and free culture communities to work on different sides of the street, while traveling a parallel path down the same road. 
+={free culture:fair use vs.+2;fair use doctrine:copyright law, and;copyright law:fair use doctrine, and} + +For Lessig, there is little advantage in shirking the property rights discourse of copyright law, as Elkin-Koren and the “Non Legal Commons” urge. Indeed, he sees a distinct strategic advantage in /{embracing}/ that discourse — and then trying to turn it to different ends. This, in a way, is what Stallman succeeded in doing with the GPL, a license based on copyright law. Yet, while Stallman attracted a somewhat homogeneous community of programmers to his movement, Creative Commons has attracted a sprawling community of eclectic interests, diverse priorities, and no agreed-upon philosophical core. +={Elkin-Koren, Niva;copyright law:property rights, and;property rights:copyright law, and;free software:social movement, as} + +By choosing a middle path that embraces but seeks to transform property discourse, Creative Commons may avoid the marginalization of ardent leftists and the modest agenda of fair use activism. It remains an open question whether the ideological fuzziness at the core of Creative Commons, or the limitations of its licenses, is offset by its success in popularizing a new cultural vision. Yochai Benkler, the great commons theorist, understands the legal criticisms, and agrees with them to an extent. But ultimately, the significance of Creative Commons, he believes, has been “in galvanizing a movement, in symbolizing it and in providing a place to organize around. From my perspective, if I care about Creative Commons, it is as a cultural icon for a movement, more than as a set of licenses. Which is why I am less bothered than some, about the people who are beginning to criticize Creative Commons and how good the licenses really are, and how compatible they are.”~{ Interview with Yochai Benkler, February 7, 2006. 
}~ +={Benkler, Yochai:social movements, on;free culture:social movement, as+4} + +For Cory Doctorow, the copyfighter and sci-fi writer, the eclectic viewpoints within the free culture movement are a decisive strength: “The difference between a movement and an organization,” he wrote on the iCommons listserv, “is that an organization is a group of people who want the same thing for the same reason. A movement is a collection of groups of people who want the same thing for different reasons. Movements are infinitely more powerful than organizations.” +={Doctorow, Cory:free culture movement, and+1} + +The reason the environmental movement is so powerful, Doctorow continued, is the very fact that it encompasses “anticapitalists, green investors, spiritualists, scientists, hunters and fishers, parents worried about environmental toxins, labor reformers, proglobalists, anti-globalists, etc. . . . Denuding the ideological landscape of the environmental movement in a purge to eliminate all those save the ones who support environmentalism /{qua}/ environmentalism would be the worst setback environmentalism could suffer. Likewise copyfighters: there are Marxists, anarchists, Ayn Rand objectivists, economists, artists, free marketeers, libertarians, liberal democrats, etc., who see copyright liberalization as serving their agenda. If we insist that copyright reform is about copyright reform and nothing else, there will be no copyright reform movement.”~{ Cory Doctorow, iCommons listserv [thread, “Andrew Orlowski Attacks Lessig”], June 1, 2007. }~ +={Rand, Ayn} + +There is a price to be paid for all this diversity, however. Diversity means constant debate. Debate can escalate into strife and sectarianism. And in the free culture movement, where so many people are feverishly improvising and inventing, nearly everything is open for debate. It turns out that this business of inventing the commons is complicated stuff; there are many ways to construct a commons.
It is only natural for people to have their own ideas about how to build the digital republic. + +The fundamental question may be whether the existing framework of copyright law and property discourse can be adequately reformed — or whether its very categories of thought are the problem. The late poet and activist Audre Lorde, in the context of feminist struggle, declared that the prevailing discourse must be overthrown, not reformed, because, in her words, “the master’s tools will never dismantle the master’s house.” Within the free software and free culture movements, however, there are those who believe that copyright law can be sufficiently adapted to build a sharing economy, a more competitive marketplace, and a more humane democratic culture. Others are convinced that the legal discourse of property rights, however modified, will simply entrench the very principles that they wish to transcend. As the movement grows and diversifies, debates over what constitutes the most strategic, morally honorable path forward are likely to intensify. +={Lorde, Audre;copyright law:property rights, and;property rights:copyright law, and;free software:social movement, as} + +:B~ PART III + +:C~ A Viral Spiral of New Commons + +1~intro_iii [Intro] -# + +By 2008 the viral spiral had come a long way. Richard Stallman’s fringe movement to build a commons for code became an enormous success, partly inspiring Lawrence Lessig and his compatriots to develop the Creative Commons licenses and a larger vision of free culture. Empowered by these tools, ordinary people began to develop some exciting new models for creativity and sharing. New types of commons arose. Soon there was a popular discourse about the sharing economy, a politics of open networks, and a new international social movement. The movement was so successful at diversifying itself that it was able to engage in serious internecine squabbles. 
+={Stallman, Richard:influence of;code:free access to;commons:achievement of;free culture;Lessig, Lawrence:CC licenses, and;open networks+1} + +As the commons movement matured, and people came to understand the sensibilities of open networks, the viral spiral seemed to acquire new speed and powers. Over the past few years, it has advanced into all sorts of new arenas. Part III examines three of the most exciting ones — business, science, and education. Each has taken the tools and insights developed by the commons movement — free software, CC licenses, collaborative models — and adapted them to its own special needs. + +These spin-off movements of entrepreneurs, scientists, and educators recognize their debt to the free software and CC licenses, but none feels confined by that history or beholden to its leaders. Each is too intent on adapting the tools to its own circumstances. Just as CC licenses have been used in some ways by musicians, and in other ways by filmmakers, and in still other ways by bloggers, so the commoners in the worlds of business, science, and education are forging their own paths. Development requires differentiation. It is fascinating to watch how the principles of the commons are being crafted to meet the distinctive needs of the marketplace, the academy, the research lab, and the classroom. + +What may be most notable about these developments is the blurring of these very categories. On open platforms, social communities are becoming sites for market activity. Scientists are increasingly collaborating with people outside their disciplines, including amateurs. Formal education is becoming more focused on learning, and learning is moving out of the classroom and into more informal and practice-driven venues. + +If there is a common denominator in each of the domains examined in Part III, it is the use of distributed networks, social community, and digital technologies to enhance the goals at hand. 
The new open business models seek to bring consumer and seller interests into closer alignment. The new science commons seek to create more powerful types of research collaboration. The open educational resources movement wants knowledge to circulate more freely and students to direct their own learning. + +For the short term, the fledgling models in these fields are likely to be seen as interesting novelties on the periphery of the mainstream. In time, however, given what we know about network dynamics, the new models are likely to supplant or significantly transform many basic parameters of business, science, and education. The participatory practices that open networks enable are showing that knowledge is more about socially dynamic relationships than about fixed bodies of information. These relationships are also spawning new challenges to institutional authority and expertise. If one looks closely enough, the matrix for a very different order of knowledge, institutional life, and personal engagement can be seen. + +1~ 10 THE NEW OPEN BUSINESS MODELS +={open business models+79} + +/{The commons and the market can be great partners if each shows respect for the other and ingenuity in working together.}/ + +Entrepreneur John Buckman concedes that his Internet record label, Magnatune, amounts to “building a business model on top of chaos.”~{ John Buckman presentation at iCommons Summit, Dubrovnik, Croatia, June 15, 2007. }~ That is to say, he makes money by honoring open networks and people’s natural social inclinations. The company rejects the proprietary muscle games used by its mainstream rivals, and instead holds itself to an ethical standard that verges on the sanctimonious: “We are not evil.” In the music industry these days, a straight shooter apparently has to be that blunt. 
+={Buckman, John+4;Magnatune+8;music:CC licenses for+8;Creative Commons (CC) licenses:music, for+8} + +Magnatune is a four-person enterprise based in Berkeley, California, that since 2003 has been pioneering a new open business model for identifying and distributing high-quality new music. It does not lock up the music with anticopying technology or digital rights management. It does not exploit its artists with coercive, unfair contracts. It does not harass its customers for making unauthorized copies. Internet users can in fact listen to all of Magnatune’s music for free (not just music snippets) via online streaming.~{ John Buckman entry in Wikipedia, at http://en.wikipedia.org/wiki/John_Buckman. }~ + +Buckman, a former software programmer turned entrepreneur in his thirties, previously founded and ran Lyris Technologies, an e-mail list management company that he sold in 2005. In deciding to start Magnatune, he took note of the obvious realities that the music industry has tried to ignore: radio is boring, CDs cost too much, record labels exploit their artists, file sharing is not going to go away, people love to share music, and listening to music on the Internet is too much work. “I thought, why not make a record label that has a clue?” said Buckman.~{ John Buckman at Magnatune home page, at http://www.magnatune.com/info/why. }~ + +Well before the band Radiohead released its /{In Rainbows}/ album with a “pay what you want” experiment, Magnatune was inviting its customers to choose the amount they would be willing to pay, from $5 to $18, for any of Magnatune’s 547 albums. Buckman explains that the arrangement signals a respect for customers who, after all, have lots of free music choices. It also gives them a chance to express their appreciation for artists, who receive 50 percent of the sales price.
“It turns out that people are quite generous and they pay on average about $8.40, and they really don’t get anything more for paying more other than feeling like they’re doing the right thing,” said Buckman.~{ John Buckman, interview with Matthew Magee of Out-Law.com, radio podcast, September 13, 2007, at http://www.out-law.com/page-8468. }~ About 20 percent pay more than $12.~{ John Buckman at iCommons, June 15, 2007. For an extensive profile of Buckman and Magnatune, see http://www.openrightsgroup.org/creative business/index.php/John_Buckman:_Magnatune. }~ +={Radiohead} + +“The reality is today nobody really needs to pay for music at all,” he acknowledges. “If you choose to hit the ‘buy’ button at Magnatune then you’re one of the people who has decided to actually pay for music. Shouldn’t we reflect that honest behavior back and say, well, if you’re one of the honest people how much do you want to pay?”~{ John Buckman, interview with Matthew Magee, September 13, 2007. }~ The set-your-own-price approach is part of Magnatune’s larger strategy of building the business by cultivating open, interactive relationships with its customers and artists. “If you set up a trusting world,” explains Buckman, “you can be rewarded.” + +Magnatune’s business model embraces the openness of the Internet and makes it a virtue, rather than treating it as a bothersome liability that must be elaborately suppressed. All of Magnatune’s music is released as MP3 files, with no digital rights management, under a CC Attribution-NonCommercial-ShareAlike license. This means that customers can legally make their own remixes and covers of songs, and take samples, so long as the uses are noncommercial and carry the same CC license. Magnatune also invites customers to give free downloads of purchased music to three friends. Podcasters have free access to the entire Magnatune catalog. 
+ +By using a CC license, Magnatune saves a bundle by not having to oversee complex terms and conditions for usage of music. Nor does it have to maintain a DRM system and police the behavior of its customers, both of which squander a key marketing asset: consumer goodwill. Instead, the music circulates freely and, in so doing, expands public awareness of Magnatune’s 244 artists. + +Two-thirds of Magnatune’s revenues comes from licensing its music to films, ads, television, and shops. Like so many open business models, it has carved out a mid-tier niche between “expensive and proprietary” and “cheap and crummy.” Most mainstream music licensing involves either expensive, highly lawyered deals with record labels or insipid stock music from royalty-free CDs. Magnatune’s innovation is to offer high-quality music in multiple genres at flatrate licenses for sixteen different usage scenarios. The deals can be easily consummated via the Web; artists share in half the proceeds. No accounting flimflam. To date, Magnatune has licensed its music to more than one thousand indie films and many commercials. + +Magnatune is a small, fledgling enterprise in the $4 billion music industry. It does not have all the answers, and it may be sideswiped by bigger players at some point. But Magnatune is lean, nimble, profitable, and growing. It has shown how innovative business models can flourish in the open environment of the Internet. Unlike its bloated, besieged competitors, Magnatune is willing to listen closely to its customers, artists, and licensing clients. It is fair-minded and straightforward; it wants to share the wealth and let the music flow. + +2~ Open Networks Spur New Business Models +={open networks:new business models in+16;open business models:open networks and+16} + +Openness does not come intuitively to many businesses. Competitive advantage has long been associated with exclusive control and secrecy. 
But as the Internet’s power expands, conventional businesses are feeling pressures to rethink their “closed” business models. A new breed of “open businesses” is demonstrating that a reliance on open-source software, open content, and an ethic of transparency in dealings with all corporate stakeholders can be tremendously competitive. +={open business models:transparency in;transparency;Internet:rise of} + +Open businesses understand the Great Value Shift discussed in chapter 5 — that working through open networks and commons is likely to generate greater consumer attention, engagement, and loyalty — and thus sales — and may outperform a more exclusive regime of control. Working on an open network is also the best way for a company to get smarter faster, and to stay alert to changing market conditions. It bears noting that business models are not an either/or choice — that is, all open or all closed. There is a continuum of choices, as we will see below. Sometimes there are heated strategic and moral debates about what level of openness to adopt, yet the general trend in business today is clear: toward openness. +={Great Value Shift;open business models:Great Value Shift, and} + +Even as broadcast networks decry the posting of copyrighted television programs on YouTube, they clearly welcome the ratings spikes that ensue. Wireless telephony is fragmented among many proprietary systems, but pressures are now growing to make them compete on an open platform.~{ See, e.g., Walter S. Mossberg, “Free My Phone,” /{Wall Street Journal}/, October 22, 2007, p. R1. }~ European regulators are calling for “open document format” standards to prevent Microsoft from abusing its proprietary standards in its Office suite of software.
There are even calls for open standards for avatars in virtual worlds like Second Life, The Lounge, and Entropia Universe, so that our digital alter egos can glide from one virtual community to another.~{ Steve Lohr, “Free the Avatars,” /{New York Times}/, October 15, 2007. }~ +={YouTube;Microsoft:competition against;open business models:interoperability of+1} + +Why this inexorable trend toward openness? Because on open networks, excessive control can be counterproductive. The overall value that can be created through interoperability is usually greater than the value that any single player may reap from maintaining its own “walled network.”~{ See Elliot E. Maxwell, “Open Standards, Open Source, and Open Innovation: Harnessing the Benefits of Openness,” /{Innovations: Technology, Governance, Globalization}/ 1, no. 3 (Summer 2006), at http://www.emaxwell.net. }~ For a company to reap value from interoperability, however, it must be willing to compete on an open platform and it must be willing to share technical standards, infrastructure, or content with others. Once this occurs, proprietary gains come from competing to find more sophisticated ways to add value in the production chain, rather than fighting to monopolize basic resources. Advantage also accrues to the company that develops trusting relationships with a community of customers. +={open business models:value created in+9;value:creation of+9} + +Free software was one of the earliest demonstrations of the power of online commons as a way to create value. In his classic 1997 essay “The Cathedral and the Bazaar,” hacker Eric S. Raymond provided a seminal analysis explaining how open networks make software development more cost-effective and innovative than software developed by a single firm.~{ Eric Raymond, “The Cathedral and the Bazaar,” May 1997, at http://www.catb.org/~esr/writings/cathedral-bazaar. The essay has been translated into nineteen languages to date.
}~ A wide-open “bazaar” such as the global Linux community can construct a more versatile operating system than one designed by a closed “cathedral” such as Microsoft. “With enough eyes, all bugs are shallow,” Raymond famously declared. Yochai Benkler gave a more formal economic reckoning of the value proposition of open networks in his pioneering 2002 essay “Coase’s Penguin, or, Linux and the Nature of the Firm.”~{ Yochai Benkler, “Coase’s Penguin, or, Linux and the Nature of the Firm,” /{Yale Law Journal}/ 112, no. 369 (2002), at http://www.benkler.org/CoasesPen guin.html. }~ The title is a puckish commentary on how GNU/Linux, whose mascot is a penguin, poses an empirical challenge to economist Ronald Coase’s celebrated “transaction cost” theory of the firm. In 1937, Coase stated that the economic rationale for forming a business enterprise is its ability to assert clear property rights and manage employees and production more efficiently than contracting out to the marketplace. +={Benkler, Yochai:open networks, on+3;Raymond, Eric S.:“The Cathedral and the Bazaar”;free software:creation of value, and;Linux:open business models, and;Microsoft:competition against;Coase, Ronald;GNU/Linux:open business models, and;transaction costs:theory of;open business models:“transaction cost” theory, and} + +What is remarkable about peer production on open networks, said Benkler, is that it undercuts the economic rationale for the firm; commons-based peer production can perform certain tasks more efficiently than a corporation. Those tasks must be modular and divisible into small components and capable of being efficiently integrated, Benkler stipulated. The larger point is that value is created on open networks in very different ways than in conventional markets. Asserting proprietary control on network platforms may prevent huge numbers of people from giving your work (free) social visibility, contributing new value to it, or remixing it. 
“The only thing worse than being sampled on the Internet,” said Siva Vaidhyanathan, with apologies to Oscar Wilde, “is not being sampled on the Internet.” +={Vaidhyanathan, Siva} + +The /{New York Times}/'s experience with its paid subscription service, TimesSelect, offers a great example. The /{Times}/ once charged about fifty dollars a year for online access to its premier columnists and news archives. Despite attracting more than 227,000 subscribers and generating about $10 million a year in revenue, the /{Times}/ discontinued the service in 2007.~{ Richard Pérez-Peña, “Times to Stop Charging for Parts of Its Web Site,” /{New York Times}/, September 18, 2007. }~ A /{Times}/ executive explained that lost subscription revenues would be more than offset by advertising to a much larger online readership with free access. The /{Financial Times}/ and the /{Economist}/ have dropped their paywalls, and the /{Wall Street Journal}/ in effect has done so by allowing free access via search engines and link sites. From some leading citadels of capitalism, a rough consensus had emerged: exclusivity can /{decrease}/ the value of online content.~{ Frank Ahrens, “Web Sites, Tear Down That Wall,” /{Washington Post}/, November 16, 2007, p. D1. See also Farhad Manjoo, “The Wall Street Journal’s Website Is Already (Secretly) Free,” /{Salon}/, March 21, 2008, at http://machinist.salon .com/blog/2008/03/21/wsj/index.html. }~ +={New York Times} + +While enormous value can be created on open networks, it can take different forms, notes David P. Reed, who studies information architectures.~{ David P. Reed, “The Sneaky Exponential — Beyond Metcalfe’s Law to the Power of Community Building,” at http://www.reed.com/Papers/GFN/ reedslaw.html. }~ One of the most powerful types of network value is what Reed calls “Group-Forming Networks,” or GFNs — or what Benkler might call commons-based peer production and I would call, less precisely, the commons. 
Reed talks about “scale-driven value shifts” that occur as a network grows in size. Greater value is created as a network moves from a broadcast model (where “content is king”) to peer production (where transactions dominate) and finally, to a group-forming network or commons (where jointly constructed value is produced and shared). +={Reed, David P.;Benkler, Yochai:The Wealth of Networks;commons-based peer production+3;group-forming networks (GFNs)} + +It is unclear, as a theoretical matter, how to characterize the size and behavior of various “value networks” on the Web today. For simplicity’s sake — and because Web platforms are evolving so rapidly — I refer to two general value propositions, Web 2.0 and the commons. Web 2.0 is about creating new types of value through participation in distributed open networks; the commons is a subset of Web 2.0 that describes fairly distinct, self-governed communities that focus on their own interests, which usually do not involve moneymaking. +={Web 2.0:open business, and+4} + +The rise of Web 2.0 platforms and the commons clearly has some serious implications for business strategy and organization. Just consider how Craigslist is displacing millions of dollars of classified newspaper ads; how open-access journals are threatening the economic base of commercial academic journals; and how user-generated content is competing with network television. At the same time, activities that once occurred through informal social means (finding a date, organizing a gathering, obtaining word-of-mouth recommendations) are increasingly becoming commercial endeavors on the Web. Especially when the commons has strong mechanisms to preserve its value-creating capacity, such as the GPL, open networks are helping to convert more market activity into commons-based activity, or at least shifting the boundary between commodity markets and proprietary, high-value-added markets.
As this dynamic proceeds, the social and the commercial are blurring more than ever before. + +Many “value chains” that have long sustained conventional businesses are being disrupted. As described in chapter 5, more efficient types of distributed media are disrupting the production/distribution chain that sustains Centralized Media. The Long Tail lets online consumers “pull” niche products that they want rather than enduring a relentless marketing “push” of products they don’t want. Commons-based peer production is a nonmarket version of the Long Tail: dispersed communities of people with niche interests can find one another, form social communities, bypass the market, and collaborate to create the niche resources that they want. +={Long Tail;Centralized Media:production/distribution chain of} + +The question facing many businesses is how to develop stable, long-term business models that can coexist with productive commons, if not leverage them for market gain. Their goal is to find ingenious ways to “monetize” the social relationships of online communities (by selling targeted advertising, personal data, niche products, etc.). Open businesses aim to do this in a respectful, public-spirited way; other, more traditional firms may have fewer scruples because, for them, “it’s all about the money.” + +But here’s the rub: a company can go only so far in monetizing the value-generating capacities of a commons without enclosing it or enraging the commoners. A company may consider itself shrewd for acquiring the copyrights for user-generated content, for example, or for blocking user access to third-party widgets that it disapproves of.~{ See, e.g., Paula Lehman, “MySpace Plays Chicken with Users,” BusinessWeek Online, April 12, 2007. }~ But participants in Web 2.0 communities will protest or simply leave if a corporate host starts to dictate obnoxious policies. 
A company can try to run its Web 2.0 platform as a feudal fiefdom, but it risks inciting users to revolt and start their own (nonmarket) online communities, reinventing themselves as commoners. Although there is an implicit social ethic to Web 2.0 platforms, none is necessarily “free” in the Stallman sense of “freedom.” +={Stallman, Richard:freedom, and} + +Unfortunately, there is no clear consensus about how exactly to define an “open business.” Accordingly, assessments of their social, political, or economic virtue can be slippery. Some analysts such as Henry Chesbrough regard a business as “open” if it relaxes or modifies its intellectual property controls, or changes its organizational practices, as a way to reap value from open networks.~{ Henry Chesbrough, /{Open Business Models: How to Thrive in the New Innovation Landscape}/ (Cambridge, MA: Harvard Business School Press, 2006). }~ Others believe that an open business should use open-source software, and support the copying and sharing of works through CC or other open-content licenses. Sometimes the idea of open business is yoked to a vaguely defined notion of “social responsibility.” It is not always clear whether this ethic is a moral gloss or a structural feature, but in general open businesses strive to practice a more open, accountable, and socially enlightened vision of commerce. +={Chesbrough, Henry;open business models:definition, no consensus+2|social responsibility, and} + +One champion of this vision is OpenBusiness, a Web site jointly created by Creative Commons UK in partnership with CC Brazil and the FGV Law School in Rio de Janeiro, Brazil. The mission of OpenBusiness is to “analyze and explain models by which people can share their knowledge and creativity with others whilst at the same time enjoying the more traditional incentives of profit, individual success and societal advancement.”~{ http://www.openbusiness.org. 
}~ By its lights, an open business is commons-friendly if it is committed to “transparency,” “sustainable systems,” and to putting “the health and welfare of people above everything else.” An open business also tries to generate as many “positive externalities” as possible — knowledge, social relationships, revenues — which it is willing to share with its stakeholders. +={OpenBusiness;commoners:sharing by;open business models:international} + +It is perhaps best to approach open businesses as an eclectic social phenomenon in search of a theory. As it has been said about Wikipedia, “It works in practice, but not in theory.”~{ From blog of Professor Karim Lakhani, Harvard Business School, April 27, 2007. }~ It is risky to overtheorize phenomena that are still fluid and emerging. Still, specific examples of open business can help us understand some basic principles of open networks, and how some businesses are using CC licenses to build innovative sorts of enterprises. +={Wikipedia:social movement, as} + +2~ Share the Wealth, Grow a Commercial Ecosystem +={open business models:commercial ecosystem, as+5} + +The idea that a company can make money by giving away something for free seems so counterintuitive, if not ridiculous, that conventional business people tend to dismiss it. Sometimes they protesteth too much, as when Microsoft’s Steve Ballmer compared the GNU GPL to a “cancer” and lambasted open-source software as having “characteristics of communism.”~{ Joe Wilcox and Stephen Shankland, “Why Microsoft is wary of open source,” CNET, June 18, 2001; and Lea, Graham, “MS’ Ballmer: Linux is communism,” /{Register}/ (U.K.), July 31, 2000. }~ In truth, “sharing the wealth” has become a familiar strategy for companies seeking to develop new technology markets. The company that is the first mover in an emerging commercial ecosystem is likely to become the dominant player, which may enable it to extract a disproportionate share of future market rents. 
Giving away one’s code or content can be a great way to become a dominant first mover. +={Ballmer, Steve;General Public License (GPL):critics of;open business models:first movers+1;Microsoft:competition against+1} + +Netscape was one of the first to demonstrate the power of this model with its release of its famous Navigator browser in 1994. The free distribution to Internet users helped develop the Web as a social and technological ecosystem, while helping fuel sales of Netscape’s Web server software. (This was before Microsoft arrived on the scene with its Internet Explorer, but that’s another story.) At a much larger scale, IBM saw enormous opportunities for building a better product by using GNU/Linux. The system would let IBM leverage other people’s talents at a fraction of the cost and strengthen its service relationships with customers. The company now earns more than $2 billion a year from Linux-related services.~{ Yochai Benkler, /{The Wealth of Networks}/ (Yale University Press, 2006), Figure 2.1 on p. 47. }~ +={GNU/Linux:IBM, and;IBM:GNU/Linux, and;Netscape;World Wide Web:social activity on} + +Today, sharing and openness are key to many business strategies. “Open Source: Now It’s an Ecosystem,” wrote /{BusinessWeek}/ in 2005, describing the “gold rush” of venture capital firms investing in startups with open-source products. Most of them planned to give away their software via the Web and charge for premium versions or for training, maintenance, and support.~{ “Open Source: Now It’s an Ecosystem,” BusinessWeek Online, October 3, 2005. }~ + +The pioneers in using open platforms to develop commercial ecosystems on the Internet are Amazon, Google, Yahoo, and eBay. Each has devised systems that let third-party software developers and businesses extend their platform with new applications and business synergies. 
Each uses systems that dynamically leverage users’ social behaviors and so stimulate business — for example, customer recommendations about books, search algorithms that identify the most popular Web sites, and reputation systems that enhance consumer confidence in sellers. Even Microsoft, eager to expand the ecology of developers using its products, has released 150 of its source code distributions under three “Shared Source” licenses, two of which meet the Free Software Foundation’s definition of “free.”~{ Microsoft’s Shared Source Licenses, at http://www.microsoft.com/resources/ sharedsource/licensingbasics/sharedsourcelicenses.mspx; see also Lessig blog, “Microsoft Releases Under ShareAlike,” June 24, 2005, at http://lessig .org/blog/2005/06/microsoft_releases_under_share.html. }~ +={Amazon;eBay;Microsoft:“Shared Source” licenses of;Yahoo;Google;World Wide Web:social activity on} + +More recently, Facebook has used its phenomenal reach — more than 80 million active users worldwide — as a platform for growing a diversified ecology of applications. The company allows software developers to create custom software programs that do such things as let users share reviews of favorite books, play Scrabble or poker with others online, or send virtual gifts to friends. Some apps are just for fun; others are the infrastructure for independent businesses that sell products and services or advertise. In September 2007, Facebook had more than two thousand software applications being used by at least one hundred people.~{ Vauhini Vara, “Facebook Gets Help from Its Friends,” Wall Street Journal, June 22, 2007. See also Riva Richmond, “Why So Many Want to Create Facebook Applications,” /{Wall Street Journal}/, September 4, 2007. }~ +={Facebook} + +2~ Open Content as a Gateway to Commercial Opportunities + +Of course, not every business can own a major platform, as Google, eBay, and Facebook do. Still, there are many other opportunities. 
One of the most popular is to use open platforms to attract an audience, and then strike a deal with an advertiser or commercial distributor, or sell premium services (“get discovered”). Another approach is to use open content to forge a spirited community to which things may be sold (“build a market on a commons”). +={eBay;Facebook+1;Google} + +!{/{Get discovered.}/}! This dynamic has been played out countless times on YouTube, MySpace, Facebook, and other high-traffic social networking sites. An unknown remix artist suddenly becomes famous when his track is discovered by a network swarm: the story of DJ Danger Mouse that we saw in chapter 6. A band attracts a huge following through viral word of mouth: the story of Jake Shapiro and Two Ton Shoe’s stardom in South Korea. There are even calculated scams to get discovered, like the lonelygirl15 series of videos purportedly shot by a teenage girl in her bedroom, which became a huge Internet sensation in 2006.~{ Joshua Davis, “The Secret World of Lonelygirl,” Wired, December 2006, at http://www.wired.com/wired/archive/14.12/lonelygirl.html. }~ +={DJ Danger Mouse;MySpace;YouTube;Shapiro, Jake;Two Ton Shoe;Internet:virtual word of mouth on;open business models:using open platforms to get discovered+15} + +As any television network will tell you, the capacity to aggregate audiences is worth a lot of money. The customary way of monetizing this talent is to sell advertising. Or one can parlay newfound name recognition into side deals with the mass media, which have always depended upon “star power” as a draw. Thus, Ana Marie Cox was able to parlay her notoriety as a political gossip on her Wonkette blog into a job as Washington editor of /{Time}/ magazine. Perez Hilton, a Hollywood blogger who attracted a following, was offered a lucrative perch at the E! cable television channel. 
We saw in chapter 6 how producer Samuli Torssonen’s /{Star Wreck}/ attracted millions of Internet viewers, enabling him to strike a deal with Universal Studios to distribute a DVD version. With the same visions of stardom, or at least paying gigs, in mind, thousands of bands now have fan sites, music downloads, and banner ads on MySpace and other sites to promote themselves.~{ Elizabeth Holmes, “Famous, Online,” /{Wall Street Journal}/, August 8, 2006. }~ +={Cox, Ana Marie;Hilton, Perez;MySpace;Star Wreck Studios;Torssonen, Samuli} + +The CC NonCommercial license is one way to help pursue the “get discovered” business strategy. The license allows authors to seek a global Internet audience without having to cede rights to any commercial opportunities. It is not, however, a terribly reliable way to make money, which is why some artists, especially musicians, find fault with the implicit promise of the NC license. Many serious artists regard the NC license as too speculative a mechanism to get paid for one’s creative work. It is a fair complaint, as far as it goes. The real problem is the closed, highly concentrated music industry, which has a hammerlock on marketing, radio play, and distribution. Newcomers and mid-tier talent cannot get past the corporate gatekeepers to reach an audience, let alone make money. + +In an attempt to bridge the sharing economy with the market, and thereby open up some new channels of commercial distribution for commoners, the Creative Commons in late 2007 introduced a new protocol, CC+. The new project aims to make it easier for the owners of NC-licensed content to signal that agreements, products, or services beyond the scope of the CC licenses are on offer — for example, commercial licensing, warranties, or higher-quality copies. 
A photographer who has hundreds of NC-licensed photos on Flickr would be able to continue to let people use those photos for noncommercial purposes — but through CC+, he could also sell licensing rights to those who want to use the photos for commercial purposes. CC+ is a metadata architecture and standard that allows third-party intermediaries to develop services for consummating commercial transactions. People can use CC+ as a simple “click-through” mechanism for acquiring commercial rights for music, photos, text, and other content. +={Creative Commons (CC):CC+, and+2} + +One of the earliest “copyright management” companies to take advantage of the CC+ standard was RightsAgent, a Cambridge, Massachusetts, company founded by Rudy Rouhana. RightsAgent essentially acts as a go-between for people who create NC-licensed works on the Web and those who wish to buy rights to use them for commercial purposes. Just as PayPal facilitates the exchange of money on the Internet, so RightsAgent aspires to be a paid intermediary for facilitating the sale of user-generated content. +={Rouhana, Rudy;RightsAgent} + +The rise of CC+ and associated companies brings to mind Niva Elkin-Koren’s warning that the Creative Commons licenses can be a slippery slope that merely promotes a property-oriented, transactional mentality — the opposite of the commons. On the other hand, many people operating in the noncommercial sharing economy, such as musicians and photographers, have long complained that, as much as they enjoy participating in the commons, they still need to earn a livelihood. +={Elkin-Koren, Niva;Creative Commons (CC) licenses:critics of} + +Revver is another company that has developed an ingenious way to promote the sharing of content, yet still monetize it based on the scale of its circulation. Revver is a Los Angeles–based startup that hosts user-generated video. All videos are embedded with a special tracking tag that displays an ad at the end. 
Like Google’s AdWords system, which charges advertisers for user “click-throughs” on ad links adjacent to Web content, Revver charges advertisers for every time a viewer clicks on an ad. The number of ad views can be tabulated, and Revver splits ad revenues 50-50 with video creators. Key to the whole business model is the use of the CC Attribution-NonCommercial-NoDerivatives license. The license allows the videos to be legally shared, but prohibits anyone from modifying them or using them for commercial purposes. +={Revver+2;Google;videos and film+2;Internet:videos and films on+2;World Wide Web:videos and film on+2} + +One of the most-viewed videos on Revver sparked a minor pop trend. It showed kids dropping Mentos candies into bottles of Coca-Cola, which produces an explosive chemical reaction. The video is said to have generated around $30,000.~{ Revver entry at Wikipedia, at http://en.wikipedia.org/wiki/Revver. }~ So is new media going to feature silly cat videos and stupid stunts? Steven Starr, a co-founder of Revver, concedes the ubiquity of such videos, but is quick to point to “budding auteurs like Goodnight Burbank, Happy Slip, Studio8 and LoadingReadyRun, all building audiences.” He also notes that online, creators “can take incredible risks with format and genre, can grow their own audience at a fraction of network costs, can enjoy free syndication, hosting, audience-building and ad services at their disposal.”~{ Interview with Steven Starr, “Is Web TV a Threat to TV?” Wall Street Journal, August 7, 2007, at http://online.wsj.com/article/SB118530221391976425.html. }~ +={Starr, Steven} 
Blip.tv espouses an open business ethic, with shout-outs to “democratization, openness, and sustainability.” While there is a tradition for companies to spout their high-minded principles, blip.tv puts some bite into this claim by offering an open platform that supports many video formats and open metadata standards. And it allows content to be downloaded and shared on other sites. Users can also apply Creative Commons licenses to their videos, which can then be identified by CC-friendly search engines. For all these reasons, Lessig has singled out blip.tv as a “true sharing site,” in contrast to YouTube, which he calls a “faking sharing site” that “gives you tools to /{make}/ it seem as if there’s sharing, but in fact, all the tools drive traffic and control back to a single site.”~{ Lessig blog post, “The Ethics of Web 2.0,” October 20, 2006, at http:// www.lessig.org/blog/archives/003570.shtml. }~ +={blip.tv+1;YouTube+1;Web 2.0:open business, and+3;open business models:open networks and;Lessig, Lawrence:open business sites, and+4} + +Lessig’s blog post on blip.tv provoked a heated response from blogger Nicholas Carr, a former executive editor of the /{Harvard Business Review}/. The contretemps is worth a close look because it illuminates the tensions between Web 2.0 as a business platform and Web 2.0 as a commons platform. In castigating YouTube as a “fake sharing site,” Carr accused Lessig of sounding like Chairman Mao trying to root out counterrevolutionary forces (that is, capitalism) with “the ideology of digital communalism.” +={Carr, Nicholas+2;Web 2.0:commons platform, as+3} + +_1 Like Mao, Lessig and his comrades are not only on the wrong side of human nature and the wrong side of culture; they’re also on the wrong side of history. They fooled themselves into believing that Web 2.0 was introducing a new economic system — a system of “social production” — that would serve as the foundation of a democratic, utopian model of culture creation. 
They were wrong. Web 2.0’s economic system has turned out to be, in effect if not intent, a system of exploitation rather than a system of emancipation. By putting the means of production into the hands of the masses but withholding from those same masses any ownership over the product of their work, Web 2.0 provides an incredibly efficient mechanism to harvest the economic value of the free labor provided by the very, very many and concentrate it into the hands of the very, very few. + +_1 The Cultural Revolution is over. It ended before it even began. The victors are the counterrevolutionaries. And they have $1.65 billion [a reference to the sale price of YouTube to Google] to prove it.~{ Nicholas G. Carr, “Web 2.0lier than Thou,” Rough Type blog, October 23, 2006. Joichi Ito has a thoughtful response in his blog, “Is YouTube Web 2.0?” October 22, 2006, at http://joi.ito.com/archives/2006/10/22/is_youtube _web_20.html; and Lessig responded to Carr in his blog, at http://lessig .org/blog/2006/10/stuck_in_the_20th_century_or_t.html. The “communism discourse” persists, and not just among critics of free culture. Lawrence Liang of CC India used this epigraph in a book on open-content licenses: “There is a specter haunting cultural production, the specter of open content licensing.” which he attributes to “Karl Marx (reworked for the digital era).” From Liang, /{Guide to Open Content Licenses}/ (Rotterdam, Netherlands: Piet Zwart Institute, Institute for Postgraduate Studies and Research, Willem de Kooning Academy Hogeschool, 2004). }~ + +Lessig’s response, a warm-up for a new book, /{Remix}/, released in late 2008, pointed out that there are really /{three}/ different economies on the Internet — commercial, sharing, and hybrid. 
The hybrid economy now emerging is difficult to understand, he suggested, because it “neither gives away everything, nor does it keep everything.” The challenge of open business models, Lessig argues, is to discover the “golden mean.” +={Lessig, Lawrence:Remix;Internet:hybrid economy enabled by+1|sharing economy of+1|commercial economy of+1} + +It can be hard to conceptualize a “hybrid sector” when we are accustomed to dividing the world into “private” and “public” sectors, and “profit-making” and “nonprofit” enterprises. Open business models quickly run up against deep-seated prejudices that associate property with “freedom” and sharing with “communism.” How can there be a middle ground? Although some like Nicholas Carr seem to hanker for the predatory enterprises of an earlier capitalism, only this time on Web 2.0 platforms, that is not likely to happen in a world of distributed computing. Power is too dispersed for predators to survive very long, and besides, the commoners are too empowered. + +!{/{ Build a market on a commons.}/}! A number of online business models are based on building communities of deep social affection and respect, and then using the community as a platform for selling merchandise, advertising, or products. Interestingly, some of the most successful “customer relationship” models revolve around music. The Grateful Dead’s strategy of building a business around a rabid fan base (discussed in chapter 6) occurred well before the Internet became prevalent. It is paradigmatic of the digital age, nonetheless. If the band had locked up its music and prohibited free taping of its concert performances and sharing of homemade tapes, it would have effectively weakened the fan base that sustained its business model. Sharing concert tapes actually made Deadheads more inclined to buy t-shirts, official music releases, and concert tickets because the tape sharing deepened the community’s identity and quasi-spiritual ethic. 
The Grateful Dead’s focus on touring as opposed to studio albums not only intensified the sharing ethic of its fan base, it obliged the band to “keep on truckin’ ” in order to keep earning money. +={commons:building a market on+11;open business models:building a market on a commons+11;communities:commons, and;Grateful Dead;music:market building on a commons+11} + +The Brazilian /{tecnobrega}/ music scene discussed briefly in chapter 7 is another example of artists making money through respectful, in-person relationships with their fans. In the town of Belém, Brazil, /{tecnobrega}/ artists release about four hundred CDs every year, but none are sold in stores; street vendors sell them for $1.50 apiece. The CDs function mostly as advertising for live “sound system” parties on the outskirts of town that attract as many as five thousand people and use state-of-the-art audio technology. Immediately following the performances, some artists also sell a significant number of “instant CDs” that are of better quality (and more expensive) than those sold in the streets. (Interestingly, street sales do not compete with after-concert sales.) +={Brazil:tecnobrega music scene in+6} + +“In their live presentations, the tecnobrega DJ’s usually acknowledge the presence of people from various neighborhoods, and this acknowledgement is of great value to the audience, leading thousands to buy copies of the recorded live presentation,” said Ronaldo Lemos of CC Brazil, who has studied Brazil’s record industry.~{ Interview with Ronaldo Lemos, September 15, 2006. }~ The same basic model is also at work in other grassroots musical genres in Brazil, such as baile funk, which originated in the shantytowns of Rio de Janeiro. +={Lemos da Silva, Ronaldo+4} + +Artists make most of their money from these live performances, not from CDs, said Lemos. 
Bands earn an average of $1,100 per solo performance at these events, and $700 when playing with other bands — this, in a region where the average monthly income is $350. Altogether, Lemos estimates that the sound system parties as a business sector earn $1.5 million per month, on fixed assets of $8 million. + +“The band Calypso has been approached several times by traditional record labels,” said Lemos, “but they turned down all the offers. The reason is that they make more money by means of the existing business model. In an interview with the largest Brazilian newspaper, the singer of the band said, ‘We do not fight the pirates. We have become big /{because}/ of piracy, which has taken our music to cities where they would never have been.’ ” Calypso has sold more than 5 million albums in Brazil and is known for attracting as many as fifty thousand people to its concerts, Lemos said.~{ Ronaldo Lemos, “From Legal Commons to Social Commons: Developing Countries and the Cultural Industry in the 21st Century,” 2006, at http://www.icommons.org/resources/from-legal-commons-to-social-comm ons-brazil-and-the-cultural-industry-1. See Paula Martini post on iCommons blog, “Over the Top: The New (and Bigger) Cultural Industry in Brazil,” September 28, 2007, at http://www.icommons.org/articles/over-the-top-thenew-and-bigger-cultural-industry-in-brazil. }~ +={piracy} + +Another highly successful open business model in the Brazilian music scene is TramaVirtual, an open platform on which more than 15,000 musicians have uploaded some 35,000 albums. Fans can then download the music for free. While this does not sound like a promising business proposition, it makes a lot of sense in the context of Brazil’s music marketplace. Major record labels release a minuscule number of new Brazilian music CDs each year, and they sell for about $10 to $15.~{ Ibid. }~ Only the cultured elite can afford music CDs, and the native musical talent — which is plentiful in Brazil — has no place to go. 
With such a constricted marketplace, TramaVirtual has become hugely popular by showcasing new and interesting music. +={TramaVirtual+2} + +TramaVirtual’s artistic and social cachet — itself the product of open sharing in a commons — has enabled it to develop a highly respected brand identity. “By exploiting the trademark,” said Lemos, “Trama has been able to create parallel businesses that work with music, but not in the same way that a record label does.”~{ Interview with Ronaldo Lemos, November 6, 2006. }~ For instance, Trama created a business that sponsors free concerts at universities under its trademark sponsorship. It then sells marketing rights at the concerts to cosmetic makers and car companies. Musicians have gained wide public exposure through Trama, and then used that association to negotiate international record and marketing deals for themselves. CSS (Cansei de Ser Sexy) won a record contract with the American label Sub Pop, for example. + +For the past five years, a related business model for music on an international scale has been emerging in Luxembourg. In only three years, Jamendo has amassed a huge international following in much the same way as TramaVirtual — by attracting music fans to its open platform for free music sharing. (The name /{Jamendo}/ is a mix of the words /{jam}/ and /{crescendo}/.) The site is not a music retailer but a repository for free music — with a business model overlay to pay the bills. Jamendo’s purpose is not to maximize returns to shareholders, in other words, but to service musicians and fans in a self-sustaining way. It makes most of its money from “tip jar” donations from fans and from advertising on the Web pages and streamed music. Ad revenues are shared 50-50 with artists, and any donations are passed along to individual artists, minus a small transaction fee. +={Jamendo+4} + +The Jamendo community is sizable and growing. By 2008 it had more than 357,000 active members from around the world. 
Part of the draw is the catalog of more than 10,000 albums, all free. Unlike Magnatune, Jamendo does not select the artists that are featured on its site; everyone is welcome to upload his or her music. To help fans identify music they like, the site offers many sophisticated tools. There are some 60,000 member-written reviews, custom playlists, community ratings of albums, and “folksonomy” tags for albums and songs.~[* Folksonomies, a cross of /{taxonomy}/ and /{folk}/, are essentially user-generated tags attached to each song and album, which enables categories of music to emerge from the “bottom up,” as fans regard the music, rather than through top-down marketing categories.]~ Fans are /{urged}/ to download music through peer-to-peer networks such as BitTorrent and eMule because it reduces Jamendo’s bandwidth expenses. +={Magnatune;music:“folksonomy” tags in} + +“Users can listen, download, review, remix, and ‘widgetize,’” said Sylvain Zimmer, the founder and chief technology officer of Jamendo. As part of its commitment to musicians, the site has a forum for artists and listings of concerts, as well as open APIs~[* An API is an “application programming interface,” a set of protocols that enable a software application to operate on a computer operating system, library, or service. Many companies use proprietary APIs to retain control over who may develop applications that will interoperate with their software. Other companies that wish to encourage development of compatible applications — and thus promote a software ecosystem entwined with the operating system or service — use open APIs.]~ so the Jamendo ecosystem can be integrated into other software. +={Zimmer, Sylvain+2;APIs (application programming interfaces)} 
(These are just a few of the scores of folksonomy tags that can be used to browse the catalog.) “We are a Babel, not a label,” said Zimmer, who reports that India and Japan are heavy downloaders of Jamendo music. Complete, official versions of the site are available in French, the original language for the site, and now English and German. Incomplete versions of the site are available in Spanish, Polish, Portuguese, Russian, Turkish, Italian, Swedish, Czech, and Ukrainian. +={music:“folksonomy” tags in} + +Virtually all the albums on Jamendo use one or more of the six basic CC licenses. The CC ethic is a perfect match for the company’s community-driven business model, said Zimmer. “The best way of detecting CC-incompatible content and commercial uses of NC-licensed work is the community. The Creative Commons makes the community feel more confident and active.”~{ Sylvain Zimmer of Jamendo, presentation at iCommons Summit, Dubrovnik, Croatia, June 15, 2007. }~ He adds that if the site’s managers run too many ads, “the community will tell you.” + +2~ Commoners as Co-creators of Value +={open business models:value created in+13;value:creation of+13} + +For businesses operating on open networks, it is a mistake to regard people merely as customers; they are collaborators and even coinvestors. As more companies learn to interact closely with their customers, it is only natural that conversations about the product or service become more intimate and collaborative. The roles of the “consumer” and “producer” are starting to blur, leading to what some business analysts call the “prosumer”~{ Don Tapscott and Anthony D. 
Williams, /{Wikinomics: How Mass Collaboration Changes Everything}/ (New York: Portfolio, 2006), chapter 5, “The Prosumers.” }~ and the “decentralized co-creation of value.”~{ David Bollier, /{The Rise of Collective Intelligence: Decentralized Co-creation of Value as a New Paradigm of Commerce and Culture}/ (Washington, DC: Aspen Institute Communications and Society Program, 2008).}~ The basic idea is that online social communities are becoming staging areas for the advancement of business objectives. Businesses see these communities as cost-effective ways to identify promising innovations, commercialize them more rapidly, tap into more reliable market intelligence, and nurture customer goodwill. + +Amateurs who share with one another through a loose social commons have always been a source of fresh ideas. Tech analyst Elliot Maxwell (citing Lessig) notes how volunteers helped compile the /{Oxford English Dictionary}/ by contributing examples of vernacular usage; how the Homebrew Computer Club in the San Francisco Bay area developed many elements of the first successful personal computer; and how sharing among auto enthusiasts helped generate many of the most important early automotive innovations.~{ Elliot Maxwell, “Open Standards, Open Source, and Open Innovation: Harnessing the Benefits of Openness,” /{Innovations: Technology, Governance, Globalization}/ 1, no. 3 (Summer 2006), at http://www.emaxwell.net, p. 150. }~ In our time, hackers were the ones who developed ingenious ways to use unlicensed electromagnetic spectrum as a commons, which we now know as Wi-Fi. They tinkered with the iPod to come up with podcasts, a new genre of broadcasting that commercial broadcasters now emulate.~{ Elliot E. Maxwell drew my attention to these examples in his excellent essay “Open Standards, Open Source, and Open Innovation.” }~ Numerous self-organized commons have incubated profitable businesses. 
Two movie buffs created the Internet Movie Database as separate Usenet newsgroups in 1989; six years later they had grown so large that they had merged and converted into a business that was later sold to Amazon.~{ Wikipedia entry, IMDB, at http://en.wikipedia.org/wiki/Internet_Movie _Database. }~ The Compact Disc Database was a free database of software applications that looks up information about audio CDs via the Internet. It was originally developed by a community of music fans as a shared database, but in 2000 it had grown big enough that it was sold and renamed Gracenote.~{ Wikipedia entry, CDDB, at http://en.wikipedia.org/wiki/CDDB. }~ +={Amazon;Gracenote;Homebrew Computer Club;iPod;Maxwell, Elliot;Oxford English Dictionary;Wi-Fi;hackers:community of;commons:sources of new ideas, as+11} + +A commons can be highly generative because its participants are tinkering and innovating for their own sake — for fun, to meet a challenge, to help someone out. Amateurs are not constrained by conventional business ideas about what may be marketable and profitable. They do not have to meet the investment expectations of venture capitalists and Wall Street. Yet once promising new ideas do surface in the commons, market players can play a useful role in supplying capital and management expertise to develop, improve, and commercialize an invention. + +Because online commons are such a rich source of new ideas, the most farsighted companies are trying to learn how they might be harnessed to help them innovate and compete more effectively. MIT professor Eric von Hippel is one of the foremost researchers of this process. 
His 2005 book /{Democratizing Innovation}/ describes how the leading participants in high-performance sports — extreme skiing, mountain biking, skateboarding, surfing, and hot-rodding — are forming “innovation communities” that work closely with manufacturers.~{ Eric von Hippel, /{Democratizing Innovation}/ (Cambridge, MA: MIT Press, 2005), available at http://mitpress.mit.edu/democratizing_innovation_pdf. }~ The most active practitioners of these sports are intimately familiar with the equipment and have their own imaginative ideas about what types of innovations the sport needs. Indeed, many of them have already jerry-rigged their own innovations — better cockpit ventilation in sailplanes, improved boot and bindings on snowboards, a method for cutting loose a trapped rope used by canyon climbers. For companies willing to listen to and collaborate with users, says von Hippel, “communities of interest are morphing into communities of creation and communities of production.” +={von Hippel, Eric+1} + +“Users that innovate can develop exactly what they want, rather than relying on manufacturers to act as their (often very imperfect) agents,” von Hippel writes. “Moreover, individual users do not have to develop everything they need on their own: they can benefit from innovations developed and freely shared by others.”~{ Ibid., p. 1 }~ Besides finding empirical examples of this trend, von Hippel has developed a theoretical vocabulary for understanding how collaborative innovation occurs. He probes the user motivations for “free revealing” of their knowledge, the attractive economics that fuel “users’ low-cost innovation niches,” and the public policies that sometimes thwart user-driven innovation (patent rights for a field may be fragmented, anticopying restrictions such as the Digital Millennium Copyright Act may prevent user tinkering, etc.). 
+={Digital Millennium Copyright Act (DMCA) [1998]} + +User-driven innovation is not as esoteric as the “extreme sports” examples may suggest. It is, in fact, a growing paradigm. In one of the more celebrated examples, Lego, the Danish toymaker, invited some of its most fanatic users to help it redesign its Mindstorms robotics kit. The kits are meant to let kids (and adults) build a variety of customized robots out of a wild assortment of plastic Lego pieces, programmable software, sensors, and motors.~{ Tapscott and Williams, /{Wikinomics}/, pp. 130–31. }~ In 2004, when some Lego users reverse-engineered the robotic “brain” for the Mindstorms kit and put their findings on the Internet, Lego at first contemplated legal action. Upon reflection, however, Lego realized that hackers could be a valuable source of new ideas for making its forthcoming Mindstorms kit more interesting and cool. +={Lego+1;hackers:innovations by+1} + +Lego decided to write a “right to hack” provision into the Mindstorms software license, “giving hobbyists explicit permission to let their imaginations run wild,” as Brendan I. Koerner wrote in /{Wired}/ magazine. “Soon, dozens of Web sites were hosting third-party programs that help Mindstorms users build robots that Lego had never dreamed of: soda machines, blackjack dealers, even toilet scrubbers. Hardware mavens designed sensors that were far more sophisticated than the touch and light sensors included in the factory kit.”~{ Brendan I. Koerner, “Geeks in Toyland,” /{Wired}/, February 2006. }~ It turns out that not only are Lego fans happy to advise the company, the open process “engenders goodwill and creates a buzz among the zealots, a critical asset for products like Mindstorms that rely on word-of-mouth evangelism,” said Koerner. 
In the end, he concluded, the Mindstorm community of fanatics has done “far more to add value to Lego’s robotics kit than the company itself.” +={Koerner, Brendan I.;Internet:virtual word of mouth on} + +Another improbable success in distributed, user-driven innovation is Threadless, a Chicago-based t-shirt company. Threadless sells hundreds of original t-shirt designs, each of which is selected by the user community from among more than eight hundred designs submitted every week. The proposed designs are rated on a scale of one to five by the Web site’s more than 600,000 active users. Winners receive cash awards, recognition on the Web site, and their names on the t-shirt label. Every week, Threadless offers six to ten new t-shirts featuring the winning designs. +={Threadless+1} + +In 2006, the company sold more than 1.5 million t-shirts without any traditional kind of marketing. Its business model is so rooted in the user community that Threadless co-founders Jake Nickell and Jacob DeHart have declined offers to sell their t-shirts through conventional, big-name retailers. Threadless’s business model has helped it overcome two major challenges in the apparel industry, write Harvard Business School professor Karim R. Lakhani and consultant Jill A. Panetta — the ability “to attract the right design talent at the right time to create recurring fashion hits,” and the ability “to forecast sales so as to be better able to match production cycles with demand cycles.”~{ Karim R. Lakhani and Jill A. Panetta, “The Principles of Distributed Innovation,” Research Publication No. 2007-7, Berkman Center for Internet & Society, Harvard Law School, October 2007, at http://papers.ssrn.com/abstract _id=1021034. See also Darren Dahl, “Nice Threads,” /{Southwest Airlines Spirit}/, December 2006. 
}~ +={DeHart, Jacob;Nickell, Jake;Lakhani, Karim R.;Panetta, Jill A.} + +A number of companies have started successful enterprises based on the use of wikis, the open Web platforms that allow anyone to contribute and edit content and collaborate. Evan Prodromou, the founder of Wikitravel, a free set of worldwide travel guides, has identified four major types of wiki businesses: service providers who sell access to wikis (Wikispace, wetpaint, PBwiki); content hosters of wikis (wikiHow, Wikitravel, Wikia); consultants who advise companies how to run their own wikis (Socialtext); and content developers (WikiBiz, an offshoot of Wikipedia). +={Prodromou, Evan+1;wikis+1} + +Since the success of a wiki-based business depends upon honoring the integrity of wiki users, Prodromou scorns what he sees as the backhanded strategies of business models based on “wikinomics” and “crowdsourcing.” He sees such models as sly attempts to get “suckers” to do free work for the entrepreneur owning the business. A sustainable commercial wiki, said Prodromou at a conference, respects the community of users and does not try to exploit them. It strives to fulfill a “noble purpose” for users and demonstrate in a transparent way that it offers value. Any hint of trickery or calculation begins to sow distrust and erode the community. Yet any wiki-based business must be able to set boundaries that allow the owners to make responsible business decisions; those decisions, however, must respect the wiki community’s values.~{ Evan Prodromou presentation, “Commercialization of Wikis: Open Community that Pays the Bills,” South by Southwest Interactive conference, March 10, 2007. }~ + +It is hard to predict what new models of “decentralized cocreation of value” will take root and flourish, but the experiments are certainly proliferating. 
Staples, the office supplies store, now hosts a contest inviting the public to suggest inventions that Staples can develop and sell under its brand name.~{ William J. Bulkeley, “Got a Better Letter Opener?” /{Wall Street Journal}/, July 13, 2006. }~ A number of mass-market advertisers have hosted competitions inviting users to create ads for their products. One of the more interesting frontiers in user-driven innovation is tapping the audience for investment capital. +={Staples} + +SellaBand (“You are the record company”) is a Web site that invites bands to recruit five thousand “Believers” to invest $10 apiece in their favorite bands; upon reaching the $50,000 mark, a band can make a professional recording, which is then posted on the SellaBand site for free downloads. Bands and fans can split advertising revenues with SellaBand.~{ http://www.sellaband.com. }~ Robert Greenwald, the activist documentary filmmaker, used e-mail solicitations, social networks, and the blogosphere to ask ordinary citizens to help finance his 2006 film /{Iraq for Sale: The War Profiteers}/.~{ William Booth, “His Fans Greenlight the Project,” /{Washington Post}/, August 20, 2006. }~ +={Greenwald, Robert;SellaBand} + +2~ Reintegrating the Sharing and Commercial Economies + +If there is persistent skepticism about the very idea of open business models, from both business traditionalists focused on the bottom line and commoners committed to sharing, it is because the commons and the commercial economy seem to represent such divergent moral values and social orders. One depends upon reciprocal exchanges of monetary value, with the help of individual property rights and contracts; the other depends upon the informal social circulation of value, without individual property rights or quid pro quos. A market is impersonal, transactional, and oriented to a bottom line; a commons tends to be personal and social and oriented to continuous relationships, shared values, and identity. 
+ +Yet, as the examples above show, the market and the commons interpenetrate each other, yin/yang style. Each “adds value” to the other in synergistic ways. Historically, this has always been true. Adam Smith, the author of /{The Wealth of Nations}/, was also the author of /{The Theory of Moral Sentiments}/, about the moral and social norms that undergird market activity. The market has always depended upon the hidden subsidies of the commons (folk stories, vernacular motifs, amateur creativity) to drive its engine of wealth creation. And the commons builds its sharing regimes amid the material wealth produced by the market (free software is developed on commercially produced computers). +={Smith, Adam:The Theory of Moral Sentiments} + +What has changed in recent years is our perceptions. The actual role of the commons in creative endeavors has become more culturally legible. For businesses to function well on Web 2.0 platforms, they must more consciously integrate social and market relationships in functional, sustainable ways. If the results sometimes seem novel, if not bizarre, it is partly because networking technologies are making us more aware that markets are not ahistorical, universal entities; they are rooted in social relationships. Open business models recognize this very elemental truth, and in this sense represent a grand gambit to go back to the future. 
+={open business models:open networks and;Web 2.0:open business, and} + +1~ 11 SCIENCE AS A COMMONS +={Science Commons+90} + +/{Web 2.0 tools, open access, and CC licenses are helping to accelerate scientific discovery.}/ + +It was one of those embarrassing episodes in science: Two sets of researchers published papers in a German organic chemistry journal, /{Angewandte Chemie}/, announcing that they had synthesized a strange new substance with “12-membered rings.” Then, as blogger and chemist Derek Lowe tells the story, “Professor Manfred Cristl of Wurzburg, who apparently knows his pyridinium chemistry pretty well, recognized this as an old way to make further pyridinium salts, not funky twelve-membered rings. He recounts how over the last couple of months he exchanged awkward emails with the two sets of authors, pointing out that they seem to have rediscovered a 100-year-old reaction. . . .”~{ Derek Lowe, “Neat! Wish It Were True!” /{In the Pipeline}/ [blog], November 29, 2007, at http://pipeline.corante.com. See also, Donna Wentworth, “Why We Need to Figure Out What We Already Know,” Science Commons blog, January 4, 2008, at http://sciencecommons.org/weblog/archives/2008/01/04/ why-we-need-to-figure-out-what-we-already-know. }~ +={Lowe, Derek} + +In the Internet age, people generally assume that these kinds of things can’t happen. All you have to do is run a Web search for “pyridinium,” right? But as scientists in every field are discovering, the existence of some shard of highly specialized knowledge does not necessarily mean that it can be located or understood. After all, a Google search for “pyridinium” turns up 393,000 results. And even peer reviewers for journals (who may have been partly at fault in this instance) have the same problem as any researcher: the unfathomable vastness of the scientific and technical literature makes it difficult to know what humankind has already discovered. 
+ +Paradoxically, even though academic science played the central role in incubating the Internet (in conjunction with the military), it has not fared very well in developing it to advance research. Most search engines are too crude. Journal articles can be expensive and inaccessible. They do not link to relevant Web resources or invite reader comment. Nor do they contain metadata to facilitate computer-based searches, collaborative filtering, and text mining. Scientific databases are plentiful but often incompatible with one another, preventing researchers from exploring new lines of inquiry. Lab researchers who need to share physical specimens still have to shuffle papers through a bureaucratic maze and negotiate with lawyers, without the help of eBay- or Craigslist-like intermediaries. + +“The World Wide Web was designed in a scientific laboratory to facilitate access to scientific knowledge,” observed Duke law professor James Boyle in 2007. “In every other area of life — commercial, social networking, pornography — it has been a smashing success. But in the world of science itself? With the virtues of the open Web all around us, we have proceeded to build an endless set of walled gardens, something that looks a lot like Compuserv or Minitel and very little like a world wide web for science.”~{ James Boyle, “The Irony of a Web Without Science,” /{Financial Times}/, September 4, 2007, at http://www.ft.com/cms/s/2/39166e30-5a7f-11dc-9bcd0000779fd2ac.html. }~ +={Boyle, James:Science Commons, and;science:scientific knowledge+2} + +Therein lies a fascinating, complicated story. To be sure, various scientific bodies have made great progress in recent years in adapting the principles of free software, free culture, and Web 2.0 applications to their research. Open-access journals, institutional repositories, specialty wikis, new platforms for collaborative research, new metatagging systems: all are moving forward in different, fitful ways. 
Yet, for a field of inquiry that has long honored the ethic of sharing and “standing on the shoulders of giants,” academic science has lagged behind most other sectors. + +Part of the problem is the very nature of scientific knowledge. While the conventional Web works fairly well for simple kinds of commerce and social purposes, the Research Web for science requires a more fine-grained, deliberately crafted structure.~{ John Wilbanks, director of the Science Commons, introduced me to this term. }~ Science involves /{practices}/, after all; it is not just about information. The “wisdom of the crowds” is not good enough. Scientific knowledge tends to be significantly more specialized and structured than cultural information or product recommendations. The Web systems for organizing, manipulating, and accessing that knowledge, accordingly, need to be more hierarchical and structured, often in quite specific ways depending upon the discipline. A scientist cannot just type “signal transduction genes in pyramidal neurons” into a search engine; she needs to be able to locate specific genes and annotations of them. Data may be strewn across dozens of different data systems, and those are not likely to be interoperable. This means that technical standards need to be coordinated, or some metasystem developed to allow different data reservoirs to communicate with one another. A scientist must be able to use computers to browse and organize a vast literature. And so on. + +Much as scientists would like to build new types of Internet-based commons, they have quickly run up against a thicket of interrelated problems: overly broad copyright and patent limitations; access and usage restrictions by commercial journal publishers and database owners; and university rules that limit how cell lines, test animals, bioassays, and other research tools may be shared. In a sense, scientists and universities face a classic collective-action problem. 
Everyone would clearly be better off if a more efficient infrastructure and enlightened social ethic could be adopted — but few single players have the resources, incentive, or stature to buck the prevailing order. There is no critical mass for instigating a new platform for scientific inquiry and “knowledge management.” +={copyright law:property rights, and+1|science, in;property rights:copyright law, and+1;science:copyright and patent restrictions in+1} + +Like so many other sectors confronting the Great Value Shift, science in the late 1990s found itself caught in a riptide. The proprietarian ethic of copyright and patent law was intensifying (as we saw in chapter 2), spurring scientists and universities to claim private ownership in knowledge that was previously treated as a shared resource.~{ See, e.g., Jennifer Washburn, /{University Inc.: The Corporate Corruption of Higher Education}/ (New York: Basic Books, 2005); Derek Bok, /{Universities in the Marketplace: The Commercialization of Higher Education}/ (Princeton, NJ: Princeton University Press, 2003); Sheldon Krimsky, /{Science in the Private Interest: Has the Lure of Profits Corrupted Biomedical Research}/ (New York: Rowman & Littlefield, 2003); and Corynne McSherry, /{Who Owns Academic Work? Battling for Control of Intellectual Property}/ (Cambridge, MA: Harvard University Press, 2001). }~ Yet at the same time the Internet was demonstrating the remarkable power of open sharing and collaboration. Even as market players sought to turn data, genetic knowledge, and much else into private property rights, a growing number of scientists realized that the best ideals of science would be fulfilled by recommitting itself to its core values of openness and sharing. Open platforms could also strengthen the social relationships that are essential to so much scientific inquiry.~{ John Seely Brown and Paul Duguid, /{The Social Life of Information}/ (Cambridge, MA: Harvard Business School Publishing, 2000). 
See also, e.g., Jane E. Fountain, “Social Capital: Its Relationship to Innovation in Science and Technology,” /{Science and Public Policy}/ 25, no. 2 (April 1998), pp. 103–15. }~ +={Great Value Shift} + +Perhaps the most salient example of the power of open science was the Human Genome Project (HGP), a publicly funded research project to map the 3 billion base pairs of the human genome. Many other scientific projects have been attracted by the stunning efficacy and efficiency of the open research model. For example, the HapMap project is a government-supported research effort to map variations in the human genome that occur in certain clusters, or haplotypes. There is also the SNP Consortium, a public-private partnership seeking to identify single-nucleotide polymorphisms (SNPs) that may be used to identify genetic sources of disease. Both projects use licenses that put the genomic data into the public domain. +={Human Genome Project (HGP);science:Human Genome Project} + +A 2008 report by the Committee for Economic Development identified a number of other notable open research projects.~{ Committee for Economic Development, /{Harnessing Openness to Transform American Health Care}/ (Washington, DC: CED, 2008). }~ There is the PubChem database, which amasses data on chemical genomics from a network of researchers; the Cancer Biomedical Informatics Grid, a network of several dozen cancer research centers and other organizations that shares data, research tools, and software applications; and TDR Targets, a Web clearinghouse sponsored by the World Health Organization that lets researchers share genetic data on neglected diseases such as malaria and sleeping sickness. It is telling that Bill Gates, who in his commercial life is a staunch advocate of proprietary control of information, has been a leader, through his Bill & Melinda Gates Foundation, in requiring research grantees to share their data. 
+={Gates, Bill} + +There has even been the emergence of open-source biotechnology, which is applying the principles of free software development to agricultural biotech and pharmaceutical development.~{ See, e.g., Rockefeller Foundation, “2005 Bellagio Meeting on Open Source Models of Collaborative Innovation in the Life Sciences” [report], Bellagio, Italy, September 2005. See also Janet Elizabeth Hope, “Open Source Biotechnology,” Ph.D. diss., Australian National University, December 2004. }~ Richard Jefferson, the founder of Cambia, a nonprofit research institute in Australia, launched the “kernel” of what he calls the first opensource biotech toolkit. It includes patented technologies such as TransBacter, which is a method for transferring genes to plants, and GUSPlus, which is a tool for visualizing genes and understanding their functions.~{ Interview with Richard Jefferson, September 7, 2006. See also http://www .cambia.org. }~ By licensing these patented research tools for open use, Jefferson hopes to enable researchers anywhere in the world— not just at large biotech companies or universities — to develop their own crop improvement technologies. +={Jefferson, Richard} + +2~ The Viral Spiral in Science + +Sociologist Robert Merton is often credited with identifying the social values and norms that make science such a creative, productive enterprise. In a notable 1942 essay, Merton described scientific knowledge as “common property” that depends critically upon an open, ethical, peer-driven process.~{ Robert Merton, “Science and Democratic Social Structure,” in /{Social Theory and Social Structure}/, 3d ed. (New York: Free Press, 1968), pp. 604–15. }~ Science is an engine of discovery precisely because research is available for all to see and replicate. 
It has historically tried to keep some distance from the marketplace for fear that corporate copyrights, patents, or contractual agreements will lock up knowledge that should be available to everyone, especially future scientists.~{ Richard R. Nelson, “The Market Economy and the Scientific Commons,” /{Research Policy}/ 33, no. 3 (April 2004), pp. 455–71. See also Karim R. Lakhani et al., “The Value of Openness in Scientific Problem Solving,” Harvard Business School Working Paper 07-050, January 2007, at http://www.hbs.edu/ research/pdf/07-050.pdf. }~ Secrecy can also make it difficult for the scientific community to verify research results. +={Merton, Robert;science:scientific knowledge+2} + +Although scientific knowledge eventually becomes publicly available, it usually flows in semi-restricted ways, at least initially, because scientists usually like to claim personal credit for their discoveries. They may refuse to share their latest research lest a rival team of scientists gain a competitive advantage. They may wish to claim patent rights in their discoveries. + +So scientific knowledge is not born into the public sphere, but there is a strong presumption that it ought to be treated as a shared resource as quickly as possible. As law scholar Robert Merges noted in 1996, “Science is not so much given freely to the public as shared under a largely implicit code of conduct among a more or less well identified circle of similarly situated scientists. In other words . . . science is more like a limited-access commons than a truly open public domain.”~{ Robert Merges, “Property Rights Theory and the Commons: The Case of Scientific Research,” /{Social Philosophy and Policy}/ 13, no. 2 (Summer 1996), pp. 145–61. }~ In certain disciplines, especially those involving large capital equipment such as telescopes and particle accelerators, the sharing of research is regarded as a kind of membership rule for belonging to a club. 
+={Merges, Robert} + +As Web 2.0 innovations have demonstrated the power of the Great Value Shift, the convergence of open source, open access, and open science has steadily gained momentum.~{ John Willinsky, “The Unacknowledged Convergence of Open Source, Open Access and Open Science,” /{First Monday}/ 10, no. 8 (August 2005), at http:// firstmonday.org/issues/issue10_8/willinsky/index.html. }~ Creative Commons was mindful of this convergence from its beginnings, but it faced formidable practical challenges in doing anything about it. “From the very first meetings of Creative Commons,” recalled law professor James Boyle, a CC board member, “we thought that science could be the killer app. We thought that science could be the place where Creative Commons could really make a difference, save lives, and have a dramatic impact on the world. There is massive, unnecessary friction in science and we think we can deal with it. Plus, there’s the Mertonian ideal of science, with which Creative Commons couldn’t fit more perfectly.”~{ Interview with James Boyle, August 15, 2006. }~ +={Merton, Robert;Boyle, James:Science Commons, and+1;Great Value Shift;Web 2.0:Great Value Shift, and} + +But despite its early interest in making the Web more research-friendly, Creative Commons realized that science is a special culture unto itself, one that has so many major players and niche variations that it would be foolhardy for an upstart nonprofit to try to engage with it. So in 2002 Creative Commons shelved its ambitions to grapple with science as a commons, and focused instead on artistic and cultural sectors. By January 2005, however, the success of the CC licenses emboldened the organization to revisit its initial idea. 
As a result of deep personal engagement by several Creative Commons board members — computer scientist Hal Abelson, law professors James Boyle and Michael Carroll, and film producer Eric Saltzman — Creative Commons decided to launch a spin-off project, Science Commons. The new initiative would work closely with scientific disciplines and organizations to try to build what it now calls “the Research Web.” +={Abelson, Hal:CC board, on;Carroll, Michael W.;Saltzman, Eric;Science Commons:CC Commons spinoff, and+5} + +Science Commons aims to redesign the “information space” — the technologies, legal rules, institutional practices, and social norms — so that researchers can more easily share their articles, datasets, and other resources. The idea is to reimagine and reinvent the “cognitive infrastructures” that are so critical to scientific inquiry. Dismayed by the pressures exerted by commercial journal publishers, open-access publishing advocate Jean-Claude Guédon has called on librarians to become “epistemological engineers.”~{ Jean-Claude Guédon, “In Oldenburg’s Long Shadow: Librarians, Research Scientists, Publishers and the Control of Scientific Publishing,” at http:// www.arl.org/resources/pubs/mmproceedings/138guedon.shtml. }~ They need to design better systems (technical, institutional, legal, and social) for identifying, organizing, and using knowledge. The payoff? Speedier research and greater scientific discovery and innovation. It turns out that every scientific discipline has its own special set of impediments to address. The recurring problem is massive, unnecessary transaction costs. There is an enormous waste of time, expense, bureaucracy, and logistics in acquiring journal articles, datasets, presentations, and physical specimens. 
+={Science Commons:libraries, and+5;science:transaction costs in+1;transaction costs:in science+1;libraries:Science Commons, and} + +If transaction costs could be overcome, scientists could vastly accelerate their research cycles. They could seek answers in unfamiliar bodies of research literature. They could avoid duplicating other people’s flawed research strategies. They could formulate more imaginative hypotheses and test them more rapidly. They could benefit from a broader, more robust conversation (as in free software — “with enough eyes, all bugs are shallow”) and use computer networks to augment and accelerate the entire scientific process. + +That is the vision of open science that Science Commons wanted to address in 2005. It recognized that science is a large, sprawling world of many institutional stakeholders controlling vast sums of money driving incommensurate agendas. In such a milieu, it is not easy to redesign some of the most basic processes and norms for conducting research. Science Commons nonetheless believed it could play a constructive role as a catalyst. + +It was fortunate to have some deep expertise not just from its board members, but from two Nobel Prize winners on its scientific advisory panel (Sir John Sulston and Joshua Lederberg) and several noted scholars (patent scholar Arti Rai, innovation economist Paul David, and open-access publishing expert Michael B. Eisen). The director of Science Commons, John Wilbanks, brought a rare mix of talents and connections. He was once a software engineer at the World Wide Web Consortium, specializing in the Semantic Web; he had founded and run a company dealing in bioinformatics and artificial intelligence; he had worked for a member of Congress; and he was formerly assistant director of the Berkman Center at Harvard Law School. 
+={David, Paul;Eisen, Michael B.;Lederberg, Joshua;Rai, Arti;Sulston, Sir John;Wilbanks, John+1} + +After obtaining free office space at MIT, Wilbanks set off to instigate change within the scientific world — and then get out of the way. “We’re designing Science Commons to outstrip ourselves,” Wilbanks told me. “We don’t want to control any of this; we’re designing it to be decentralized. If we try to control it, we’ll fail.” + +With a staff of seven and a budget of only $800,000 in 2008, Science Commons is not an ocean liner like the National Academy of Science and the National Science Foundation; it’s more of a tug-boat. Its strategic interventions try to nudge the big players into new trajectories. It is unencumbered by bureaucracy and entrenched stakeholders, yet it has the expertise, via Creative Commons, to develop standard licensing agreements for disparate communities. It knows how to craft legal solutions that can work with technology and be understood by nonlawyers. + +In 2006, Science Commons embarked upon three “proof of concept” projects that it hopes will be models for other scientific fields. The first initiative, the Scholar’s Copyright Project, aspires to give scientists the “freedom to archive and reuse scholarly works on the Internet.” It is also seeking to make the vast quantities of data on computerized databases more accessible and interoperable, as a way to advance scientific discovery and innovation. +={Scholar’s Copyright Project;Science Commons:Scholar’s Copyright Project, and} + +A second project, the Neurocommons, is a bold experiment that aims to use the Semantic Web to make a sprawling body of neurological research on the Web more accessible. The project is developing a new kind of Internet platform so that researchers will be able to do sophisticated searches of neuroscience-related journal articles and explore datasets across multiple databases. 
+={Neurocommons;Science Commons:Neurocommons, and the} + +Finally, Science Commons is trying to make it cheaper and easier for researchers to share physical materials such as genes, proteins, chemicals, tissues, model animals, and reagents, which is currently a cumbersome process. The Biological Materials Transfer Project resembles an attempt to convert the pony express into a kind of Federal Express, so that researchers can use an integrated electronic data system to obtain lab materials with a minimum of legal complications and logistical delays. +={Biological Materials Transfer Project} + +In many instances, Science Commons has been a newcomer to reform initiatives already under way to build open repositories of scientific literature or data. One of the most significant is the openaccess publishing movement, which has been a diverse, flourishing effort in academic circles since the 1990s. It is useful to review the history of the open access (OA) movement because it has been an important pacesetter and inspiration for the open-science ethic. +={education:open access movement;open access (OA) movement+22;Science Commons:open access movement, and} + +2~ The Open-Access Movement +={open access (OA) movement+19;Science Commons:open access movement, and+19} + +The open-access movement has a fairly simple goal: to get the scientific record online and available to everyone. It regards this task as one of the most fundamental challenges in science. Open-access publishing generally consists of two modes of digital access — openaccess archives (or “repositories”) and open-access journals. In both instances, the publisher or host institution pays the upfront costs of putting material on the Web so that Internet users can access the literature at no charge.~[* “Open access” can be a confusing term. 
In the context of a rivalrous, depletable natural resource like timber or grazing land, an open-access regime means that anyone can use and appropriate the resource, resulting in its overexploitation and ruin. An /{open-access regime}/ is not the same as a /{commons}/, however, because a commons does have rules, boundaries, sanctions against free riders, etc., to govern the resource. However, in the context of an infinite, nonrivalrous resource like information, which can be copied and distributed at virtually no cost, an open-access regime does not result in overexploitation of the resource. For this reason, open access in an Internet context is often conflated with the commons — even though “open access,” in a natural resource context, tends to produce very different outcomes.]~ + +The appeal of OA publishing stems from the Great Value Shift described in chapter 5. “OA owes its origin and part of its deep appeal to the fact that publishing to the Internet permits both wider dissemination and lower costs than any previous form of publishing,” writes Peter Suber, author of /{Open Access News}/ and a leading champion of OA.~{ http://www.earlham.edu/~peters/fos/fosblog.html. }~ “The revolutionary conjunction is too good to pass up. But even lower costs must be recovered if OA is to be sustainable.” In most cases, publishing costs are met by scientific and academic institutions and/or by subsidies folded into research grants. Sometimes an OA journal will defray its publishing costs by charging authors (or their grant funders) a processing fee for articles that they accept. +={Great Value Shift;Suber, Peter} + +Just as free software and music downloads have disrupted their respective industries, so OA publishing has not been a welcome development among large academic publishers such as Elsevier, Springer, Kluwer, and Wiley. Online publishing usually costs much less than traditional print publishing and it allows authors to retain control over their copyrights. 
Both of these are a big incentive for disciplines and universities to start up their own OA journals. In addition, OA publishing makes it easier for research to circulate, and for authors to reach larger readerships. This not only augments the practical goals of science, it bolsters the reputation system and open ethic that science depends upon. + +Commercial publishers have historically emphasized their shared interests with scholars and scientists, and the system was amicable and symbiotic. Academics would produce new work, validate its quality through peer review, and then, in most cases, give the work to publishers at no charge. Publishers shouldered the expense of editorial production, distribution, and marketing and reaped the bulk of revenues generated. The arrangement worked fairly well for everyone until journal prices began to rise in the early 1970s. Then, as subscription rates continued to soar, placing unbearable burdens on university libraries in the 1990s, the Internet facilitated an extremely attractive alternative: open-access journals. Suddenly, conventional business models for scholarly publishing had a serious rival, one that shifts the balance of power back to scientists and their professional communities. + +Publishers have long insisted upon acquiring the copyright of journal articles and treating them as “works for hire.” This transfer of ownership enables the publisher, not the author, to determine how a work may circulate. Access to an article can then be limited by the subscription price for a journal, the licensing fees for online access, and pay-per-view fees for viewing an individual article. Publishers may also limit the reuse, republication, and general circulation of an article by charging high subscription or licensing fees, or by using digital rights management. If a university cannot afford the journal, or if a scholar cannot afford to buy individual articles, research into a given topic is effectively stymied. 
+ +Open-access champion John Willinsky notes, “The publishing economy of scholarly journals is dominated by a rather perverse property relation, in which the last investor in the research production chain — consisting of university, researcher, funding agency and /{publisher}/ — owns the resulting work outright through a very small investment in relation to the work’s overall cost and value.”~{ Willinsky, “The Unacknowledged Convergence.” }~ Scientists and scholars virtually never earn money from their journal articles, and only occasionally from their books. Unlike commercial writers, this is no problem for academics, whose salaries are intended to free them to study all sorts of niche interests despite the lack of “market demand.” Their works are not so much “intellectual property” that must yield maximum revenues as “royaltyfree literature,” as Peter Suber calls it. Academics write and publish to contribute to their fields and enhance their standing among their peers. +={Suber, Peter;Willinsky, John} + +Not surprisingly, many commercial publishers regard OA publishing as a disruptive threat. It can, after all, subvert existing revenue models for scholarly publishing. This does not mean that OA publishing cannot support a viable business model. Much of OA publishing is sustained through “author-side payments” to publishers. In certain fields that are funded by research grants, such as biomedicine, grant makers fold publishing payments into their grants so that the research can be made permanently available in open-access journals. A leading commercial publisher, BioMed Central, now publishes over 140 OA journals in this manner. Hindawi Publishing Corporation, based in Cairo, Egypt, publishes more than one hundred OA journals and turns a profit. And Medknow Publications, based in Mumbai, India, is also profitable as a publisher of more than forty OA journals. 
+ +It remains an open question whether the OA business model will work in fields where little research is directly funded (and thus upfront payments are not easily made). As Suber reports, “There are hundreds of OA journals in the humanities, but very, very few of them charge a fee on the author’s side; most of them have institutional subsidies from a university say, or a learned society.”~{ Interview with Peter Suber, June 28, 2006. }~ Yet such subsidies, in the overall scheme of things, may be more attractive to universities or learned societies than paying high subscription fees for journals or online access. +={Suber, Peter+1} + +The tension between commercial publishers and academic authors has intensified over the past decade, fueling interest in OA alternatives. The most salient point of tension is the so-called “serials crisis.” From 1986 to 2006, libraries that belong to the Association of Research Libraries saw the cost of serial journals rise 321 percent, or about 7.5 percent a year for twenty consecutive years.~{ Association of Research Libraries, /{ARL Statistics}/ 2005–06, at http://www.arl .org/stats/annualsurveys/ar/stats/arlstats06.shtml. }~ This rate is four times higher than the inflation rate for those years. Some commercial journal publishers reap profits of nearly 40 percent a year.~{ Peter Suber, “Creating an Intellectual Commons through Open Access,” in Charlotte Hess and Elinor Ostrom, eds., /{Understanding Knowledge as a Commons: From Theory to Practice}/ (Cambridge, MA: MIT Press, 2007), p. 175. }~ By 2000 subscription rates were so crushing that the Association of American Universities and the Association of Research Libraries issued a joint statement that warned, “The current system of scholarly publishing has become too costly for the academic community to sustain.”~{ Association of Research Libraries, “Tempe Principles for Emerging Systems of Scholarly Publishing,” May 10, 2000, at http://www.arl.org/resources/pubs/ tempe/index.shtml. 
}~ Three years later, the high price of journals prompted Harvard, the University of California, Cornell, MIT, Duke, and other elite research universities to cancel hundreds of journal subscriptions — a conspicuous act of rebellion by the library community. +={libraries:“serials crisis”, and|Science Commons, and;Science Commons:libraries, and} + +As journal prices have risen, the appeal of OA publishing has only intensified. Unfortunately, migrating to OA journals is not simply an economic issue. Within academia, the reputation of a journal is deeply entwined with promotion and tenure decisions. A scientist who publishes an article in /{Cell}/ or /{Nature}/ earns far more prestige than she might for publishing in a little-known OA journal. + +So while publishing in OA journals may be economically attractive, it flouts the institutional traditions and social habits that scientists have come to rely on for evaluating scientific achievement. The OA movement’s challenge has been to document how OA models can help a university, and so it has collaborated with university administrators to showcase exemplary successes and work out new revenue models. It is urging promotion and tenure committees, for example, to modify their criteria to stop discriminating against new journals just because they are new, and hence to stop discriminating against OA journals (which are all new). Much of this work has fallen to key OA leaders like the Open Society Institute, the Hewlett Foundation, Mellon Foundation and the library-oriented SPARC (Scholarly Publishing and Academic Resources Coalition) as well as individuals such as John Willinsky, Jean-Claude Guédon, Stevan Harnad, and Peter Suber. +={Suber, Peter;Willinsky, John;Guédon, Jean-Claude;Harnad, Stevan} + +One of the first major salvos of the movement came in 2000, when biomedical scientists Harold E. Varmus, Patrick O. Brown, and Michael B. 
Eisen called on scientific publishers to make their literature available through free online public archives such as the U.S. National Library of Medicine’s PubMed Central. Despite garnering support from nearly 34,000 scientists in 180 countries, the measure did not stimulate the change sought. It did alert the scientific world, governments, and publishers about the virtues of OA publishing, however, and galvanized scientists to explore next steps. +={Brown, Patrick O.;Varmus, Harold E.} + +At the time, a number of free, online peer-reviewed journals and free online archives were under way.~{ http://www.earlham.edu/~peters/fos/timeline.htm. }~ But much of the momentum for organized OA movement began in 2001, when the Open Society Institute convened a group of leading librarians, scientists, and other academics in Hungary. In February 2002 the group released the Budapest Open Access Initiative, a statement that formally describes “open access” as the freedom of users to “read, download, copy, distribute, print, search or link to the full texts of . . . articles, crawl them for indexing, pass them as data to software, or use them for any other lawful purpose, without financial, legal or technical barriers other than those inseparable from gaining access to the Internet itself.”~{ The Budapest Open Access Initiative can be found at http://www.soros.org/ openaccess. }~ Two subsequent statements, the Bethesda Declaration and the Berlin Declaration, in June 2003 and October 2003, respectively, expanded upon the definitions of open access and gave the idea new prominence. (Suber calls the three documents the “BBB definition” of open access.)~{ http://www.earlham.edu/~peters/fos/overview.htm. 
}~ +={Suber, Peter;Budapest Open Access Initiative (2002);libraries:open access movement, and} + +Creative Commons licenses have been critical tools in the evolution of OA publishing because they enable scientists and scholars to authorize in advance the sharing, copying, and reuse of their work, compatible with the BBB definition. The Attribution (BY) and Attribution-Non-Commercial (BY-NC) licenses are frequently used; many OA advocates regard the Attribution license as the preferred choice. The protocols for “metadata harvesting” issued by the Open Archives Initiative are another useful set of tools in OA publishing. When adopted by an OA journal, these standardized protocols help users more easily find research materials without knowing in advance which archives they reside in, or what they contain. + +There is no question that OA is transforming the market for scholarly publishing, especially as pioneering models develop. The Public Library of Science announced its first two open-access journals in December 2002. The journals represented a bold, high-profile challenge by highly respected scientists to the subscription-based model that has long dominated scientific publishing. Although Elsevier and other publishers scoffed at the economic model, the project has expanded and now publishes seven OA journals, for biology, computational biology, genetics, pathogens, and neglected tropical diseases, among others. + +OA received another big boost in 2004 when the National Institutes for Health proposed that all NIH-funded research be made available for free one year after its publication in a commercial journal. The $28 billion that the NIH spends on research each year (more than the domestic budget of 142 nations!) results in about 65,000 peer-reviewed articles, or 178 every day. Unfortunately, commercial journal publishers succeeded in making the proposed OA policy voluntary. 
The battle continued in Congress, but it became clear that the voluntary approach was not working. Only 4 percent of researchers published their work under OA standards, largely because busy, working scientists did not consider it a priority and their publishers were not especially eager to help. So Congress in December 2007 required NIH to mandate open access for its research within a year of publication.~{ Peter Suber has an excellent account of the final OA legislation in /{SPARC Open Access Newsletter}/, no. 17, January 2, 2008, at http://www.earlham.edu/ ~peters/fos/newsletter/01-02-08.htm. }~ +={National Institutes for Health (NIH)} + +What may sound like an arcane policy battle in fact has serious implications for ordinary Americans. The breast cancer patient seeking the best peer-reviewed articles online, or the family of a person with Huntington’s disease, can clearly benefit if they can acquire, for free, the latest medical research. Scientists, journalists, health-care workers, physicians, patients, and many others cannot access the vast literature of publicly funded scientific knowledge because of high subscription rates or per-article fees. A freely available body of online literature is the best, most efficient way to help science generate more reliable answers, new discoveries, and commercial innovations. + +While large publishers continue to dominate the journal market, OA publishing has made significant advances in recent years. In June 2008, the Directory of Open Access Journals listed more than 3,400 open-access journals containing 188,803 articles. In some fields such as biology and bioinformatics, OA journals are among the top-cited journals. In fact, this is one of the great advantages of OA literature. In the networked environment, articles published in OA journals are more likely to be discovered by others and cited, which enhances the so-called impact of an article and the reputation of an author. 
+ +Although journals may or may not choose to honor OA principles, any scientist, as the copyright holder of his articles, can choose to “self-archive” his work under open-access terms. But commercial publishers generally don’t like to cede certain rights, and authors usually don’t know what rights to ask for, how to assert them in legal language, and how to negotiate with publishers. So it is difficult for most academics to assert their real preferences for open access. To help make things simpler, SPARC and MIT developed what is called an “author’s addendum.” It is a standard legal contract that authors can attach to their publishing contracts, in which they reserve certain key rights to publish their works in OA-compliant ways. + +2~ The Scholar’s Copyright Project +={Scholar’s Copyright Project+18;Science Commons:Scholar’s Copyright Project, and+18} + +In an attempt to help the open-access movement, Science Commons in 2007 developed its own suite of amendments to publishing contracts. The goal has been to ensure that “at a minimum, scholarly authors retain enough rights to archive their work on the Web. Every Science Commons Addendum ensures the freedom to use scholarly articles for educational purposes, conference presentations, in other scholarly works or in professional activities.”~{ Science Commons brochure [undated]. }~ The ultimate goal is to enable authors “to have the clear and unambiguous freedom to engage in their normal everyday scholarly activities without contending with complex technology, continuous amendments to contracts or the need for a lawyer.”~{ Science Commons, “Scholar’s Copyright Project — Background Briefing,” at http://sciencecommons.org/literature/scholars_copyright.html. }~ +={open access (OA) movement} + +To make the whole process easier for scientists, Science Commons developed the Scholar’s Copyright Addendum Engine. 
This point-and-click Web-based tool lets authors publish in traditional, subscription-based journals while retaining their rights to post copies on the Internet for download, without most copyright and financial restrictions. There are also options for “drag and drop” self-archiving to repositories such as MIT’s DSpace and the National Library of Medicine’s PubMed Central. Besides making selfarchiving easier and more prevalent, Science Commons hopes to standardize the legal terms and procedures for self-archiving to avoid a proliferation of incompatible rights regimes and document formats. “The engine seems to be generating a dialogue between authors and publishers that never existed,” said John Wilbanks. “It’s not being rejected out of hand, which is really cool. To the extent that the addendum becomes a norm, it will start to open up the [contractual] limitations on self-archiving.”~{ Interview with John Wilbanks, November 19, 2007. }~ +={Wilbanks, John} + +Harvard University gave self-archiving a big boost in February 2008 when its faculty unanimously voted to require all faculty to distribute their scholarship through an online, open-access repository operated by the Harvard library unless a professor chooses to “opt out” and publish exclusively with a commercial journal. Robert Darnton, director of the Harvard library, said, “In place of a closed, privileged and costly system, [the open-access rule] will help open up the world of learning to everyone who wants to learn.”~{ Patricia Cohen, “At Harvard, a Proposal to Publish Free on the Web,” /{New York Times}/, February 12, 2008. See also Peter Suber’s coverage of the decision in Open Access News, at http://www.earlham.edu/~peters/fos/2008/02/moreon-imminent-oa-mandate-at-harvard.html, and subsequent days. }~ Harvard’s move was the first time that a university faculty, and not just the administration, initiated action to take greater control of its scholarly publishing. 
While some critics complain the new policy does not go far enough, most OA advocates hailed the decision as a major step toward developing alternative distribution models for academic scholarship. +={Darnton, Robert;Harvard University;open access (OA) movement} + +By far, the more ambitious aspect of the Scholar’s Copyright project is the attempt to free databases from a confusing tangle of copyright claims. In every imaginable field of science — from anthropology and marine biology to chemistry and genetics — databases are vital tools for organizing and manipulating vast collections of empirical data. The flood of data has vastly increased as computers have become ubiquitous research tools and as new technologies are deployed to generate entirely new sorts of digital data streams— measurements from remote sensors, data streams from space, and much more. But the incompatibility of databases — chiefly for technical and copyright reasons — is needlessly Balkanizing research to the detriment of scientific progress. “There is plenty of data out there,” says Richard Wallis of Talis, a company that has built a Semantic Web technology platform for open data, “but it is often trapped in silos or hidden behind logins, subscriptions or just plain difficult to get hold of.” He added that there is a lot of data that is “just out there,” but the terms of access may be dubious.~{ Donna Wentworth blog post, “Ensuring the freedom to integrate — why we need an ‘open data’ protocol,” Science Commons blog, December 20, 2007, at http://sciencecommons.org/weblog/archives/2007/12/20/ensuring-thefreedom-to-integrate. }~ +={Wallis, Richard;science:databases+14;Science Commons:ownership of data, and+14} + +Questions immediately arise: Can a database be legally used? Who owns it? Will the database continue to be accessible? Will access require payment later on? 
Since data now reside anywhere in the world, any potential user of data also has to consider the wide variations of copyright protection for databases around the world. + +The question of how data shall be owned, controlled, and shared is a profoundly perplexing one. History has shown the virtue of sharing scientific data — yet individual scientists, universities, and corporations frequently have their own interests in limiting how databases may be used. Scientists want to ensure the integrity of the data and any additions to it; they may want to ensure preferential access to key researchers; companies may consider the data a lucrative asset to be privately exploited. Indeed, if there is not some mechanism of control, database producers worry that free riders will simply appropriate useful compilations and perhaps sell it or use it for their own competitive advantage. Or they may fail to properly credit the scientists who compiled the data in the first place. Inadequate database protection could discourage people from creating new databases in the future. + +A National Research Council report in 1999 described the problem this way: “Currently many for-profit and not-for-profit database producers are concerned about the possibility that significant portions of their databases will be copied or used in substantial part by others to create ‘new’ derivative databases. If an identical or substantially similar database is then either re-disseminated broadly or sold and used in direct competition with the original rights holder’s database, the rights holder’s revenues will be undermined, or in extreme cases, the rights holder will be put out of business.”~{ National Research Council, /{A Question of Balance: Private Rights and the Public Interest in Scientific and Technical Databases}/ (Washington, DC: National Academy Press, 1999), p. 14. 
}~ + +In the late 1990s, when the Human Genome Project and a private company, Celera, were competing to map the human genome, the publicly funded researchers were eager to publish the genome sequencing data as quickly as possible in order to prevent Celera or any other company from claiming exclusive control over the information. They wanted the data to be treated as “the common heritage of humanity” so that it would remain openly accessible to everyone, including commercial researchers. When Sir John Sulston of the Human Genome Project broached the idea of putting his team’s research under a GPL-like license, it provoked objections that ownership of the data would set a worrisome precedent. A GPL for data amounts to a “reach-through” requirement on how data may be used in the future. This might not only imply that data can be owned — flouting the legal tradition that facts cannot be owned — it might discourage future data producers from depositing their data into public databases.~{ John Sulston and Georgina Ferry, /{The Common Threat: A Story of Science, Politics, Ethics and the Human Genome}/ (Washington, DC: Joseph Henry Press, 2002), pp. 212–13. }~ +={Human Genome Project (HGP);science:Human Genome Project;Sulston, Sir John;General Public License (GPL):“reach-through” requirement} + +The International HapMap Project attempted such a copyleft strategy with its database of genotypes; its goal is to compare the genetic sequences of different individuals to identify chromosomal regions where genetic variants are shared.~{ http://www.hapmap.org. }~ The project initially required users to register and agree to certain contract terms in order to use the database. One key term prohibited users from patenting any genetic information from the database or using patents to block usage of HapMap data.~{ Andrés Guadamuz González, “Open Science: Open Source Licenses in Scientific Research,” /{North Carolina Journal of Law & Technology}/ 7, no. 2 (Spring 2006), pp. 349–50. 
}~ This viral, open-content license for data seemed to provide a solution to the problem of how to keep data in the commons. But in time the HapMap Project found that its license inhibited people’s willingness to integrate their own data with the HapMap database. It therefore abandoned its license and now places all of its data into the public domain; it is now available to be used by anyone for any purpose, although it has issued guidelines for the “responsible use and publication” of the data.~{ http://www.hapmap.org/guidelines_hapmap_data.html.en. }~ +={International HapMap Project} + +The basic problem with applying copyright law to databases is how to draw the line between what is private property and what remains in the commons. “If you try to impose a Creative Commons license or free-software-style licensing regime on a database of uncopyrightable facts,” explained John Wilbanks, “you create an enormous amount of confusion in the user about where the rights start and stop.”~{ Interview with John Wilbanks, November 19, 2007. }~ It is not very practical for a working scientist to determine whether copyright protection applies only to the data itself, to the database model (the structure and organization of the data), or to the data entry and output sheet. A scientist might reasonably presume that his data are covered by copyright law, and then use that right to apply a CC ShareAlike license to the data. But in fact, the data could be ineligible for copyright protection and so the CC license would be misleading; other scientists could ignore its terms with impunity. At the other extreme, other scientists may be unwilling to share their data at all lest the data circulate with no controls whatsoever. Data are either overprotected or underprotected, but in either case there is great ambiguity and confusion. 
+={Wilbanks, John;copyright law:science, in+1;science:copyright and patent restrictions in+1;Creative Commons (CC) licenses:applied to databases} + +For two years, Science Commons wrestled with the challenge of applying the CC licenses to databases. Ultimately, the project came to the conclusion that “copyright licenses and contractual restrictions are simply the wrong tool, even if those licenses are used with the best of intentions.” There is just too much uncertainty about the scope and applicability of copyright — and thus questions about any licenses based on it. For example, it is not entirely clear what constitutes a “derivative work” in the context of databases. If one were to query hundreds of databases using the Semantic Web, would the federated results be considered a derivative work that requires copyright permissions from each database owner? There is also the problem of “attribution stacking,” in which a query made to multiple databases might require giving credit to scores of databases. Different CC licenses for different databases could also create legal incompatibilities among data. Data licensed under a CC ShareAlike license, for example, cannot be legally combined with data licensed under a different license. Segregating data into different “legal boxes” could turn out to impede, not advance, the freedom to integrate data on the Web. +={attribution stacking;copyright law:attribution stacking in databases, and|derivative works, on;derivative works} + +After meeting with a variety of experts in scientific databases, particularly in the life sciences, biodiversity, and geospatial research, the Science Commons came up with an ingenious solution to the gnarly difficulties. Instead of relying on either copyright law or licenses, Science Commons in late 2007 announced a new legal tool, CC0 (CC Zero), which creates a legal and technical platform for a scientific community to develop its own reputation system for sharing data. 
+={CC Zero (CC0)+6;Creative Commons (CC):CC0 (CC Zero), and+6;Science Commons:CC0 (CC Zero), and+6} + +CC0 is not a license but a set of protocols. The protocols require that a database producer waive all rights to the data based on intellectual property law — copyrights, patents, unfair competition claims, unfair infringement rights — a “quitclaim” that covers everything. Then it requires that the database producer affirmatively declare that it is not using contracts to encumber future uses of the data. Once a database is certified as complying with the protocols, as determined by Science Commons, it is entitled to use a Science Commons trademark, “Open Access Data,” and CC0 metadata. The trademark signals to other scientists that the database meets certain basic standards of interoperability, legal certainty, ease of use, and low transaction costs. The metadata is a functional software tool that enables different databases to share their data. + +“What we are doing,” said John Wilbanks, “is reconstructing, contractually, the public domain. The idea is that with any conforming implementation — any licensed database — you have complete freedom to integrate with anything else. It creates a zone of certainty for data integration.”~{ Ibid. }~ Unlike public-domain data, the databases that Science Commons certifies as meeting open-data protocols cannot be taken private or legally encumbered. To qualify to use the Open Access Data mark, databases must be interoperable with other databases licensed under the protocols. If someone falsely represents that his data are covered by the license, Science Commons could pursue a trademark infringement case. +={Wilbanks, John;public domain:reconstructing+4} + +To develop this scheme, Science Commons’s attorney Thinh Nguyen worked closely with Talis, a company that has built a Semantic Web technology platform for open data and developed its own open database license. 
Nguyen also worked with the company’s legal team, Jordan Hatcher and Charlotte Waelde, and with the Open Knowledge Foundation, which has developed the Open Knowledge Definition. +={Nguyen, Thinh;Hatcher, Jordan;Waelde, Charlotte;Open Knowledge Definition} + +The CC0 approach to data represents something of a breakthrough because it avoids rigid, prescriptive legal standards for a type of content (data) that is highly variable and governed by different community norms. CC0 abandons the vision of crafting a single, all-purpose copyright license or contract for thousands of different databases in different legal jurisdictions. Instead it tries to create a legal framework that can honor a range of variable social norms that converge on the public domain. Each research community can determine for itself how to meet the CC0 protocols, based on its own distinctive research needs and traditions. Different norms can agree to an equivalency of public-domain standards without any one discipline constraining the behaviors of another. +={public domain:social norms, and} + +The system is clever because it provides legal reliability without being overly prescriptive. It is simple to use but still able to accommodate complex variations among disciplines. And it has low transaction costs for both producers and users of data. Over time, the databases that comply with the CC0 protocols are likely to grow into a large universe of interoperable open data. +={science:transaction costs in;transaction costs:science, in} + +It is still too early to judge how well the CC0 program is working, but initial reactions have been positive. “The solution is at once obvious and radical,” said Glyn Moody, a British journalist who writes about open-source software.
“It is this pragmatism, rooted in how science actually works, that makes the current protocol particularly important.” Deepak Singh, the co-founder of Bioscreencast, a free online video tutorial library for the scientific community, said, “I consider just the announcement to be a monumental moment.”~{ Moody and Singh quotations from Donna Wentworth, Science Commons blog post, December 20, 2007. }~ +={Moody, Glyn;Singh, Deepak} + +2~ The Neurocommons + +Every day there is so much new scientific literature generated that it would take a single person 106 years to read it all.~{ Brian Athey, University of Michigan, presentation at Commons of Science conference, National Academy of Science, Washington, DC, October 3, 2006. }~ In a single year, over twenty-four thousand peer-reviewed journals publish about 2.5 million research articles.~{ Stevan Harnad, “Maximizing Research Impact Through Institutional and National Open-Access Self-Archiving Mandates,” /{Electronics & Computer Science E-Prints Repository}/, May 2006, available at http://eprints.ecs.soton.ac.uk/ 12093/02/harnad-crisrey.pdf. }~ Our ability to generate content has far outstripped our ability to comprehend it. We are suffering from a cognitive overload — one that can only be addressed by using software and computer networks in innovative ways to organize, search, and access information. For many years, Sir Tim Berners-Lee, the celebrated inventor of the World Wide Web, and his colleagues at the World Wide Web Consortium (W3C), based at MIT, have been trying to solve the problem of information overload by developing a “new layer” of code for the Web. +={Berners-Lee, Tim;World Wide Web Consortium (W3C)} + +This visionary project, the so-called Semantic Web, aspires to develop a framework for integrating a variety of systems, so they can communicate with one another, machine to machine.
The goal is to enable computers to identify and capture information from anywhere on the Web, and then organize the results in sophisticated and customized ways. “If you search for ‘signal transduction genes in parameter neurons,’ ” said John Wilbanks of Science Commons, “Google sucks. It will get you 190,000 Web pages.” The goal of the Semantic Web is to deliver a far more targeted and useful body of specialized information. +={Semantic Web+6;Science Commons:Semantic Web, and the+6;World Wide Web:Semantic Web+6;Wilbanks, John} + +A key tool is the Unique Resource Identifier, or URI, which is analogous to the Unique Resource Locator, or URL, used by the Web. Affix a URI to any bit of information on the Web, and the Semantic Web will (so it is hoped) let you mix and match information tagged with that URI with countless other bits of information tagged with other URIs. It would not matter if the bit of information resides in a journal article, database, clinical image, statistical analysis, or video; the point is that the URI would identify a precise bit of information. By enabling cross-linking among different types of information, the idea is that scientists will be able to make all sorts of unexpected and serendipitous insights. +={Unique Resource Identifier (URI);Unique Resource Locator (URL)} + +For example, geneticists studying Huntington’s disease, a rare neurodegenerative disorder, and experts studying Alzheimer’s disease are both exploring many of the same genes and proteins of the brain. But because of the specialization of their disciplines, the chances are good that they read entirely different scientific journals and attend different conferences. There is no easy or systematic way for scientists in one specialty to explore the knowledge that has developed in another specialty. The Semantic Web could probably help. + +Unfortunately, for a grand dream that has been touted since the 1990s, very little has developed.
The W3C has been embroiled in the design challenges of the Semantic Web for so long that many companies and computer experts now scoff at the whole idea of the Semantic Web. There have been too many arcane, inconclusive debates about computer syntax, ontology language, and philosophical design choices that no one is holding their breath anymore, waiting for the Semantic Web to arrive. (Wikipedia defines a computer ontology as “a data model that represents a set of concepts within a domain and the relationships between those concepts. It is used to reason about the objects within that domain.”) The vision of the Semantic Web may have the potential to revolutionize science, but few people have seen much practical value in it over the near term, and so it has garnered little support. +={World Wide Web Consortium (W3C)+1} + +Wilbanks, who once worked at the W3C, was frustrated by this state of affairs. Although he has long believed in the promise of the Semantic Web, he also realized that it is not enough to extol its virtues. One must demonstrate its practicality. “The way to herd cats is not to herd cats,” he said, citing a colleague, “but to put a bowl of cream on your back stoop and run like hell.” For Wilbanks, the bowl of cream is the Neurocommons knowledge base, a project that seeks to integrate a huge amount of neuroscientific research using Semantic Web protocols and is easy to use. +={Neurocommons+5;Science Commons:Neurocommons, and the+5;Wilbanks, John+4} + +“The way to overcome the inertia that the Semantic Web critics rightly point out, is not to sit down and argue about ontologies,” said Wilbanks. “It’s to release something that’s useful enough that it’s worth wiring your database into the commons system. If I want to get precise answers to complicated questions that might be found in my own database, among others, now I can do that. I simply have to wire it into the Neurocommons. 
You don’t need to come to some magical agreement about ontology; you just need to spend a couple of days converting your database to RDF [Resource Description Framework, a set of Semantic Web specifications], and then— boom! — I’ve got all of the other databases integrated with mine.” By getting the ball rolling, Science Commons is betting that enough neuroscience fields will integrate their literature to the Neurocommons protocols and make the new commons a lively, sustainable, and growing organism of knowledge. +={RDF (Resource Description Framework)} + +Using the “open wiring” of the Semantic Web, the Neurocommons has already integrated information from fifteen of the top twenty databases in the life sciences and neuroscience. The data have been reformatted to conform to Semantic Web protocols and the scientific literature, where possible, has been tagged so that it can be “text-mined” (searched for specific information via URI tags). “We have put all this stuff into a database that we give away,” said Wilbanks. “It’s already been mirrored in Ireland, and more mirrors are going up. It’s sort of like a ‘knowledge server,’ instead of a Web server.” +={Unique Resource Identifier (URI)} + +Commercial journal publishers already recognize the potential power of owning and controlling metadata in scientific literature and datasets. To leverage this control many are starting to make copyright claims in certain kinds of metadata, and to amend their contracts with libraries in order to limit how they may retrieve electronic information. “There is a lot at stake here,” says Villanova law professor Michael Carroll. “What Science Commons wants to do is make sure that metadata is an open resource.”~{ Interview with Michael Carroll, August 7, 2006. 
}~ +={Carroll, Michael W.;libraries:Science Commons, and;Science Commons:libraries, and} + +Wilbanks has high hopes that the Neurocommons project, by providing a useful demonstration of Semantic Web tools, will hasten the interoperability of specialized knowledge that is currently isolated from related fields. It comes down to how to motivate a convergence of knowledge. Instead of arguing about which discipline’s ontology of specialized knowledge is superior to another’s — and making little headway toward a consensus — Wilbanks has a strategy to build a knowledge tool that is useful. Period. His bet is that a useful “knowledge server” of integrated neuroscientific information will be a powerful incentive for adjacent disciplines to adapt their own literature and databases to be compatible. The point is to get the commons going — while allowing the freedom for it to evolve. Then, if people have disagreements or quibbles, they will be free to change the ontologies as they see fit. “The version [of the Neurocommons] that we are building is useful and it is free,” Wilbanks said. “That means that if you want to integrate with it, you can. It means that if you want to redo our work your way, you can— as long as you use the right technical formats. You can reuse all of our software.” + +The problem with a field like neuroscience, which has so many exploding frontiers, is that no single company or proprietary software platform can adequately manage the knowledge. The information is simply too copious and complex. Like so many other fields of knowledge that are large and complicated, it appears that only an open-source model can successfully curate the relevant information sources. A Web-based commons can be remarkably efficient, effective, and scalable. This has been the lesson of free and open-source software, wikis, and the Web itself. Although it is too early to tell how the Neurocommons project will evolve, the initial signs are promising. 
A number of foundations that support research for specific diseases — Alzheimer’s disease, Parkinson’s, autism, epilepsy, Huntington’s disease — have already expressed interest in the Neurocommons as a potential model for advancing research in their respective fields. + +2~ Open Physical Tools +={Science Commons:open physical tools in+10} + +Science is not just about text and data, of course. It also involves lots of tangible /{stuff}/ needed to conduct experiments. Typical materials include cell lines, monoclonal antibodies, reagents, animal models, synthetic materials, nano-materials, clones, laboratory equipment, and much else. Here, too, sharing and collaboration are important to the advance of science. But unlike digital bits, which are highly malleable, the physical materials needed for experiments have to be located, approved for use, and shipped. Therein lies another tale of high transaction costs impeding the progress of science. As Thinh Nguyen, counsel for Science Commons, describes the problem: +={Nguyen, Thinh+1} + +_1 The ability to locate materials based on their descriptions in journal articles is often limited by lack of sufficient information about origin and availability, and there is no standard citation for such materials. In addition, the process of legal negotiation that may follow can be lengthy and unpredictable. This can have important implications for science policy, especially when delays or inability to obtain research materials result in lost time, productivity and research opportunities.~{ Thinh Nguyen, “Science Commons: Material Transfer Agreement Project,” /{Innovations}/, Summer 2007, pp. 137–43, at http://www.mitpressjournals.org/ doi/pdf/10.1162/itgg.2007.2.3.137. }~ + +To the nonscientist, this transactional subculture is largely invisible. But to scientists whose lab work requires access to certain physical materials, the uncertainties, variations, and delays can be crippling. 
Normally, the transfer of materials from one scientist to another occurs through a Material Transfer Agreement, or MTA. The technology transfer office at one research university will grant, or not grant, an MTA so that a cell line or tissue specimen can be shipped to a researcher at another university. Typically, permission must be granted for the researcher to publish, disseminate, or use research results, and to license their use for commercialization. +={Material Transfer Agreements (MTAs)+7;science:Material Transfer Agreements (MTAs)+7} + +While certain types of transactions involve material that could conceivably generate high royalty revenues, a great many transactions are fairly low-value, routine transfers of material for basic research. Paradoxically, that can make it all the harder to obtain the material because consummating an MTA is not a high priority for the tech transfer office. In other cases, sharing the material is subject to special agreements whose terms are not known in advance. + +Corporations sometimes have MTAs with onerous terms that prevent academic researchers from using a reagent or research tool. Individual scientists sometimes balk at sharing a substance because of the time and effort needed to ship it. Or they may wish to prevent another scientist from being the first to publish research results. Whatever the motivation, MTAs can act as a serious impediment to verification of scientific findings. They can also prevent new types of exploratory research and innovation. + +Wilbanks describes the existing system as an inefficient, artisanal one that needs to become more of a streamlined industrial system. Just as Creative Commons sought to lower the transaction costs for sharing creative works, through the use of standard public licenses, so Science Commons is now trying to standardize the process for sharing research materials.
The idea is to reduce the transaction costs and legal risks by, in Nguyen’s words, “creating a voluntary and scalable infrastructure for rights representation and contracting.”~{ Ibid. }~ Like the CC licenses, the Science Commons MTAs will consist of “three layers” of licenses — the standard legal agreement, the machine-readable metadata version, and the “human-readable deed” that nonlawyers can understand. +={Wilbanks, John;Nguyen, Thinh;science:transaction costs in;transaction costs:science, in} + +There are already some successful systems in place for sharing research materials, most notably the Uniform Biological Material Transfer Agreement (UBMTA), which some 320 institutions have accepted, as well as a Simple Letter Agreement developed by the National Institutes of Health. The problem with these systems is that they cannot be used for transfers of materials between academic and for-profit researchers. In addition, there are many instances in which UBMTA signatories can opt out of the system to make modifications to the UBMTA on a case-by-case basis. +={National Institutes of Health (NIH);Uniform Biological Material Transfer Agreement (UBMTA)} + +To help standardize and streamline the whole system for sharing research materials, Science Commons is working with a consortium of ten research universities, the iBridge Network, to develop a prototype system. The hope is that by introducing metadata to the system, and linking that information to standard contracts and human-readable deeds, scientists will be able to acquire research materials much more rapidly by avoiding bureaucratic and legal hassles. Just as eBay, Amazon, and Federal Express use metadata to allow customers to track the status of their orders, so the Science Commons MTA project wants to develop a system that will allow searching, tracking, and indexing of specific shipments.
It is also hoped that metadata links will be inserted into journal articles, enabling scientists to click on a given research material in order to determine the legal and logistical terms for obtaining the material. +={iBridge Network;Science Commons:iBridge Network, and} + +Wilbanks envisions a new market of third-party intermediaries to facilitate materials transfers: “There’s an emerging network of third parties — think of them as ‘biology greenhouses’ — who are funded to take in copies of research materials and manufacture them on demand — to grow a quantity and mail them out. What Science Commons is trying to do with the Materials Transfer Project is to put together a functional system where materials can go to greenhouses under standard contracts, with digital identifiers, so that the materials can be cross-linked into the digital information commons. Anytime you see a list of genes, for example, you will be able to right-click and see the stuff that’s available from the greenhouses under standard contract, and the cost of manufacture and delivery in order to access the tool. Research materials need to be available under a standard contract, discoverable with a digital identifier, and fulfillable by a third party. And there needs to be some sort of acknowledgment, like a citation system.” +={Wilbanks, John} + +At one level, it is ironic that one of the oldest commons-based communities, academic science, has taken so long to reengineer its digital infrastructure to take advantage of the Internet and open digital systems. Yet academic disciplines have always clung tightly to their special ways of knowing and organizing themselves. The arrival of the Internet has been disruptive to this tradition by blurring academic boundaries and inviting new types of cross-boundary research and conversation. 
If only to improve the conversation, more scientists are discovering the value of establishing working protocols to let the diverse tribes of science communicate with one another more easily. Now that the examples of networked collaboration are proliferating, demonstrating the enormous power that can be unleashed through sharing and openness, the momentum for change is only going to intensify. The resulting explosion of knowledge and innovation should be quite a spectacle. + +1~ 12 OPEN EDUCATION AND LEARNING +={education+44} + +/{Managing educational resources as a commons can make learning more affordable and exciting.}/ + +In the late 1990s, as Richard Baraniuk taught electrical engineering to undergraduates at Rice University, the furthest thing from his mind was revolutionizing learning. He just wanted to make digital signal processing a more palatable subject for his students. Baraniuk, an affable professor with a venturesome spirit, was frustrated that half of his undergraduate class would glaze over when he taught signal processing, perhaps because it involves a lot of math. But then he explained the social ramifications of signal processing — for wiretapping, the Internet, the airwaves, radar, and much more. Students got excited. +={Baraniuk, Richard+13;Rice University+13} + +“If I wanted to reach a broader class of people, outside of Rice University,” Baraniuk said, “that would be very difficult. The standard thing is to write your own book.” But he quickly realized that writing the 176th book ever written on signal processing (he counted) would not be very efficient or effective. It would take years to write, and then additional years to traverse the editorial, production, and distribution process. And even if the book were successful, it would reach only five thousand readers. Finally, it would be a static artifact, lacking the timeliness and interactivity of online dialogue. 
A book, Baraniuk ruefully observed, “redisconnects things.”~{ Interview with Richard Baraniuk, January 21, 2008. }~ + +As chance had it, Baraniuk’s research group at Rice was just discovering open-source software. “It was 1999, and we were moving all of our workstations to Linux,” he recalled. “It was just so robust and high-quality, even at that time, and it was being worked on by thousands of people.” Baraniuk remembers having an epiphany: “What if we took books and ‘chunked them apart,’ just like software? And what if we made the IP open so that the books would be free to re-use and remix in different ways?’” +={Linux:education, and} + +The vision was exciting, but the tools for realizing it were virtually nonexistent. The technologies for collaborative authoring and the legal licenses for sharing, not to mention the financing and outreach for the idea, would all have to be developed. Fortunately, the Rice University administration understood the huge potential and helped Baraniuk raise $1 million to put together a skunk works of colleagues to devise a suitable software architecture and nonprofit plan. A colleague, Don Johnson, dubbed the enterprise “Connexions.” +={Connexions+10;education:Connexions+10;Johnson, Don} + +The group made a number of choices that turned out to be remarkably shrewd. Instead of organizing teaching materials into a “course” or a “textbook,” for example, the Connexions planners decided to build an open ecosystem of shared knowledge. Just as the Web is “small pieces loosely joined,” as David Weinberger’s 2003 book put it, so Connexions decided that the best way to structure its educational content was as discrete modules (such as “signal processing”) that could be reused in any number of contexts. The planners also decided to build a system on the open Semantic Web format rather than a simple interlinking of PDF files. 
This choice meant that the system would not be tethered to a proprietary or static way of displaying information, but could adapt and scale in the networked environment. Modules of content could be more easily identified and used for many different purposes, in flexible ways. +={education:Semantic Web, and;Semantic Web;World Wide Web:Semantic Web;Weinberger, David} + +By the summer of 2000, the first version of Connexions went live with two Rice University courses, Fundamentals of Electronic Engineering and Introduction to Physical Electronics. The goal was to let anyone create educational materials and put them in the repository. Anyone could copy and customize material on the site, or mix it with new material in order to create new books and courses. Materials could even be used to make commercial products such as Web courses, CD-ROMs, and printed books. By the end of 2000, two hundred course modules were available on Connexions: a modest but promising start. + +It turned out to be an auspicious moment to launch an open platform for sharing. A wave of Web 2.0 applications and tools was just beginning to appear on the Internet. Innovators with the savvy to take advantage of open networks, in the style of free and open software, could amass huge participatory communities in very short order. For Connexions, the living proof was Kitty Schmidt-Jones, a private piano teacher from Champaign, Illinois. She discovered Connexions through her husband and posted a 276-page book on music theory to the site. “Kitty is not the kind of person who would be a music textbook author,” said Baraniuk, “but she thought that music education is important, and said, ‘I can do this, too!’” By 2007 /{Understanding Basic Music Theory}/ had been downloaded more than 7.5 million times from people around the world. A Connexions staffer attending a conference in Lithuania met an educator from Mongolia who lit up at the mention of Schmidt-Jones. “We use her work in our schools!” he said.
+={education:Web 2.0 applications, and;Web 2.0: applications:education, and;Schmidt-Jones, Kitty} + +Besides curating a collection of educational content, Connexions has developed a variety of open-source applications to let authors create, remix, share, and print content easily. The project has also developed systems to let users rate the quality of materials. Professional societies, editorial boards of journals, and even informal groups can use a customizable software “lens” to tag the quality of Connexions modules, which can then be organized and retrieved according to a given lens. + +It was a stroke of good fortune when Baraniuk and his associates learned, in 2002, that Lawrence Lessig was developing a new licensing project called Creative Commons. As the CC team drafted its licenses, Connexions helped it understand academic needs and then became one of the very first institutional adopters of the CC licenses. Connexions decided to require that its contributors license their works under the least restrictive CC license, CC-BY (Attribution). This was a simple decision because most textbook authors write to reach large readerships, not to make money. +={Lessig, Lawrence:CC licenses, and} + +The real expansion of Connexions as a major international repository of teaching materials did not occur until early 2004, when the software platform had been sufficiently refined. Then, with virtually no publicity, global usage of the Connexions site took off. It helped that Rice University has never sought to “own” the project. Although it administers the project, the university has deliberately encouraged grassroots participation from around the world and across institutions. Electrical engineering faculty at ten major universities are cooperating in developing curricula, for example, and diverse communities of authors are adding to content collections in music, engineering, physics, chemistry, bioinformatics, nanotechnology, and history. 
In 2008, Connexions had 5,801 learning modules woven into 344 collections. More than 1 million people from 194 countries are using the materials, many of which are written in Chinese, Italian, Spanish, and other languages. + +One of Connexions’s neatest tricks is offering printed textbooks for a fraction of the price of conventional textbooks. Because the content is drawn from the commons, a 300-page hardback engineering textbook that normally sells for $125 can be bought for $25, through a print-on-demand publishing partner, QOOP.com. Ten percent of the purchase price is earmarked to support Connexions, and another 10 percent helps disadvantaged students obtain textbooks for free. Unlike conventional textbooks, which may be a year or two old, Connexions materials are generally up-to-date. + +By providing an alternative to the spiraling costs of academic publishing, Connexions’s publishing model may actually help a number of academic disciplines pursue their scholarly missions. Over the past decade, some sixty university presses have closed or downsized for economic reasons. “If you’re in art history, anthropology, or the humanities, you get tenure based on your monographs published by a university press,” Baraniuk said. “The problem is that, as university presses shut down, there’s nowhere to publish books anymore.” It is often financially prohibitive to publish art history books, for example, because such books typically require high-quality production and small press runs. An overly expensive market structure is blocking the flow of new scholarly publishing. +={education:university presses+1} + +One solution: a new all-digital hybrid business model for academic publishing. As the Connexions platform has proved itself, Rice University saw the virtue of reopening Rice University Press (RUP), which it had closed ten years earlier.~{ Rice University Press homepage, at http://www.ricepress.rice.edu.
}~ The new RUP retains the editorial structure, high standards, and focus on special fields of a conventional academic press, but it now works within a “branded partition” of Connexions. RUP posts all of its books online as soon as the manuscripts are finalized, and all books are licensed under a CC-BY (Attribution) license. The press does not have to pay for any warehouse or distribution costs because any physical copies of the books are printed on demand. The sales price includes a mission-support fee for RUP and the author’s royalty. “Because the RUP has eliminated all the back-end costs,” said Baraniuk, “they figure they can run it from five to ten times more cheaply than a regular university press.” + +The Connexions publishing model has inspired a group of more than twenty community colleges to develop its own public-domain textbooks to compete with expensive commercial textbooks. The Community College Consortium for Open Educational Resources~{ http://cccoer.pbwiki.com. }~ —led by Foothill–De Anza Community College District in Los Altos, California — plans to publish the ten most popular textbooks used in community colleges, and expand from there. The consortium will make the books available for free online and sell hardcover versions for less than thirty dollars. Even if the effort gains only a small slice of the textbook market, it will help hold down the prices of commercial textbooks and demonstrate the viability of a new publishing model. More to the point, by slashing one of the biggest costs facing community college students, the project will help thousands of lower-income students to stay in college. + +2~ MIT’s OpenCourseWare Initiative +={MIT OpenCourseWare+8;OpenCourseWare+8;education:OpenCourseWare+8} + +The other pioneering visionary in open education has been MIT.
In April 2001, MIT president Charles Vest shocked the world when he announced that MIT would begin to put the materials for all two thousand of its courses online for anyone to use, for free. The new initiative, called OpenCourseWare, would cover a wide array of instructional materials: lecture notes, class assignments, problem sets, syllabi, simulations, exams, and video lectures. Putting the materials online in a searchable, consistent format was expected to take ten years and cost tens of millions of dollars. (The Hewlett and Mellon foundations initially stepped forward with two $5.5 million grants, supplemented by $1 million from MIT.) +={Vest, Charles+1} + +The project had its origins two years earlier, in 1999, when President Vest charged a study group with exploring how the university might develop online educational modules for lifelong learning. The assumption was that it would sell MIT-branded course materials to the budding “e-learning” market. At the time, Columbia University was developing Fathom.com, a bold for-profit co-venture with thirteen other institutions, to sell a wide variety of digital content. Publishers and universities alike envisioned a lucrative new market for academic and cultural materials. + +OpenCourseWare (OCW) was a startling move because it flatly rejected this ambition, and appeared to be either a foolish or magnanimous giveaway of extremely valuable information. Knowledge was assumed to be a species of property that should be sold for as dear a price as possible; few people at the time recognized that the Great Value Shift on the Internet was reversing this logic. The idea that giving information away might actually yield greater gains— by enhancing an institution’s visibility, respect, and influence on a global scale — was not seen as credible. After all, where’s the money? 
+={Great Value Shift;Internet:Great Value Shift, and} + +After studying the matter closely, MIT decided that the online market was not likely to be a boon, and that posting course materials online would send a strong message about MIT’s values. President Vest conceded that the plan “looks counter-intuitive in a market-driven world.” But he stressed that OpenCourseWare would combine “the traditional openness and outreach and democratizing influence of American education and the ability of the Web to make vast amounts of information instantly available.”~{ MIT press release, “MIT to make nearly all course materials available free on the World Wide Web,” April 4, 2001.}~ Professor Steven Lerman, one of the architects of the OCW plan, told the /{New York Times}/, “Selling content for profit, or trying in some ways to commercialize one of the core intellectual activities of the university, seemed less attractive to people at a deep level than finding ways to disseminate it as broadly as possible.”~{ Carey Goldberg, “Auditing Classes at M.I.T., on the Web and Free,” /{New York Times}/, April 4, 2001, p. 1. }~ +={Vest, Charles;Lerman, Steven} + +MIT also realized the dangers of propertizing college courses and teaching materials, said computer scientist Hal Abelson, another member of the OCW study group (and a CC board member). Ownership, he said, “can be profoundly destructive to the idea of a university community . . . The more people can stop talking about property and start talking about the nature of a faculty member’s commitment to the institution, the healthier the discussion will be. It’s not really about what you own as a faculty member; it’s about what you do as a faculty member.”~{ Interview with Hal Abelson, “OpenCourseWare and the Mission of MIT,” /{Academe}/, September/October 2002, pp. 25–26. }~ +={Abelson, Hal:OpenCourseWare, and} + +School officials stressed that using MIT courseware on the Web is not the same as an MIT education. 
Indeed, the free materials underscore the fact that what really distinguishes an MIT education is one’s participation in a learning community. Unlike the Connexions content, MIT’s OpenCourseWare is a fairly static set of course materials; they are not modular or constantly updated. In addition, they are licensed under a CC BY-NC-SA (Attribution-NonCommercial-ShareAlike) license. While this prevents businesses from profiting from MIT course materials, it also prevents other educational institutions from remixing them into new courses or textbooks. +={communities:learning;education:learning community, in a} + +Despite these limitations, MIT’s OCW materials have been profoundly influential. The course Laboratory in Software Engineering, for example, has been used by students in Karachi, Pakistan; the island of Mauritius; Vienna, Austria; and Kansas City, Missouri, among scores of other places around the world.~{ David Diamond, “MIT Everyware,” /{Wired}/, September 2003. }~ Ten of the leading Chinese universities now use hundreds of MIT courses, leading three noted OER experts, Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, to conclude that MIT’s OCW “has had a major impact on Chinese education.”~{ Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, “A Review of the Open Educational Resources (OER) Movement: Achievements, Challenges and New Opportunities,” February 2007, at http://www.oerderves.org/wp-content/uploads/2007/03/a-review-of-the-open-educational-resources-oer-movement_final.pdf, p. 23. }~ Noting the life-changing impact that OCW has had on students in rural villages in China and West Africa, Atkins and his co-authors cite “the power of the OCW as a means for cross-cultural engagement.” Over the course of four years, from October 2003 through 2007, the OCW site received nearly 16 million visits; half were newcomers and half were repeat visits. 
+={Atkins, Daniel E.;Brown, John Seely;Hammond, Allen L.;education:OER movement;Open Educational Resources (OER) movement} + +OCW is becoming a more pervasive international ethic now that more than 120 educational institutions in twenty nations have banded together to form the OpenCourseWare Consortium. Its goal is to create “a broad and deep body of open educational content using a shared model.”~{ OpenCourseWare Consortium, at http://www.ocwconsortium.org. }~ Although plenty of universities are still trying to make money from distance education courses, a growing number of colleges and universities realize that OCW helps faculty connect with other interested faculty around the world, build a college’s public recognition and recruitment, and advance knowledge as a public good. + +2~ The Rise of the Open Educational Resources Movement +={education:OER movement+19;Open Educational Resources (OER) movement+19} + +While Connexions and MIT’s OpenCourseWare have understandably garnered a great deal of attention, all sorts of fascinating educational projects, big and small, have popped up on the Internet as Web 2.0 innovations matured. Some of these projects have become celebrated, such as Wikipedia, the Public Library of Science, and the Internet Archive. Others, though less celebrated, represent a dazzling mosaic of educational innovation and new possibilities. In a sense, the Long Tail has come to education; even the most obscure subjects have a sustainable niche on the Internet. The groundswell has even produced its own theorists, conveners, and infrastructure builders. Utah State University hosts the Center for Open Sustainable Learning, which is a clearinghouse for open educational tools. Carnegie Mellon has an Open Learning Initiative that designs educational courses. And so on. +={Long Tail;Wikipedia} + +While American institutions and educators have been the first movers in this field, it has quickly taken on an international grassroots flavor. 
Thousands of commoners from around the world have started their own projects. MathWorld has become the Web’s most extensive mathematical resource. Curriki is a wiki that offers lesson plans and guidance for teachers. The British Library’s Online Gallery features digitized versions of Mozart’s musical diary and sketches by Leonardo da Vinci. U.K. and Australian high school students can now use the Internet to operate the Faulkes Telescope on the island of Maui, Hawaii. Students around the world do much the same with Bugscope, a scanning electron microscope that can be operated remotely. + +It is hard to set a precise date when the practitioners in this area realized that such wildly diverse projects might constitute a coherent movement with a shared agenda. But as more grantees began to discover each other, the movement-in-formation adopted a rather ungainly name to describe itself — “Open Educational Resources,” or OER. + +Most OER projects share a simple and powerful idea — “that the world’s knowledge is a public good and that technology in general and the World Wide Web in particular provide an extraordinary opportunity for everyone to share, use and reuse knowledge.” That is how Atkins and his co-authors define OER. It consists of “teaching, learning and research resources that reside in the public domain or have been released under an intellectual property license that permits their free use or re-purposing by others.”~{ Ibid. }~ +={Atkins, Daniel E.;Brown, John Seely} + +The heart of the OER movement is, of course, open sharing and collaboration. OER advocates regard learning as an intrinsically social process, and so they believe that knowledge and learning tools ought to freely circulate. Inspired by the GPL and the CC licenses, OER advocates believe they should be free to copy, modify, and improve their learning tools and pass them forward to others. 
There is a presumption that artificial barriers to the free flow of information should be eliminated, and that teachers and learners should be empowered to create their own knowledge commons. +={Creative Commons (CC) licenses:tools for creating commons, as} + +The OER movement has a special importance for people who want to learn but don’t have the money or resources, which is to say, people in developing nations, low-income people, and people with specialized learning needs. For the 4 billion people who live in the developing world, schooling is a privilege, textbooks are rare, and money is scarce. In many African nations, there would not be libraries if books were not photocopied. The OER movement aspires to address these needs. OER projects can provide important benefits in industrialized nations, too, where subscriptions to research journals are often prohibitively expensive and many community college students drop out because textbooks cost more than tuition. + +The OER movement is currently in a formative stage, still trying to make sense of the many players in the movement and understand the complex impediments to its progress. Some of this could be seen at a “speed geeking” session at the iCommons Summit in 2007 in Dubrovnik, Croatia. Speed geeking, a puckish variation on “speed dating,” consists of people listening to a short presentation, asking questions and then moving on to the next presentation. After five minutes, a moderator blows a whistle and shouts, “Everyone move — now!” A speed geek can learn about twelve different projects, and meet twelve interesting people, in a single hour. +={Croatia} + +% added Croatia missing Croatia reference in source book index + +In this case, the speed geeking took place in a sweltering loft space without air-conditioning, in a medieval building overlooking the Adriatic Sea. 
At the first station, a group of participants marveled at a sturdy lime-green laptop of a kind that was about to be distributed to millions of children around the world. The One Laptop Per Child project, the brainchild of Nicholas Negroponte of MIT’s Media Lab, is an ambitious nonprofit initiative to build a sturdy, kid-friendly laptop filled with open-source software and Wi-Fi capabilities for $100.~{ See, e.g., John Markoff, “For $150, Third-World Laptop Stirs a Big Debate,” /{New York Times}/, November 30, 2006. }~ (The cost turned out to be $188, but is expected to decline as production volume grows.) Hundreds of thousands of the so-called XO laptops have now been distributed to kids in Peru, Uruguay, Mexico and other poor nations. +={Negroponte, Nicholas;One Laptop Per Child} + +/{Tweet!}/ Next stop: the Free High School Science Textbooks project in South Africa is developing a free set of science textbooks for students in grades ten through twelve. The project depends on volunteers to write modules of text about various physics, chemistry, and mathematical topics. Paid editors then craft the text into a coherent, high-quality textbook; printing is funded by donations. +={Free High School Science Textbooks} + +Five minutes later, it was on to Educalibre, a Chilean project that is installing free software on old computers so that they can be reused in classrooms. Educalibre is also trying to integrate free software into high school curricula, especially math. The project seeks to bring open-source software principles into formal education. + +Next, Delia Browne of the National Education Access Licence for Schools, or NEALS, explained that some ten thousand Australian schools pay millions of dollars each year to collecting societies in order to reprint materials that the Australian schools themselves have produced. 
NEALS wants to eliminate this expense, as well as millions of dollars in photocopying expenses, by creating a vast new commons of freely shareable educational materials. Its solution is to persuade Australian schools, as copyright holders, to adopt a special license so that participating schools can copy and share each other’s materials. +={Browne, Delia;National Education Access Licence for Schools (NEALS)} + +/{Tweet!}/ At the next station, Ed Bice of San Francisco explained how his nonprofit group, Meedan.net, is developing a “virtual town square” for Arabic- and English-speaking Internet users. Using realtime translation and social networking tools, the site aspires to open up a new global conversation between Arabs and the rest of the world. It plans to break down cultural barriers while opening up educational opportunities to Arab populations. +={Bice, Ed} + +/{Tweet! Tweet!}/ Neeru Paharia, a former executive director of the Creative Commons, introduced her fledgling project, AcaWiki. Paharia is concerned that too many academic articles are locked behind paywalls and are not readily accessible to everyone. AcaWiki plans to recruit graduate students, academics, and citizens to write summaries of academic papers. Since many grad students make abstracts as part of their routine research, it would not be difficult to pool thousands of summaries into a highly useful, searchable Web collection. +={Paharia, Neeru} + +The speed geekers in Dubrovnik were sweaty and overstimulated at the end, but gratified to learn that there are a great many OER projects under way throughout the world; they just aren’t very well known or coordinated with one another. Two of the participants — J. 
Philipp Schmidt of the University of the Western Cape and Mark Surman of the Shuttleworth Foundation, both of South Africa — conceded that “there is still a great deal of fuzziness about what this movement includes,” and that “we don’t yet have a good ‘map’ of open education.” But the significance of grassroots initiatives is unmistakable. “There is a movement afoot here,” they concluded, “and it is a movement with an aim no less than making learning accessible and adaptable for all.”~{ J. Philipp Schmidt and Mark Surman, “Open Sourcing Education: Learning and Wisdom from the iSummit 2007,” September 2, 2007, at http://icommons.org/download_banco/open-sourcing-education-learning-and-wisdom-from-isummit-2007. }~ “Education,” another participant predicted, “will drive the future of the Commons movement.” +={Schmidt, J. Philipp;Surman, Mark} + +In a sign that the OER movement is getting serious as a movement, thirty of its leaders met in Cape Town, South Africa, and in January 2008 issued the Cape Town Open Education Declaration.~{ http://www.capetowndeclaration.org. Schmidt and Surman, “Open Sourcing Education.” }~ The declaration is a call to make learning materials more freely available online, and to improve education and learning by making them more collaborative, flexible, and locally relevant. The declaration outlines the challenge: “Many educators remain unaware of the growing pool of open educational resources. Many governments and educational institutions are either unaware or unconvinced of the benefits of open education. Differences among licensing schemes for open resources create confusion and incompatibility. 
And, of course, the majority of the world does not have access to the computers and networks that are integral to most current open education efforts.” + +New funding support is materializing from foundations like the Open Society Institute and the Shuttleworth Foundation, and the Creative Commons has instigated a new project, ccLearn, headed by Ahrash Bissell, to help coordinate OER factions and tackle barriers to further progress. +={Bissell, Ahrash} + +Despite the challenges it faces, the Open Educational Resources movement has a promising future if only because it has such an appealing ethos and practical value. It offers to lower the costs and increase the efficiencies of learning. It helps to generate high-quality materials that address specific learning needs. Where markets are too expensive or unresponsive, collective provisioning through the commons can meet needs effectively and in socially convivial ways. + +Such intangible satisfactions may be one of the secrets of the OER movement’s success to date. Institutions and individuals take pleasure in contributing to the public good. There is pleasure in helping people who thirst for an education, whether in Africa or in a community college, to acquire the resources they need. For learners, the OER movement offers new, more flexible styles of learning. Over time, it seems likely that OER projects will transform the familiar “information transfer” models of formal education into more informal and participatory learning communities. Passive students will more easily become passionate, self-directed learners. + +Finally, at a time of great geopolitical rivalries and cultural animosities, the OER movement holds itself forth as an arena of transnational cooperation. It regards diversity as a strength and social inequity as a challenge to be squarely met. 
It is a measure of the movement’s idealism that Schmidt and Surman, the South African OER commoners, compare open education to “a flock of migratory geese, moving back and forth between North and South. The flock combines birds from all places. Each goose takes a turn leading the flock, taking the strain, and then handing over to their peers. The flock is not confined to just the North, or the South. It flourishes as a global movement.”~{ Schmidt and Surman, “Open Sourcing Education.” }~ +={Schmidt, J. Philipp;Surman, Mark} + +1~ CONCLUSION: THE DIGITAL REPUBLIC AND THE FUTURE OF DEMOCRATIC CULTURE +={free culture+56} + +/{You never change things by fighting the existing reality. To change something, build a new model that makes the existing model obsolete. —R. Buckminster Fuller}/ +={Fuller, R. Buckminster} + +Legend has it that, upon leaving Independence Hall on the final day of the Constitutional Convention in 1787, Benjamin Franklin was approached by a woman, who asked, “Well, Doctor, what have we got — a Republic or a Monarchy?” Franklin famously replied, “A Republic, if you can keep it.” The American colonies had imagined and engineered a new constitutional order, but its survival would depend on countless new struggles and innovations. An American civic culture had to be invented. +={Franklin, Benjamin+1} + +The Franklin vignette might well be applied to the digital republic that the commoners have built. Except that, instead of asking, “Well, Mr. Stallman and Professor Lessig, what have we got — a free culture or a proprietary tyranny?” the question might better be posed to the commoners themselves. Their very existence answers the question, Tyranny or freedom? Free culture exists. It exists to the extent that people practice its ideals. It is not pervasive; many people have no idea what it is; it overlaps in fuzzy ways with the market. 
But it is flourishing wherever online communities have devised satisfactory commons structures — through law, software, and social norms — to capture the value that they create. Or, as the American Framers put it, to secure the blessings of liberty to ourselves and our posterity. +={commoners:influence of+3} + +As the preceding chapters make clear, the commoners are now a respected force in culture, politics, and economics. Their influence can be felt in varying degrees in the worlds of music, video, photography, and books; in software, Web design, and Internet policies; in social networks and peer-to-peer communities; in business, science, and education; and in scores of countries that have ported the Creative Commons licenses and developed their own commons-based projects. + +Thanks to the Internet, the commons is now a distinct sector of economic production and social experience. It is a source of “value creation” that both complements and competes with markets. It is an arena of social association, self-governance, and collective provisioning that is responsive and trustworthy in ways that government often is not. In a sense, the commons sector is a recapitulation of civil society, as described by Alexis de Tocqueville, but with different capacities. +={Tocqueville, Alexis de;Internet:socially created value of} + +Yet even with the great advances that the commoners have made in building their own shared platforms, tools, and content, the digital republic is not secure. 
In most countries, the commoners have less conventional political power than corporations, which means that the interests of citizens, consumers, and users are scanted in the policies that govern market competition, intellectual property, and life on the Internet.~{ For a nice overview of these policy contests, see Yochai Benkler, /{The Wealth of Networks: How Social Production Transforms Markets and Freedom}/ (New Haven, CT: Yale University Press, 2006), chapter 11, “The Battle Over the Institutional Ecology of the Digital Environment,” pp. 383–459. }~ Faced with the Great Value Shift, mass-media and entertainment corporations are not eager to surrender their historic market franchises to newcomers without a fight; they are resisting competition from open business models and the commons. +={Great Value Shift;value;Centralized Media:competition, and;Internet:attempts to control+3} + +In the United States, cable broadcast operators and telephone carriers are threatening the very future of the Internet as a commons infrastructure. They wish to assert greater control over Web access and traffic, and so are staunchly resisting “net neutrality” rules that would require them to act as nondiscriminatory common carriers. They would like to leverage their roles as oligopolistic gatekeepers to the Internet, and boost their revenues, by choosing whose Web sites will receive superior transmission and whose communications may be censored or put in the “slow lane.” +={Internet:net neutrality rules on|future of} + +At a further extreme, authoritarian countries such as China, Saudi Arabia, Egypt, and Singapore have shown that national governments still retain great powers to censor and control Internet communications.~{ Shanthi Kalathil and Taylor C. Boas, /{Open Networks, Closed Regimes: The Impact of the Internet on Authoritarian Rule}/ (Washington, DC: Carnegie Endowment for International Peace, 2003). 
}~ Even the United States government is reportedly engaged in extensive surveillance of Internet traffic, ostensibly for antiterrorism purposes. Meanwhile, many poor nations, especially in Africa and Asia, are struggling simply to get online and create their own digital commons. + +These battles are all part of a larger struggle over “the institutional ecology of the digital environment,” in Yochai Benkler’s words — a struggle that is likely to continue for many years. What powers and capabilities will the commoners and their institutions have relative to business and government, and how will they be able to protect and enhance the value created within the commons? +={Benkler, Yochai:social movements, on} + +2~ A New Species of Citizenship +={citizenship:new species of+22;democracy:new species of citizenship+22;free culture:new species of citizenship, as+22} + +Perhaps the most enduring contribution of the free software, free culture, and other “open movements” has been their invention of a new species of citizenship. Despite significant differences of philosophy and implementation, these commons share some basic values about access, use, and reuse of creative works and information. No matter their special passions, the commoners tend to be improvisational, resourceful, self-directed, collaborative, and committed to democratic ideals. They celebrate a diversity of aesthetics, viewpoints, and cultures. They are egalitarian in spirit yet respectful of talent and achievement. There is a strong predilection to share because the accrual of digital contributions (code, content, metatags) will lead to a greater good for all and perhaps even democratic change. But there is no hostility to commercial activity — indeed, there is a lively admiration for entrepreneurialism — so long as it does not violate basic creative and civic freedoms or core principles of the Internet (openness, interoperability, sharing). 
The disagreements that do exist center on how best to achieve those goals. +={free culture:sharing ethic of} + +As this book has shown, the Internet is enabling a new species of citizenship in modern life. It is not just a “nice thing.” It is a powerful force for change. The new technologies have been instrumental in helping the commoners imagine and build a digital republic of their own. Over the long term, this citizenship and the culture that it is fostering are likely to be a politically transformative force. They just might help real-world democracies restore a measure of their waning legitimacy and competence.~{ David Bollier, /{The Rise of Netpolitik: How the Internet Is Changing International Politics and Diplomacy}/ (Washington, DC: Aspen Institute Communications and Society Program, 2003). }~ +={Internet:mass participation in} + +David R. Johnson, a lawyer and scholar, describes the citizen of the Internet — the “netizen” — as a significant historical development because he or she can potentially compete with government as a source of binding rule sets. In a brilliant essay, “The Life of the Law Online,” Johnson writes that “we haven’t had a real competition for survival among rule sets. The competition is only between the rule of (our one) law and, presumably, anarchy. So the tendency of all rule sets to become more complicated over time, especially when written by people considering only parts of the system in analytical isolation, has not been checked by evolutionary forces.”~{ David R. Johnson, “The Life of the Law Online,” /{First Monday}/ 11, no. 2 (February 2006), at http://firstmonday.org/issues/issue11_2/johnson/index.html. }~ Government has an unchecked monopoly on lawmaking even though its relationship to the governed, whose consent is vital, is now greatly attenuated. +={Johnson, David R.+1;commoners:“netizens”+1} + +One evolutionary “competitor” to government-made law and to markets is the netizen — or, in my terms, the commoner. 
For the most part, members of a commons generate and maintain the rules that govern their collective. By Johnson’s reckoning, the commons must be considered a new social metabolism for creating law; it is a new type of “legal organism.” It is, in Johnson’s words, “a self-causing legal order composed of systems that adopt goals that serve the values of those they regulate, without excessively imposing those goals on others.” +={commons:new type of legal organism;law:commons as new type of legal organism} + +A commons is a kind of biological entity operating in a complex cultural ecosystem. It has its own internal systems for managing its affairs, interacting with its environment, repairing itself, and defining its own persistent identity. It is a force by which ordinary people can express their deepest interests and passions, directly and without institutional mediation, on a global stage. This is an unprecedented capacity in communications, culture, and, indeed, human history. +={commons:definition of} + +To understand why the commoner represents a great leap forward in citizenship, it helps to consider the history of citizenship in the oldest democracy in the world, the United States. In his book /{The Good Citizen}/, sociologist Michael Schudson describes the evolution of three distinct types of citizenship over the past three centuries: +={Schudson, Michael+2;citizenship:history-making+16;Internet:citizenship, and+16} + +_1 When the nation was founded, being a citizen meant little more than for property-owning white males to delegate authority to a local gentleman — and accept his complimentary glass of rum on election day. This “politics of assent” gave way early in the nineteenth century to a “politics of parties.” Parties conducted elaborate campaigns of torchlight processions and monster meetings; voting day was filled with banter, banners, fighting and drinking. . . . 
The third model of citizenship, ushered in by Progressive reformers, was a “politics of information.” Campaigning became less emotional and more educational. Voting was by secret ballot.~{ Michael Schudson, /{The Good Citizen: A History of American Civic Life}/ (New York: Free Press, 1998), dust jacket. }~ + +We are heirs to the “politics of information,” a model of citizenship that presumes, as economics does, that we are rational actors who, if armed with sufficient quantities of high-quality information, will make educated decisions and optimize civic outcomes. But as Walter Lippmann noted and Schudson echoes, “if democracy requires omnicompetence and omniscience from its citizens, it is a lost cause.”~{ Ibid., p. 310. }~ Life is too busy, fast, and complex. A new type of citizenship is needed. Schudson offers a fairly weak prescription — the “monitorial citizen,” a watchdog who vigilantly monitors the behavior of power. +={Lippmann, Walter} + +But it is precisely here that the Internet is offering up a new, more muscular model of citizenship. I call it /{history-making citizenship}/. The rise of the blogosphere over the past ten years is emblematic of this new paradigm of citizenship. So is citizen-journalism, free software, Wikipedia, the Open Educational Resources movement, open business models like Jamendo and Flickr, and the Creative Commons and iCommons communities. In one sense, the citizenship that these groups practice is “monitorial” in that their members spend a great deal of time watching and discussing. But “monitoring” barely begins to describe their activities. The commoners have the ability — rare in pre-Internet civic life — to publish and incite others to action, and then organize and follow through, using a growing variety of powerful tools. 
With the advent of blogs, meetups, social networking, text messaging, and many other digital systems, citizens are able to communicate, coordinate, organize, and take timely action on a wide range of matters, including matters of public and political concern. +={commoners:influence of+1} + +I call the new sorts of citizen behaviors “history-making” because ordinary people are able to assert moral agency and participate in making change.~{ I am inspired in this choice of terms by Charles Spinosa, Fernando Flores, and Hubert L. Dreyfus in their book, /{Disclosing New Worlds: Entrepreneurship, Democratic Action, and the Cultivation of Solidarity}/ (Cambridge, MA: MIT Press, 1997). }~ This capacity is not reserved chiefly to large, impersonal institutions such as corporations, government agencies, and other bureaucracies. It is not a mere “participatory citizenship” in which people can volunteer their energies to a larger, more influential leader, political party, or institution in order to help out. It is a citizenship in which /{the commoners themselves}/ choose projects that suit their talents and passions. Dispersed, unorganized groups of strangers can build their own platforms and social norms for pursuing their goals; instigate public action that would not otherwise occur (and that may clash with the practices of existing institutions); and push forward their own distinctive agenda. +={commons:political implications of} + +These behaviors exist in some measure in offline realms, of course, but they are a growing norm in the digital republic. A few examples will suffice to make the point. The Web helped create and propel a handful of cause-oriented candidacies — Howard Dean, Ron Paul, Ned Lamont~[* Lamont was an insurgent candidate for U.S. Senate from Connecticut challenging Senator Joseph Lieberman in a campaign that helped culturally validate opposition to the U.S. 
war in Iraq.]~ — who rapidly raised enormous sums of money, galvanized large numbers of passionate supporters, and altered mainstream political discourse. Although none prevailed in their races, Barack Obama made a quantum leap in online organizing in 2008, raising $50 million in a single month from supporters via the Internet. Obama’s candidacy was buoyed by the rise of the “netroots” — Web activists with a progressive political agenda — whose size and credibility enable them to sway votes in Congress, raise significant amounts of campaign funds, and influence local activism. The stories are now legion about blogs affecting political life — from the resignation of Senate majority leader Trent Lott after he praised the racist past of Senator Strom Thurmond at his hundredth birthday party, to the electoral defeat of Senate candidate George Allen after his uttering of an ethnic slur, /{macaca}/, was posted on YouTube. +={Dean, Howard;Lamont, Ned;Obama, Barack;Paul, Ron;Internet:political campaigns on;Allen, George;Lott, Trent;YouTube} + +Citizens are now able to initiate their own policy initiatives without first persuading the mainstream media or political parties to validate them as worthy. For example, a handful of citizens troubled by evidence of “hackable” electronic voting machines exposed the defects of the Diebold machines and the company’s efforts to thwart public scrutiny and reforms.~{ See, e.g., Yochai Benkler, /{The Wealth of Networks}/, pp. 225–32. }~ (The effort has led to a nationwide citizen effort, www.blackboxvoting.org, to expose security problems with voting machines and vote counting.) An ad hoc group of activists, lawyers, academics, and journalists spontaneously formed around a public wiki dealing with the lethal side effects of a bestselling antipsychotic drug Zyprexa, and the manufacturer’s allegedly illegal conduct in suppressing evidence of the drug’s risks. 
(Prosecutors later sought a $1 billion fine against Eli Lilly.)~{ Jonah Bossewitch, “The Zyprexa Kills Campaign: Peer Production and the Frontiers of Radical Pedagogy,” /{Re-public}/, at http://www.re-public.gr/en/?p=144. }~ + +The Web is giving individuals extra-institutional public platforms for articulating their own facts and interpretations of culture. It is enabling them to go far beyond voting and citizen vigilance, to mount citizen-led interventions in politics and governance. History-making citizens can compete with the mass media as an arbiter of cultural and political reality. They can expose the factual errors and lack of independence of /{New York Times}/ reporters; reveal the editorial biases of the “MSM” — mainstream media — by offering their own videotape snippets on YouTube; they can even be pacesetters for the MSM, as the blog Firedoglake did in its relentless reporting of the “Scooter” Libby trial (Libby, one of Vice President Cheney’s top aides, was convicted of obstruction of justice and perjury in connection with press leaks about CIA agent Valerie Plame.) Citizen-journalists, amateur videographers, genuine experts who have created their own Web platforms, parodists, dirty tricksters, and countless others are challenging elite control of the news agenda. It is no wonder that commercial journalism is suffering an identity crisis. Institutional authority is being trumped by the “social warranting” of online communities, many of which function as a kind of participatory meritocracy. +={Libby, “Scooter”;YouTube} + +History-making citizenship is not without its deficiencies. Rumors, misinformation, and polarized debate are common in this more open, unmediated environment. Its crowning virtue is its potential ability to mobilize the energies and creativity of huge numbers of people. 
GNU/Linux improbably drew upon the talents of tens of thousands of programmers; certainly our contemporary world with its countless problems could use some of this elixir — platforms that can elicit distributed creativity, specialized talent, passionate commitment, and social legitimacy. In 2005 Joi Ito, then chairman of the board of the Creative Commons, wrote: “Traditional forms of representative democracy can barely manage the scale, complexity and speed of the issues in the world today. Representatives of sovereign nations negotiating with each other in global dialog are limited in their ability to solve global issues. The monolithic media and its increasingly simplistic representation of the world cannot provide the competition of ideas necessary to reach informed, viable consensus.”~{ Joichi Ito, “Emergent Democracy,” chapter 1 in John Lebkowsky and Mitch Ratcliffe, eds., /{Extreme Democracy}/ (Durham, NC: Lulu.com, 2005), at http://extremedemocracy.com/chapters/Chapter%20One-Ito.pdf. }~ Ito concluded that a new, not-yet-understood model of “emergent democracy” is likely to materialize as the digital revolution proceeds. A civic order consisting of “intentional blog communities, ad hoc advocacy coalitions and activist networks” could begin to tackle many urgent problems. +={Ito, Joichi;GNU/Linux;democracy:emergent+1|traditional forms of+5} + +Clearly, the first imperative in developing a new framework to host representative democracy is to ensure that the electronic commons be allowed to exist in the first place. Without net neutrality, citizens could very well be stifled in their ability to participate on their own terms, in their own voices. 
If proprietary policies or technologies are allowed to override citizen interests (Verizon Wireless in 2007 prevented the transmission of abortion rights messages on its text-messaging system, for example~{ Adam Liptak, “Verizon Reverses Itself on Abortion Messages,” /{New York Times}/, September 27, 2007, at http://www.nytimes.com/2007/09/27/business/27cnd-verizon.html. }~), then any hope for history-making citizenship will be stillborn. + +Beyond such near-term concerns, however, the emerging digital republic is embroiled in a much larger structural tension with terrestrial “real world” governments. The commoner is likely to regard the rules forged in online commons as more legitimate and appropriate than those mandated by government. Again, David R. Johnson: +={Johnson, David R.} + +_1 The goals of a successful legal organism must be agreed upon by those who live within it, because a legal system is nothing more than a collective conversation about shared values. When it ceases to be that kind of internally entailed organism, the law becomes mere power, social “order” becomes tyranny, and the only option, over the long term at least, is war. + +_1 Organisms can’t be repaired from the outside. But, with reference to interactions that take place primarily online, among willing participants who seek primarily to regulate their own affairs, that’s exactly where existing governments are situated — outside the vibrant, self-regulating online spaces they seek to regulate. Their efforts to engineer the Internet as if it were a mechanism are not only fundamentally illegitimate but doomed by the very nature of the thing they seek to regulate. They are trying to create social order, of course. But they have not recognized . . . that order in complex systems creates itself.~{ Johnson, “The Life of the Law Online.” }~ + +After all, he or she is likely to have had a more meaningful personal role in crafting those rules. 
Now, of course, people live their lives in both online and terrestrial environments; there is no strict division between the two. That said, as people’s lives become more implicated in Internet spaces, citizens are likely to prefer the freedoms and affordances of the open-networked environment to the stunted correlates of offline politics, governance, and law. + +Indeed, this may be why so many activists and idealists are attracted to online venues. There is a richer sense of possibility. Contemporary politics and government have been captured by big money, professionals, and concentrated power. By contrast, in the digital republic, the ethic of transparency deals harshly with institutional manipulations, deceptions, and bad faith. They literally become part of your “permanent record,” forever available via a Google search. More fundamentally, the digital republic has a basic respect for everyone’s ability to contribute. It respects the principle of open access for all. The “consent of the governed” really matters. How sobering it is, then, to return to the “real world” of the American polity — or most other national governments — and realize that “money talks and bullshit walks.” How depressing to realize that the system is highly resistant to ordinary citizen action, such is the mismatch of resources. +={transparency+1;open business models:transparency in} + +The growing dissonance between the American system of governance, as practiced, and the more open, meritocratic online world was surely a factor in Lessig’s decision in 2007 to step down as CEO of Creative Commons, a move that eventually took place in April 2008. Lessig’s crushing responsibilities as the leader of Creative Commons — the international travel, the fund-raising, the strategic planning, the public events and movement obligations — had surely taken its toll. 
Feeling a personal need for new challenges as well as a responsibility to let new leaders emerge within the CC world, Lessig announced an ambitious new agenda for himself — tackling the “systemic corruption” of the democratic process in Congress. He joined with Joe Trippi, the campaign manager for Howard Dean’s 2004 presidential run, to launch a new organization, Change Congress, which seeks to ban special-interest campaign contributions, secure public financing for campaigns, and bring greater transparency to congressional proceedings. In a shuffle of roles, longtime board member James Boyle — who had been especially active on science and education initiatives — became the new chairman of Creative Commons. Board member Joi Ito, who had been chairman for a brief period, became CEO. +={Boyle, James:CC board, on|chairman, as;Change Congress (organization);Dean, Howard;democracy:corruption in|traditional forms of+1;Trippi, Joe;Lessig, Lawrence:political activity of} + +If Lessig is going to succeed in using the tools of the digital republic to reform and rejuvenate the American polity (and perhaps inspire other governments as well), he will have to confront the rather deeply rooted premises of the official constitutional order. The fast-paced, commons-based governance of the digital republic is naturally going to clash with a system of governance that revolves around bureaucratic hierarchies, a slow-moving system of law, archaic types of political intermediaries, and electoral principles designed for eighteenth-century life. Can the two be reconciled? The structural tensions are likely to be a significant and persistent issue for many, many years. + +2~ A Long-Term Power Shift? +={free culture:political nature of+23} + +It is hard to get a fix on this long-term transformation because the struggles to actualize an emergent democracy, as envisioned by Ito, are strangely apolitical and intensely political at the same time. 
They are apolitical in the sense that commoners are chiefly focused on the pragmatic technical challenges of their individual projects; they are not usually involved in official policymaking in legislatures or before courts and government agencies. Yet free software and free culture projects are highly political in the sense that commons projects, taken together over time, represent a profound challenge to the conventional market order and political culture. For example, Wikitravel, Jamendo, and open-access journals arguably provide better value than the commercial alternatives. The success of free software punctures the foundational assumptions of copyright law, making it easier to challenge new expansions of copyright law. Participatory commons are diverting viewer “eyeballs” away from commercial media and its genres of culture, spurring the growth of new hybrid forms of user-generated content. These kinds of effects, which advance project by project, month by month, are likely to have a longterm transformational impact. A new social ethic is taking root. +={Ito, Joichi;free software:FOSS/FLOSS+2;FOSS/FLOSS+2;copyright law:assumptions of;democracy:emergent} + +Free culture, though culturally progressive, is fairly nonjudgmental about ideological politics. When American conservatives decided they wanted to start Conservapedia because they found Wikipedia too liberal, Wikipedia founder Jimmy Wales was happy to bless it: “Free culture knows no bounds . . . We welcome the reuse of our work to build variants. That’s directly in line with our mission.”~{ Robert Mackey, “Conservapedia: The Word Says it All,” /{New York Times}/, March 8, 2007, at http://thelede.blogs.nytimes.com/2007/03/08/conserva pedia-the-word-says-it-all/?scp=1&sq=wales+conservapedia. }~ Anthropology professor E. Gabriella Coleman has found a similar ecumenicism in the free software movement, which is agnostic about conventional politics but adamant about its own polity of freedom.~{ E. 
Gabriella Coleman, “The Political Agnosticism of Free and Open Source Software and the Inadvertent Politics of Contrast,” /{Anthropology Quarterly}/ 77, no. 3 (Summer 2004), pp. 507–19. See also her Ph.D. dissertation, “The Social Construction of Freedom in Free and Open Source Software: Hackers, Ethics and the Liberal Tradition,” abstract at http://healthhacker.org/biella/coleman-abstract.pdf. }~ Thus, the FOSS movement has no position with respect to social justice or globalization issues, but it does demand a strict commitment to the “four freedoms” of software development. Johan Söderberg makes much the same case in his book /{Hacking Capitalism}/.~{ Johan Söderberg, /{Hacking Capitalism: The Free and Open Source Software Movement}/ (New York: Routledge, 2007). }~ +={Coleman, E. Gabriella;Wales, Jimmy;Söderberg, Johan} + +As projects like GNU/Linux, Wikipedia, open courseware, open-access journals, open databases, municipal Wi-Fi, collections of CC-licensed content, and other commons begin to cross-link and coalesce, the commons paradigm is migrating from the margins of culture to the center. The viral spiral, after years of building its infrastructure and social networks, may be approaching a Cambrian explosion, an evolutionary leap. +={commons:force for change, as+5} + +History suggests that any new style of politics and polity will arrive through models developed /{from within}/ the edifice of existing law, markets, and culture. A revolutionary coup or showdown with existing institutions will not be necessary. Superior working models — running code and a healthy commons — will trump polemics and exhortation. + +Ideological activists and political professionals are likely to scoff at this scenario. After all, they are suspicious of distributed political power, if not hostile to it. 
They prefer the levers of consolidated power (laws, court rulings, police powers) that are within their sphere of influence to the dispersed, sovereign powers of an online multitude. The latter is highly resistant to capture and control, and in that sense, profoundly threatening to the traditional configurations of political power. We have already seen how the mandarins of journalism, politics, and business are quick to lash out at the noncredentialed masses who dare to put forward their own interpretations of the world. + +However necessary it is to engage in the official governance of a nation, corrupted though it may be, the commoners have shown that building their own functioning commons can be a powerful force for change as well. A commons of technical standards for the Web — how mundane! — can achieve more than most antitrust lawsuits. A common pool of information can prevent a company from reaping easy monopoly rents from the control of a public good. Instead, the company must “move upstream” to provide more specialized forms of value (for example, sophisticated graphing of the information or data analysis). A commons may also be affirmatively helpful to businesses, as Eric von Hippel has shown, by aggregating a body of aficionados into a social community that can articulate customer needs and preferences in highly efficient ways: the commons as a cheap form of R & D and marketing. + +In either case, the rise of a commons can be disruptive not just because it changes how market power is exercised, but because it may disperse power to a broader community of participants. Recall Johnson’s observation that a commons is a “self-causing legal order” that competes with other legal orders. Individuals who affiliate with an online community may acquire the ability to manage their own social relationships and group identity. 
+={Johnson, David R.;commons:new type of legal organism+1;law:commons as new type of legal organism+1|political implications of+1;democracy:power of the commons in+1} + +This is not just a form of marketplace power, it is a form of /{political}/ power. In effect, a group may be able to neutralize the power of corporations to use brands to organize their identities. By developing its own discourse and identity, an online community can reject their treatment as a demographic cohort of consumers. They can assert their broader, nonmarket concerns. As a group of commoners, they are less susceptible to propaganda, ideology, and commercial journalism as tools for organizing their political allegiances. They have greater civic sovereignty. + +“Free cooperation aims at distributing power,” argues Geert Lovink, a Dutch media theorist: +={Lovink, Geert+1} + +_1 I am not saying that power as such disappears, but there is certainly a shift, away from the formal into the informal, from accountable structures towards a voluntary and temporal connection. We have to reconcile with the fact that these structures undermine the establishment, but not through recognizable forms of resistance. The “anti” element often misses. This is what makes traditional, unreconstructed lefties so suspicious, as these networks just do their thing and do not fit into this or that ideology, be it neoliberal or autonomous Marxist. Their vagueness escapes any attempt to deconstruct their intention either as proto-capitalist or subversive.~{ Geert Lovink, “Theses on Wiki Politics,” an exchange with Pavlos Hatzopoulos, /{Re-public}/, at http://www.re-public.gr/en/?p=135. }~ + +This can be disorienting. Energies are not focused on resisting an oppressor, but rather on building innovative, positive alternatives. In Buckminster Fuller’s terms, free culture is mostly about building new models that make the existing models obsolete. 
Instead of forging an identity in relation to an adversary, the movement has built an identity around an affirmative vision and the challenge of /{becoming}/. People feel fairly comfortable with a certain level of ambiguity because the whole environment is so protean, diverse, evolving, and dynamic. +={Fuller, R. Buckminster} + +The GPL and the CC licenses are ingenious hacks because they navigate this indeterminate ideological space with legally enforceable tools, while looking to informal social practice and norms to provide stable governance. (“Order without law,” in law professor Robert Ellickson’s formulation.)~{ Robert Ellickson, Order Without Law: How Neighbors Settle Disputes (Cambridge, MA: Harvard University Press, 2005). }~ The licenses use the existing legal order to achieve their goals (the sharing of tools and content), and so the strategies are not seen as politically provocative. Yet the licenses are nonetheless politically transformative because they help new communities of practice to organize themselves and do work that may question core premises of copyright law, conventional economics, and government policy in general. +={Ellickson, Robert} + +The beauty of this “ideological straddle” is that it enables a diverse array of players into the same tent without inciting sectarian acrimony. (There is some, of course, but mostly at the margins.) Ecumenical tolerance is the norm because orthodoxies cannot take root at the periphery where innovation is constantly being incubated. In any case, there is a widespread realization in the networked world that shared goals are likely to require variable implementations, depending on specific needs and contexts. + +It may appear that the free software hacker, blogger, tech entrepreneur, celebrity musician, college professor, and biological researcher have nothing in common. 
In truth, each is participating in social practices that are incrementally and collectively bringing into being a new sort of democratic polity. French sociologist Bruno Latour calls it the “pixellation of politics,”~{ Bruno Latour, “We Are All Reactionaries Today,” /{Re-public}/, at http://www.re-public.gr/en/?p=129. }~ which conjures up a pointillist painting slowly materializing. The new polity is more open, participatory, dynamically responsive, and morally respected by “the governed” than the nominal democracies of nation-states. The bureaucratic state tends to be too large and remote to be responsive to local circumstances and complex issues; it is ridiculed and endured. But who dares to aspire to transcend it? +={Latour, Bruno} + +Sooner or later, history-making citizenship is likely to take up such a challenge. It already has. What is the digital republic, after all, but a federation of self-organized communities, each seeking to fulfill its members’ dreams by developing its own indigenous set of tools, rules, and ethics? The power of the commons stems from its role as an organizing template, and not an ideology. Because it is able to host a diverse and robust ecosystem of talent without squeezing it into an ideological straitjacket, the commons is flexible and resilient. It is based on people’s sincerest passions, not on remote institutional imperatives or ideological shibboleths. It therefore has a foundational support and energy that can outperform “mainstream” institutions. +={citizenship:history-making+1;commons:political implications of+1} + +This, truly, is the animating force of the viral spiral: the capacity to build one’s own world and participate on a public stage. (Cicero: “Freedom is participation in power.”) When such energies are let loose in an open, networked environment, all sorts of new and interesting innovations emerge. 
Since an online commons does not have the burden of turning a profit or supporting huge overhead, it can wait for serendipity, passion, and idiosyncratic brilliance to surface, and then rely on the Internet to propagate the fruits virally. +={Cicero:on freedom} + +Oddly enough, entrenched commercial interests do not seem to be alarmed by the disruptive long-term implications of free culture. If the users of CC licenses genuflect before the altar of copyright law, it would appear, that is sufficient. Due respect is being shown. Meanwhile, at the level of social practice, the commoners are gradually building a very different moral economy that converges, from different paths, on a new type of civic order. In /{Code}/, Lessig called it “freedom without anarchy, control without government, consensus without power.” +={Lessig, Lawrence:Code and Other Laws of Cyberspace|freedom, and} + +It is not entirely clear how the special capacities of bottom-up networks — a “non-totalizing system of structure that nonetheless acts as a whole,” in Mark Taylor’s words — can be integrated with conventional government and institutions of power. It is easy to imagine a future confrontation in the political culture, however, as the citizens of the digital republic confront the stodgy bureaucratic state (corporate and governmental). The latter will have the advantages of constitutional authority and state and economic power, but the former are likely to have the advantages of social legitimacy, superior on-the-ground information, and creative energy. How the digital republic will confront the old regime, or supplant it gradually as archaic institutions collapse over time, is the stuff of future history. +={Taylor, Mark;citizenship:history-making+1} + +Theory has its limits. The building of the digital republic was in many ways animated by theory, of course, chiefly the rejection of certain theories of copyright law and the invention of new narratives about creativity and the commons. 
But this project has not been an intellectual, theory-driven enterprise so much as a vast, collective enterprise of history-making citizenship. Using the affordances of digital technologies, individuals have stepped out of their customary or assigned roles to invent entirely new vehicles for creativity, social life, business, politics, science, and education. Individuals have come together to make some remarkable new tools and institutions to serve their needs and preferences. +={commons:sources of new ideas, as+3} + +The story of the commons is, in this sense, the story of a series of public-spirited individuals who are determined to build new vehicles for protecting shared wealth and social energies. It is the story of Richard Stallman fighting the privatization of software and the disenfranchisement of the hacker community. It is the story of Eric Eldred’s determination to go to jail if necessary to defend his ability to build a Web site for great American literature. The viral spiral, as I have called it, truly gained momentum when Lawrence Lessig, as a boundary-breaking law professor, decided to mount a constitutional test case and then to assemble a larger effort to imagine and build a new licensing scheme for sharing. +={Stallman, Richard;Eldred, Eric:public domain, and;Lessig, Lawrence:law in contemporary context, and;software:proprietary} + +The viral spiral then spins off in dozens of directions as newly empowered people discover the freedoms and satisfactions that can accrue to them through this ancient yet now rediscovered and refurbished social vessel. Taken together, countless commons projects are validating some new models of human aspiration. Instead of presuming that a society must revolve around competitive individuals seeking private, material gain (the height of “rationality,” economists tell us), the commons affirms a broader, more complex, and more enlightened paradigm of human self-interest. 
If the Invisible Hand presumes to align private interest and the public good, the commons has shown that cooperation and sharing can also serve this goal with great versatility and sophistication. +={commoners:sharing by} + +Over the long term, the real meaning of the viral spiral may lie in our discovery that the new platforms that we use to create and organize knowledge, and relate to one another, is changing how we think and how we conceptualize our place in the world. John Seely Brown, the former director of Xerox PARC, has said, “From my perspective, a key property of participatory cultures is that they help to create both a culture of learning and a culture of doing. The social basis of doing (e.g. networked communities of interest/ practice) that you see emerging here actually form reflective practicum(s). This, in turn, ends up grounding epistemology — ways of knowing — and provides a pathway back to a kind of pragmatism that Dewey first talked about that is situated between realism and idealism. This is the pathway to creating a learning society and a culture that can embrace change by unleashing and affording productive inquiry in powerful and exciting ways.”~{ John Seely Brown, personal communication, January 26, 2008. }~ +={Brown, John Seely;Dewey, John} + +By empowering us to “step into history” and take greater responsibility for more aspects of our lives, it is no exaggeration to say that the commons encourages us to become more integrated human beings. We learn to integrate our production with our consumption, our learning with our doing, and our ideals with practical realities. This is surely why the viral spiral has been so powerfully transformative. It has helped bring our personal needs and interests into a closer, more congenial alignment with the institutions that serve us. We may be caught in a messy transition, and there remains much to negotiate and debate, but we should count our blessings. 
Few generations are as fortunate in being able to imagine and build a new commons sector of such liberating potential. +={citizenship:history-making} + -- cgit v1.2.3 From 7879124b732e6a4bb287ab2946e42b75826c7819 Mon Sep 17 00:00:00 2001 From: Ralph Amissah Date: Mon, 23 Aug 2010 23:15:26 -0400 Subject: markup sample, viral sprial, many url fixes * incorrect line-breaking issues with urls occurred in conversion from pdf to text, reviewed --- data/v1/samples/viral_spiral.david_bollier.sst | 214 ++++++++++++------------- data/v2/samples/viral_spiral.david_bollier.sst | 214 ++++++++++++------------- 2 files changed, 214 insertions(+), 214 deletions(-) diff --git a/data/v1/samples/viral_spiral.david_bollier.sst b/data/v1/samples/viral_spiral.david_bollier.sst index 912191b..125ee67 100644 --- a/data/v1/samples/viral_spiral.david_bollier.sst +++ b/data/v1/samples/viral_spiral.david_bollier.sst @@ -97,7 +97,7 @@ It is perilous to generalize about a movement that has so many disparate parts p Yet the people who are inventing new commons have some deeper aspirations and allegiances. They glimpse the liberating potential of the Internet, and they worry about the totalizing inclinations of large corporations and the state, especially their tendency to standardize and coerce behavior. They object as well to processes that are not transparent. They dislike the impediments to direct access and participation, the limitations of credentialed expertise and arbitrary curbs on people’s freedom. -One of the first major gatherings of international commoners occurred in June 2006, when several hundred people from fifty nations converged on Rio de Janeiro, Brazil, for the iCommons Summit. The people of this multinational, eclectic vanguard blend the sophistication of the establishment in matters of power and politics with the bravado and playfulness of Beat poets. There were indie musicians who can deconstruct the terms of a record company licensing agreement with Talmudic precision. 
There were Web designers who understand the political implications of arcane rules made by the World Wide Web Consortium, a technical standards body. The lawyers and law professors who discourse about Section 114 of the Copyright Act are likely to groove on the remix career of Danger Mouse and the appropriationist antics of Negativland, a sound-collage band. James Boyle and Jennifer Jenkins, two law scholars at Duke Law School, even published a superhero comic book, /{Down by Law!}/, which demystifies the vagaries of the “fair use doctrine” through a filmmaker character resembling video game heroine Lara Croft.~{Keith Aoki, James Boyle, Jennifer Jenkins, /{Down by Law!}/ at http://www .duke.edu/cspd/comics.}~ (Fair use is a provision of copyright law that makes it legal to excerpt portions of a copyrighted work for noncommercial, educational, and personal purposes.) +One of the first major gatherings of international commoners occurred in June 2006, when several hundred people from fifty nations converged on Rio de Janeiro, Brazil, for the iCommons Summit. The people of this multinational, eclectic vanguard blend the sophistication of the establishment in matters of power and politics with the bravado and playfulness of Beat poets. There were indie musicians who can deconstruct the terms of a record company licensing agreement with Talmudic precision. There were Web designers who understand the political implications of arcane rules made by the World Wide Web Consortium, a technical standards body. The lawyers and law professors who discourse about Section 114 of the Copyright Act are likely to groove on the remix career of Danger Mouse and the appropriationist antics of Negativland, a sound-collage band. 
James Boyle and Jennifer Jenkins, two law scholars at Duke Law School, even published a superhero comic book, /{Down by Law!}/, which demystifies the vagaries of the “fair use doctrine” through a filmmaker character resembling video game heroine Lara Croft.~{Keith Aoki, James Boyle, Jennifer Jenkins, /{Down by Law!}/ at http://www.duke.edu/cspd/comics. }~ (Fair use is a provision of copyright law that makes it legal to excerpt portions of a copyrighted work for noncommercial, educational, and personal purposes.) ={commoners:gatherings of} 2~ The Rise of Socially Created Value @@ -111,10 +111,10 @@ This is why so many ordinary people — without necessarily having degrees, inst Hugh McGuire, a Montreal-based writer and Web designer, is one. In 2005, he started LibriVox, a digital library of free public-domain audio books that are read and recorded by volunteers. More than ten thousand people a day visit the Web site to download audio files of Twain, Kafka, Shakespeare, Dostoyevsky, and others, in nearly a dozen languages.~{ http://www.librivox.org. }~ The Faulkes Telescope Project in Australia lets high school students connect with other students, and with professional astronomers, to scan the skies with robotic, online telescopes.~{ http://faulkes-telescope.com. }~ In a similar type of learning commons, the Bugscope project in the United States enables students to operate a scanning electronic microscope in real time, using a simple Web browser on a classroom computer connected to the Internet.~{ http://bugscope.beckman.uiuc.edu. }~ ={Bugscope;LibriVox;McGuire, Hugh;Faulkes Telescope Project} -Thousands of individual authors, musicians, and filmmakers are using Web tools and Creative Commons licenses to transform markets for creative works — or, more accurately, to blend the market and commons into integrated hybrids. 
A nonprofit humanitarian group dedicated to doing reconstructive surgery for children in poor countries, Interplast, produced an Oscar-winning film, /{A Story of Healing}/, in 1997. Ten years later, it released the film under a Creative Commons license as a way to publicize Interplast’s work while retaining ownership of the film: a benefit for both film buffs and Interplast.~{ http://www.interplast.org and http://creativecommons.org/press-releases/ 2007/04/%E2%80%9Ca-story-of-healing%E2%80%9D-becomes-first-acad emy-award%C2%AE-winning-film-released-under-a-creative-commons-li cense. }~ +Thousands of individual authors, musicians, and filmmakers are using Web tools and Creative Commons licenses to transform markets for creative works — or, more accurately, to blend the market and commons into integrated hybrids. A nonprofit humanitarian group dedicated to doing reconstructive surgery for children in poor countries, Interplast, produced an Oscar-winning film, /{A Story of Healing}/, in 1997. Ten years later, it released the film under a Creative Commons license as a way to publicize Interplast’s work while retaining ownership of the film: a benefit for both film buffs and Interplast.~{ http://www.interplast.org and http://creativecommons.org/press-releases/2007/04/%E2%80%9Ca-story-of-healing%E2%80%9D-becomes-first-academy-award%C2%AE-winning-film-released-under-a-creative-commons-license. }~ ={Interplast} -Scoopt, a Glasgow, Scotland–based photography agency, acts as a broker to help bloggers and amateurs sell newsworthy photos and videos to the commercial media.~{ http://www.scoopt.com. }~ The Boston band Two Ton Shoe released its music on the Web for free to market its concerts. Out of the blue, a South Korean record label called one day to say it loved the band and could it come over to Seoul, all expenses paid, to perform four concerts? Each one sold out.~{ http://www.twotonshoe.com/news.html.
}~ Boing Boing blogger and cyber-activist Cory Doctorow released his 2003 science-fiction novel, /{Down and Out in the Magic Kingdom}/, under a CC license, reaping a whirlwind of worldwide exposure.~{ See Doctorow’s preface to the second release of the book, February 12, 2004, Tor Books. See also his blog Craphound.com, September 9, 2006, at http:// www.craphound.com/?=p=1681. }~ +Scoopt, a Glasgow, Scotland–based photography agency, acts as a broker to help bloggers and amateurs sell newsworthy photos and videos to the commercial media.~{ http://www.scoopt.com. }~ The Boston band Two Ton Shoe released its music on the Web for free to market its concerts. Out of the blue, a South Korean record label called one day to say it loved the band and could it come over to Seoul, all expenses paid, to perform four concerts? Each one sold out.~{ http://www.twotonshoe.com/news.html. }~ Boing Boing blogger and cyber-activist Cory Doctorow released his 2003 science-fiction novel, /{Down and Out in the Magic Kingdom}/, under a CC license, reaping a whirlwind of worldwide exposure.~{ See Doctorow’s preface to the second release of the book, February 12, 2004, Tor Books. See also his blog Craphound.com, September 9, 2006, at http://www.craphound.com/?=p=1681. }~ ={Doctorow, Cory;Scoopt} 2~ The Commoners Build a Digital Republic of Their Own @@ -128,7 +128,7 @@ The commoners differ from most of their corporate brethren in their enthusiasm f It is all very well to spout such lofty goals. But how to actualize them? That is the story that the following pages recount. It has been the work of a generation, some visionary leaders, and countless individuals to articulate a loosely shared vision, build the infrastructure, and develop the social practices and norms. 
This project has not been animated by a grand political ideology, but rather is the result of countless initiatives, grand and incremental, of an extended global family of hackers, lawyers, bloggers, artists, and other supporters of free culture. ={commons:political implications of+3} -And yet, despite its focus on culture and its aversion to conventional politics, the growth of this movement is starting to have political implications. In an influential 2003 essay, James F. Moore announced the arrival of “an emerging second superpower.”~{ James F. Moore, “The Second Superpower Rears its Beautiful Head,” March 31, 2003, available at http://cyber.law.harvard.edu/people/jmoore/second superpower.html. }~ It was not a nation, but the coalescence of people from around the world who were asserting common values, and forming new public identities, via online networks. The people of this emerging “superpower,” Moore said, are concerned with improving the environment, public health, human rights, and social development. He cited as early examples the international campaign to ban land mines and the Seattle protests against the World Trade Organization in 1999. The power and legitimacy of this “second superpower” do not derive from the constitutional framework of a nation-state, but from its ability to capture and project people’s everyday feelings, social values, and creativity onto the world stage. Never in history has the individual had such cheap, unfettered access to global audiences, big and small. +And yet, despite its focus on culture and its aversion to conventional politics, the growth of this movement is starting to have political implications. In an influential 2003 essay, James F. Moore announced the arrival of “an emerging second superpower.”~{ James F. Moore, “The Second Superpower Rears its Beautiful Head,” March 31, 2003, available at http://cyber.law.harvard.edu/people/jmoore/secondsuperpower.html. 
}~ It was not a nation, but the coalescence of people from around the world who were asserting common values, and forming new public identities, via online networks. The people of this emerging “superpower,” Moore said, are concerned with improving the environment, public health, human rights, and social development. He cited as early examples the international campaign to ban land mines and the Seattle protests against the World Trade Organization in 1999. The power and legitimacy of this “second superpower” do not derive from the constitutional framework of a nation-state, but from its ability to capture and project people’s everyday feelings, social values, and creativity onto the world stage. Never in history has the individual had such cheap, unfettered access to global audiences, big and small. ={Moore, James} The awakening superpower described in /{Viral Spiral}/ is not a conventional political or ideological movement that focuses on legislation and a clutch of “issues.” While commoners do not dismiss these activities as unimportant, most are focused on the freedom of their peer communities to create, communicate, and share. When defending these freedoms requires wading into conventional politics and law, they are prepared to go there. But otherwise, the commoners are more intent on building a kind of parallel social order, inscribed within the regnant political economy but animated by their own values. Even now, the political/cultural sensibilities of this order are only vaguely understood by governments, politicians, and corporate leaders. The idea of “freedom without anarchy, control without government, consensus without power” — as Lawrence Lessig put it in 1999~{ Lawrence Lessig, /{Code and Other Laws of Cyberspace}/ (New York: Basic Books, 1999), p. 4. }~ —is just too counterintuitive for the conventionally minded to take seriously. 
@@ -145,7 +145,7 @@ The various industries that rely on copyrights have welcomed this development be The Internet has profoundly disrupted this model of market production, however. The Internet is a distributed media system of low-cost capital (your personal computer) strung together with inexpensive transmission and software. Instead of being run by a centralized corporation that relies upon professionals and experts above all else, the Internet is a noncommercial infrastructure that empowers amateurs, citizens, and ordinary individuals in all their quirky, authentic variety. The mass media have long regarded people as a commodifiable audience to be sold to advertisers in tidy demographic units. ={Internet:empowerment by+2} -Now, thanks to the Internet, “the people formerly known as the audience” (in Jay Rosen’s wonderful phrase) are morphing into a differentiated organism of flesh-and-blood, idiosyncratic individuals, as if awakening from a spell. Newly empowered to speak as they wish, in their own distinctive, personal voices to a global public of whoever cares to listen, people are creating their own transnational tribes. They are reclaiming culture from the tyranny of mass-media economics and national boundaries. In Lessig’s words, Internet users are overthrowing the “read only” culture that characterized the “weirdly totalitarian” communications of the twentieth century. In its place they are installing the “read/write” culture that invites everyone to be a creator, as well as a consumer and sharer, of culture.~{ Lawrence Lessig, “The Read-Write Society,” delivered at the Wizards of OS4 conference in Berlin, Germany, on September 5, 2006. Available at http:// www.wizards-of-os.org/programm/panels/authorship_amp_culture/keynote _the_read_write_society/the_read_write_society.html. }~ A new online citizenry is arising, one that regards its socially negotiated rules and norms as at least as legitimate as those established by conventional law. 
+Now, thanks to the Internet, “the people formerly known as the audience” (in Jay Rosen’s wonderful phrase) are morphing into a differentiated organism of flesh-and-blood, idiosyncratic individuals, as if awakening from a spell. Newly empowered to speak as they wish, in their own distinctive, personal voices to a global public of whoever cares to listen, people are creating their own transnational tribes. They are reclaiming culture from the tyranny of mass-media economics and national boundaries. In Lessig’s words, Internet users are overthrowing the “read only” culture that characterized the “weirdly totalitarian” communications of the twentieth century. In its place they are installing the “read/write” culture that invites everyone to be a creator, as well as a consumer and sharer, of culture.~{ Lawrence Lessig, “The Read-Write Society,” delivered at the Wizards of OS4 conference in Berlin, Germany, on September 5, 2006. Available at http://www.wizards-of-os.org/programm/panels/authorship_amp_culture/keynote_the_read_write_society/the_read_write_society.html. }~ A new online citizenry is arising, one that regards its socially negotiated rules and norms as at least as legitimate as those established by conventional law. ={Rosen, Jay} Two profoundly incommensurate media systems are locked in a struggle for survival or supremacy, depending upon your perspective or, perhaps, mutual accommodation. For the moment, we live in a confusing interregnum — a transition that pits the dwindling power and often desperate strategies of Centralized Media against the callow, experimental vigor of Internet-based media. This much is clear, however: a world organized around centralized control, strict intellectual property rights, and hierarchies of credentialed experts is under siege. A radically different order of society based on open access, decentralized creativity, collaborative intelligence, and cheap and easy sharing is ascendant. 
Or to put it more precisely, we are stumbling into a strange hybrid order that combines both worlds — mass media and online networks — on terms that have yet to be negotiated. @@ -191,12 +191,12 @@ In the video world, too, the remix impulse has found expression in its own form The key insight about many open-platform businesses is that they no longer look to copyright or patent law as tools to assert market control. Their goal is not to exclude others, but to amass large communities. Open businesses understand that exclusive property rights can stifle the value creation that comes with mass participation, and so they strive to find ways to “honor the commons” while making money in socially acceptable forms of advertising, subscriptions, or consulting services. The brave new economics of “peer production” is enabling forward-thinking businesses to use social collaboration among thousands, or even millions, of people to create social communities that are the foundation for significant profits. /{BusinessWeek}/ heralded this development in a major cover story in 2005, “The Power of Us,” and called sharing “the net’s next disruption.”~{ Robert D. Hof, “The Power of Us: Mass Collaboration on the Internet Is Shaking Up Business,” /{BusinessWeek}/, June 20, 2005, pp. 73–82. }~ -!{/{Science}/}! as a commons. The world of scientific research has long depended on open sharing and collaboration. But increasingly, copyrights, patents, and university rules are limiting the flow of scientific knowledge. The resulting gridlock of rights in knowledge is impeding new discoveries and innovation. Because of copyright restrictions and software incompatibilities, scientists studying genetics, proteins, and marine biology often cannot access databases containing vital research. Or they cannot easily share physical samples of lab samples. 
When the maker of Golden Rice, a vitamin-enhanced bioengineered rice, tried to distribute its seeds to millions of people in poor countries, it first had to get permissions from seventy patent holders and obtain six Material Transfer Agreements (which govern the sharing of biomedical research substances).~{ Interview with John Wilbanks, “Science Commons Makes Sharing Easier,” /{Open Access Now}/, December 20, 2004, available at http://www.biomedcen tral.com/openaccess/archive/?page=features&issue=23.}~ +!{/{Science}/}! as a commons. The world of scientific research has long depended on open sharing and collaboration. But increasingly, copyrights, patents, and university rules are limiting the flow of scientific knowledge. The resulting gridlock of rights in knowledge is impeding new discoveries and innovation. Because of copyright restrictions and software incompatibilities, scientists studying genetics, proteins, and marine biology often cannot access databases containing vital research. Or they cannot easily share physical samples of lab samples. When the maker of Golden Rice, a vitamin-enhanced bioengineered rice, tried to distribute its seeds to millions of people in poor countries, it first had to get permissions from seventy patent holders and obtain six Material Transfer Agreements (which govern the sharing of biomedical research substances).~{ Interview with John Wilbanks, “Science Commons Makes Sharing Easier,” /{Open Access Now}/, December 20, 2004, available at http://www.biomedcentral.com/openaccess/archive/?page=features&issue=23. }~ ={Wilbanks, John+1;Science Commons:CC Commons spinoff, and+1} The problem of acquiring, organizing, and sharing scientific knowledge is becoming more acute, paradoxically enough, as more scientific disciplines become dependent on computers and the networked sharing of data. 
To help deal with some of these issues, the Creative Commons in 2005 launched a new project known as the Science Commons to try to redesign the information infrastructure for scientific research. The basic idea is to “break down barriers to sharing that are hindering innovation in the sciences,” says John Wilbanks, executive director of Science Commons. Working with the National Academy of Sciences and other research bodies, Wilbanks is collaborating with astronomers, archaeologists, microbiologists, and medical researchers to develop better ways to make vast scientific literatures more computer-friendly, and databases technically compatible, so that they can be searched, organized, and used more effectively. -!{/{Open education and learning.}/}! A new class of knowledge commons is poised to join free and open-source software, the Creative Commons and Wikipedia as a coherent social movement. The new groundswell goes by the awkward name “Open Educational Resources,” or OER.~{ See, e.g., Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, “A Review of the Open Educational Resources (OER) Movement: Achievements, Challenges and New Opportunities,” February 2007, available at http://www .oerderves.org/?p=23.}~ One of the earlier pioneers of the movement was the Massachusetts Institute of Technology which has put virtually all of its course materials on the Web, for free, through its OpenCourseWare initiative. The practice has now spread to scores of colleges and universities around the world, and inspired a broader set of OER initiatives: digital repositories for articles, reports, and data; open-access scholarly journals that bypass expensive commercial publishers; and collaborative Web sites for developing teaching materials. There are wikis for students and scholars working together, sites to share multimedia presentations, and much more. +!{/{Open education and learning.}/}! 
A new class of knowledge commons is poised to join free and open-source software, the Creative Commons and Wikipedia as a coherent social movement. The new groundswell goes by the awkward name “Open Educational Resources,” or OER.~{ See, e.g., Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, “A Review of the Open Educational Resources (OER) Movement: Achievements, Challenges and New Opportunities,” February 2007, available at http://www.oerderves.org/?p=23. }~ One of the earlier pioneers of the movement was the Massachusetts Institute of Technology which has put virtually all of its course materials on the Web, for free, through its OpenCourseWare initiative. The practice has now spread to scores of colleges and universities around the world, and inspired a broader set of OER initiatives: digital repositories for articles, reports, and data; open-access scholarly journals that bypass expensive commercial publishers; and collaborative Web sites for developing teaching materials. There are wikis for students and scholars working together, sites to share multimedia presentations, and much more. ={education:OER movement+1;pen Educational Resources (OER) movement+1;Wikipedia:social movement, as+1;Creative Commons (CC):social movement, as+1} The OER movement has particular importance for people who want to learn but don’t have the money or resources — scholars in developing countries, students struggling to pay for their educations, people in remote or rural locations, people with specialized learning needs. OER is based on the proposition that it will not only be cheaper or perhaps free if teachers and students can share their materials through the Web, it will also enable more effective types of learning. So the OER movement is dedicated to making learning tools cheaper and more accessible. 
The revolutionary idea behind OER is to transform traditional education — teachers imparting information to passive students — into a more learnerdriven process facilitated by teachers. Self-directed, socially driven learning supplants formal, hierarchical modes of teaching. @@ -320,13 +320,13 @@ _1 Freedom 2: The freedom to redistribute copies so you can help your neighbor; _1 Freedom 3: The freedom to improve the program, and release your improvements to the public, so that the whole community benefits. (Access to the source code is a precondition for this.) ={authorship:community access} -Stallman has become an evangelist for the idea of freedom embodied in all the GNU programs. He refuses to use any software programs that are not “free,” and he has refused to allow his appearances to be Webcast if the software being used was not “free.” “If I am to be an honest advocate for free software,” said Stallman, “I can hardly go around giving speeches, then put pressure on people to use nonfree software. I’d be undermining my own cause. And if I don’t show that I take my principles seriously, I can’t expect anybody else to take them seriously.”~{ Stallman at MIT forum, “Copyright and Globalization in the Age of Computer Networks,” April 19, 2001, available at http://media-in-transition.mit .edu/forums/copyright/transcript.html. }~ +Stallman has become an evangelist for the idea of freedom embodied in all the GNU programs. He refuses to use any software programs that are not “free,” and he has refused to allow his appearances to be Webcast if the software being used was not “free.” “If I am to be an honest advocate for free software,” said Stallman, “I can hardly go around giving speeches, then put pressure on people to use nonfree software. I’d be undermining my own cause. 
And if I don’t show that I take my principles seriously, I can’t expect anybody else to take them seriously.”~{ Stallman at MIT forum, “Copyright and Globalization in the Age of Computer Networks,” April 19, 2001, available at http://media-in-transition.mit.edu/forums/copyright/transcript.html. }~ ={Stallman, Richard:free software, and+2} Stallman has no problems with people making money off software. He just wants to guarantee that a person can legally use, copy, modify, and distribute the source code. There is thus an important distinction between software that is commercial (possibly free) and software that is proprietary (never free). Stallman tries to explain the distinction in a catchphrase that has become something of a mantra in free software circles: /{“free as in ‘free speech,’ not as in ‘free beer.’”}/ The point is that code must be freely accessible, not that it should be free of charge. (This is why “freeware” is not the same as free software. Freeware may be free of charge, but it does not necessarily make its source code accessible.) ={freeware vs. free software;software:proprietary|source code for} -Eben Moglen, a professor of law at Columbia University and general counsel for the Free Software Foundation since 1994, calls the provisions of the GPL “elegant and simple. They respond to the proposition that when the marginal cost of goods is zero, any nonzero cost of barbed wire is too high. That’s a fact about the twentyfirst century, and everybody had better get used to it. Yet as you know, there are enormous cultural enterprises profoundly committed to the proposition that more and more barbed wire is necessary. And their basic strategy is to get that barbed wire paid for by the public everywhere.”~{ Eben Moglen, “Freeing the Mind: Free Software and the Death of Proprietary Culture,” June 29, 2003, available at http://emoglen.law/columbia.edu/publi cations/maine-speech.html. 
}~ +Eben Moglen, a professor of law at Columbia University and general counsel for the Free Software Foundation since 1994, calls the provisions of the GPL “elegant and simple. They respond to the proposition that when the marginal cost of goods is zero, any nonzero cost of barbed wire is too high. That’s a fact about the twentyfirst century, and everybody had better get used to it. Yet as you know, there are enormous cultural enterprises profoundly committed to the proposition that more and more barbed wire is necessary. And their basic strategy is to get that barbed wire paid for by the public everywhere.”~{ Eben Moglen, “Freeing the Mind: Free Software and the Death of Proprietary Culture,” June 29, 2003, available at http://emoglen.law.columbia.edu/publications/maine-speech.html. }~ ={Moglen, Eben;Free Software Foundation} The GPL truly was something new under the sun: a legally enforceable tool to vouchsafe a commons of software code. The license is based on copyright law yet it cleverly turns copyright law against itself, limiting its reach and carving out a legally protected zone to build and protect the public domain. In the larger scheme of things, the GPL was an outgrowth of the “gift economy” ethic that has governed academic life for centuries and computer science for decades. What made the GPL different from these (abridgeable) social norms was its legal enforceability. @@ -349,7 +349,7 @@ The Linux kernel, when combined with the GNU programs developed by Stallman and The real innovation of Linux, writes Eric S. Raymond, a leading analyst of the technology, was “not technical, but sociological”: ={Linux:sociological effect of+1} -_1 Linux was rather casually hacked on by huge numbers of volunteers coordinating only through the Internet.
Quality was maintained not by rigid standards or autocracy but by the naively simple strategy of releasing every week and getting feedback from hundreds of users within days, creating a sort of rapid Darwinian selection on the mutations introduced by developers. To the amazement of almost everyone, this worked quite well.~{ Eric S. Raymond, “A Brief History of Hackerdom,” http://www.catb.org/ ~est/writings/cathedral-bazaar/hacker-history/ar01s06.html.}~ +_1 Linux was rather casually hacked on by huge numbers of volunteers coordinating only through the Internet. Quality was maintained not by rigid standards or autocracy but by the naively simple strategy of releasing every week and getting feedback from hundreds of users within days, creating a sort of rapid Darwinian selection on the mutations introduced by developers. To the amazement of almost everyone, this worked quite well.~{ Eric S. Raymond, “A Brief History of Hackerdom,” http://www.catb.org/~esr/writings/cathedral-bazaar/hacker-history/ar01s06.html. }~ The Free Software Foundation had a nominal project to develop a kernel, but it was not progressing very quickly. The Linux kernel, while primitive, “was running and ready for experimentation,” writes Steven Weber in his book /{The Success of Open Source}/: “Its crude functionality was interesting enough to make people believe that it could, with work, evolve into something important. That promise was critical and drove the broader development process from early on.”~{ Steven Weber, /{The Success of Open Source}/ (Cambridge, MA: Harvard University Press, 2004), p. 100.
}~ ={Weber, Steven:The Success of Open Source;Free Software Foundation} @@ -389,7 +389,7 @@ The philosophical rift between free software and open-source software amounts to ={FOSS/FLOSS+3;free software:FOSS/FLOSS+3;Raymond, Eric S.+1;Linux:sociological effect of+1} -_1 The Linux world behaves in many respects like a free market or an ecology, a collection of selfish agents attempting to maximize utility which in the process produces a selfcorrecting spontaneous order more elaborate and efficient than any amount of central planning could have achieved. . . . The utility function Linux hackers are maximizing is not classically economic, but is the intangible of their own ego satisfaction and reputation among other hackers.~{ Eric Raymond, “The Cathedral and the Bazaar,” available at http://www .catb.org/~esr/writings/cathedral-bazaar/cathedral-bazaar/ar01s11.html.}~ +_1 The Linux world behaves in many respects like a free market or an ecology, a collection of selfish agents attempting to maximize utility which in the process produces a selfcorrecting spontaneous order more elaborate and efficient than any amount of central planning could have achieved. . . . The utility function Linux hackers are maximizing is not classically economic, but is the intangible of their own ego satisfaction and reputation among other hackers.~{ Eric Raymond, “The Cathedral and the Bazaar,” available at http://www.catb.org/~esr/writings/cathedral-bazaar/cathedral-bazaar/ar01s11.html. }~ It turns out that an accessible collaborative process, FOSS, can elicit passions and creativity that entrenched markets often cannot. In this respect, FOSS is more than a type of freely usable software; it reunites two vectors of human behavior that economists have long considered separate, and points to the need for new, more integrated theories of economic and social behavior. 
={free software:economic effects of+1} @@ -400,7 +400,7 @@ FOSS represents a new breed of “social production,” one that draws upon soci Red Hat, a company founded in 1993 by Robert Young, was the first to recognize the potential of selling a custom version (or “distribution”) of GNU/Linux as a branded product, along with technical support. A few years later, IBM became one of the first large corporations to recognize the social realities of GNU/Linux and its larger strategic and competitive implications in the networked environment. In 1998 IBM presciently saw that the new software development ecosystem was becoming far too variegated and robust for any single company to dominate. It understood that its proprietary mainframe software could not dominate the burgeoning, diversified Internet-driven marketplace, and so the company adopted the open-source Apache Web server program in its new line of WebSphere business software. ={Red Hat;Young, Robert;GNU/Linux:IBM, and+1|Red Hat, and;IBM:GNU/Linux, and+1;Apache Web server;open source software:functions of+2} -It was a daring move that began to bring the corporate and open-source worlds closer together. Two years later, in 2000, IBM announced that it would spend $1 billion to help develop GNU/Linux for its customer base. IBM shrewdly realized that its customers wanted to slash costs, overcome system incompatibilities, and avoid expensive technology “lock-ins” to single vendors. GNU/Linux filled this need well. IBM also realized that GNU/Linux could help it compete against Microsoft. By assigning its property rights to the commons, IBM could eliminate expensive property rights litigation, entice other companies to help it improve the code (they could be confident that IBM could not take the code private), and unleash a worldwide torrent of creative energy focused on GNU/Linux. 
Way ahead of the curve, IBM decided to reposition itself for the emerging networked marketplace by making money through tech service and support, rather than through proprietary software alone.~{ Andrew Leonard, “How Big Blue Fell for Linux,” Salon.com, September 12, 2000, available at http://www.salon.com/tech/fsp/2000/09/12/chapter_7_ part_one.print.html. The competitive logic behind IBM’s moves are explored in Pamela Samuelson, “IBM’s Pragmatic Embrace of Open Source,” /{Communications of the ACM}/ 49, no. 21 (October 2006), and Robert P. Merges, “A New Dynamism in the Public Domain,” /{University of Chicago Law Review}/ 71, no. 183 (Winter 2004). }~ +It was a daring move that began to bring the corporate and open-source worlds closer together. Two years later, in 2000, IBM announced that it would spend $1 billion to help develop GNU/Linux for its customer base. IBM shrewdly realized that its customers wanted to slash costs, overcome system incompatibilities, and avoid expensive technology “lock-ins” to single vendors. GNU/Linux filled this need well. IBM also realized that GNU/Linux could help it compete against Microsoft. By assigning its property rights to the commons, IBM could eliminate expensive property rights litigation, entice other companies to help it improve the code (they could be confident that IBM could not take the code private), and unleash a worldwide torrent of creative energy focused on GNU/Linux. Way ahead of the curve, IBM decided to reposition itself for the emerging networked marketplace by making money through tech service and support, rather than through proprietary software alone.~{ Andrew Leonard, “How Big Blue Fell for Linux,” Salon.com, September 12, 2000, available at http://www.salon.com/tech/fsp/2000/09/12/chapter_7_part_one.print.html. The competitive logic behind IBM’s moves are explored in Pamela Samuelson, “IBM’s Pragmatic Embrace of Open Source,” /{Communications of the ACM}/ 49, no. 21 (October 2006), and Robert P. 
Merges, “A New Dynamism in the Public Domain,” /{University of Chicago Law Review}/ 71, no. 183 (Winter 2004). }~ ={Microsoft:competition against} It was not long before other large tech companies realized the benefits of going open source. Amazon and eBay both saw that they could not affordably expand their large computer infrastructures without converting to GNU/Linux. GNU/Linux is now used in everything from Motorola cell phones to NASA supercomputers to laptop computers. In 2005, /{BusinessWeek}/ magazine wrote, “Linux may bring about the greatest power shift in the computer industry since the birth of the PC, because it lets companies replace expensive proprietary systems with cheap commodity servers.”~{ Steve Hamm, “Linux Inc.,” /{BusinessWeek}/, January 31, 2005. }~ As many as one-third of the programmers working on open-source projects are corporate employees, according to a 2002 survey.~{ Cited by Elliot Maxwell in “Open Standards Open Source and Open Innovation,” note 80, Berlecon Research, /{Free/Libre Open Source Software: Survey and Study — Firms’ Open Source Activities: Motivations and Policy Implications}/, FLOSS Final Report, Part 2, at www.berlecon.de/studien/downloads/200207FLOSS _Activities.pdf. }~ @@ -412,7 +412,7 @@ With faster computing speeds and cost savings of 50 percent or more on hardware But how does open source work without a conventional market apparatus? The past few years have seen a proliferation of sociological and economic theories about how open-source communities create value. One formulation, by Rishab Ghosh, compares free software development to a “cooking pot,” in which you can give a little to the pot yet take a lot — with no one else being the poorer. 
“Value” is not measured economically at the point of transaction, as in a market, but in the nonmonetary /{flow}/ of value that a project elicits (via volunteers) and generates (through shared software).~{ Rishab Aiyer Ghosh, “Cooking Pot Markets and Balanced Value Flows,” in Rishab Aiyer Ghosh, ed., /{CODE: Collaborative Ownership and the Digital Economy}/ (Cambridge, MA: MIT Press, 2005), pp. 153–68. }~ Another important formulation, which we will revisit later, comes from Harvard law professor Yochai Benkler, who has written that the Internet makes it cheap and easy to access expertise anywhere on the network, rendering conventional forms of corporate organization costly and cumbersome for many functions. Communities based on social trust and reciprocity are capable of mobilizing creativity and commitment in ways that market incentives often cannot — and this can have profound economic implications.~{ See, e.g., Benkler, “Coase’s Penguin, or Linux and the Nature of the Firm,” /{Yale Law Journal}/ 112, no. 369 (2002); Benkler, “ ‘Sharing Nicely’: On Shareable Goods and the Emergence of Sharing as a Modality of Economic Production,” Yale Law Journal 114, no. 273 (2004).}~ Benkler’s analysis helps explain how a global corps of volunteers could create an operating system that, in many respects, outperforms software created by a well-paid army of Microsoft employees. ={Benkler, Yochai:open networks, on;FOSS/FLOSS;free software:FOSS/FLOSS;Ghosh, Rishab;open source software:economic implications of|uses of term+4} -A funny thing happened to free and open-source software as it matured. It became hip. It acquired a cultural cachet that extends well beyond the cloistered precincts of computing. “Open source” has become a universal signifier for any activity that is participatory, collaborative, democratic, and accountable. 
Innovators within filmmaking, politics, education, biological research, and drug development, among other fields, have embraced the term to describe their own attempts to transform hidebound, hierarchical systems into open, accessible, and distributed meritocracies. Open source has become so much of a cultural meme — a self-replicating symbol and idea — that when the Bikram yoga franchise sought to shut down unlicensed uses of its yoga techniques, dissident yoga teachers organized themselves into a nonprofit that they called Open Source Yoga Unity. To tweak the supremacy of Coca-Cola and Pepsi, culture jammers even developed nonproprietary recipes for a cola drink and beer called “open source cola” and “open source beer.”~{ Open Source Yoga Unity, http://www.yogaunity.org; open-source cola, http://alfredo.octavio.net/soft_drink_formula.pdf; open-source beer, Vores OI (Danish for “Our Beer”), http://en.wikipedia.org/wiki/Vores_%C3%981 . See also http://freebeer.org/blog and http://www.project21.ch/freebeer. }~ +A funny thing happened to free and open-source software as it matured. It became hip. It acquired a cultural cachet that extends well beyond the cloistered precincts of computing. “Open source” has become a universal signifier for any activity that is participatory, collaborative, democratic, and accountable. Innovators within filmmaking, politics, education, biological research, and drug development, among other fields, have embraced the term to describe their own attempts to transform hidebound, hierarchical systems into open, accessible, and distributed meritocracies. Open source has become so much of a cultural meme — a self-replicating symbol and idea — that when the Bikram yoga franchise sought to shut down unlicensed uses of its yoga techniques, dissident yoga teachers organized themselves into a nonprofit that they called Open Source Yoga Unity. 
To tweak the supremacy of Coca-Cola and Pepsi, culture jammers even developed nonproprietary recipes for a cola drink and beer called “open source cola” and “open source beer.”~{ Open Source Yoga Unity, http://www.yogaunity.org; open-source cola, http://alfredo.octavio.net/soft_drink_formula.pdf; open-source beer, Vores OI (Danish for “Our Beer”), http://en.wikipedia.org/wiki/Vores_%C3%981. See also http://freebeer.org/blog and http://www.project21.ch/freebeer. }~ ={free software:uses of term+5} Stallman’s radical acts of dissent in the 1980s, regarded with bemusement and incredulity at the time, have become, twenty-five years later, a widely embraced ideal. Small-/{d}/ democrats everywhere invoke open source to lambaste closed and corrupt political systems and to express their aspirations for political transcendence. People invoke open source to express a vision of life free from overcommercialization and corporate manipulation. The term enables one to champion bracing democratic ideals without seeming naïve or flaky because, after all, free software is solid stuff. Moreover, despite its image as the software of choice for granola-loving hippies, free and open-source software is entirely compatible with the commercial marketplace. How suspect can open source be when it has been embraced by the likes of IBM, Hewlett-Packard, and Sun Microsystems? @@ -435,7 +435,7 @@ Fortunately, a small but fierce and keenly intelligent corps of progressive copy For decades, the public domain was regarded as something of a wasteland, a place where old books, faded posters, loopy music from the early twentieth century, and boring government reports go to die. It was a dump on the outskirts of respectable culture. If anything in the public domain had any value, someone would sell it for money. Or so goes the customary conception of the public domain. 
-Jack Valenti, the longtime head of the Motion Picture Association of America, once put it this way: “A public domain work is an orphan. No one is responsible for its life. But everyone exploits its use, until that time certain when it becomes soiled and haggard, barren of its previous virtues. Who, then, will invest the funds to renovate and nourish its future life when no one owns it?”~{ Jack Valenti, “A Plea for Keeping Alive the U.S. Film Industry’s Competitive Energy, ” testimony on behalf of the Motion Picture Association of America to extend the term of copyright protection, Senate Judiciary Committee, September 20, 1995, at http://instructors.cwrl.utexas.edu/~martin/Valenti .pdf. }~ (Valenti was arguing that longer copyright terms would give film studios the incentive to digitize old celluloid films that would otherwise enter the public domain and physically disintegrate.) +Jack Valenti, the longtime head of the Motion Picture Association of America, once put it this way: “A public domain work is an orphan. No one is responsible for its life. But everyone exploits its use, until that time certain when it becomes soiled and haggard, barren of its previous virtues. Who, then, will invest the funds to renovate and nourish its future life when no one owns it?”~{ Jack Valenti, “A Plea for Keeping Alive the U.S. Film Industry’s Competitive Energy, ” testimony on behalf of the Motion Picture Association of America to extend the term of copyright protection, Senate Judiciary Committee, September 20, 1995, at http://instructors.cwrl.utexas.edu/~martin/Valenti.pdf. }~ (Valenti was arguing that longer copyright terms would give film studios the incentive to digitize old celluloid films that would otherwise enter the public domain and physically disintegrate.) ={Valenti, Jack} One of the great, unexplained mysteries of copyright law is how a raffish beggar grew up to be King Midas. 
How did a virtually ignored realm of culture — little studied and undertheorized— become a subject of intense scholarly interest and great practical importance to commoners and businesses alike? How did the actual value of the public domain become known? The idea that the public domain might be valuable in its own right — and therefore be worth protecting — was a fringe idea in the 1990s and before. So how did a transformation of legal and cultural meaning occur? @@ -499,7 +499,7 @@ Yet rather than negotiate a new copyright bargain to take account of the public Most content industries, then and now, do not see any “imbalance” in copyright law; they prefer to talk in different terms entirely. They liken copyrighted works to personal property or real estate, as in “and you wouldn’t steal a CD or use my house without permission, would you?” A copyrighted work is analogized to a finite physical object, But the essential point about works in the digital age is that they can’t be “used up” in the same way that physical objects can. They are “nondepletable” and “nonrival,” as economists put it. A digital work can be reproduced and shared for virtually nothing, without depriving another person of it. ={property rights:copyright law, and+1;copyright law:property rights, and} -Nonetheless, a new narrative was being launched — copyrighted works as property. The idea of copyright law reflecting a policy bargain between the public and authors/corporations was being supplanted by a new story that casts copyright as property that is nearly absolute in scope and virtually perpetual in term. In hindsight, for those scholars who cared enough to see, a disquieting number of federal court cases were strengthening the hand of copyright holders at the expense of the public. 
James Boyle, in a much-cited essay, called this the “second enclosure movement” — the first one, of course, being the English enclosure movement of common lands in medieval times and into the nineteenth century.~{ James Boyle, “The Second Enclosure Movement and the Construction of the Public Domain,” /{Law and Contemporary Problems}/ 66 (Winter–Spring 2003), pp. 33–74, at http://www.law.duke.edu/shell/cite.pl?66+Law+&+Contemp.+ Probs.+ 33+ (WinterSpring+2003). }~ +Nonetheless, a new narrative was being launched — copyrighted works as property. The idea of copyright law reflecting a policy bargain between the public and authors/corporations was being supplanted by a new story that casts copyright as property that is nearly absolute in scope and virtually perpetual in term. In hindsight, for those scholars who cared enough to see, a disquieting number of federal court cases were strengthening the hand of copyright holders at the expense of the public. James Boyle, in a much-cited essay, called this the “second enclosure movement” — the first one, of course, being the English enclosure movement of common lands in medieval times and into the nineteenth century.~{ James Boyle, “The Second Enclosure Movement and the Construction of the Public Domain,” /{Law and Contemporary Problems}/ 66 (Winter–Spring 2003), pp. 33–74, at http://www.law.duke.edu/shell/cite.pl?66+Law+&+Contemp.+Probs.+33+ (WinterSpring+2003). }~ ={Boyle, James:enclosure movement, on;commons:enclosure of+1;enclosure movement+1;copyright law:enclosure movement, and+1} Enclosure took many forms. Copyright scholar Peter Jaszi recalls, “Sometime in the mid-1980s, the professoriate started getting worried about software copyright.”~{ Interview with Peter Jaszi, October 17, 2007. }~ It feared that copyrights for software would squelch competition and prevent others from using existing code to innovate. This battle was lost, however. 
Several years later, the battle entered round two as copyright scholars and programmers sought to protect reverse-engineering as fair use. This time, they won.~{ /{Sega Enterprises v. Accolade}/, 977 F.2d 1510 (9th Cir. 1993). }~ @@ -547,7 +547,7 @@ Critics also argue that the DMCA gives large corporations a powerful legal tool In her excellent history of the political run-up to the DMCA, Litman notes, “There is no overarching vision of the public interest animating the Digital Millennium Copyright Act. None. Instead, what we have is what a variety of different private parties were able to extract from each other in the course of an incredibly complicated four-year multiparty negotiation.”~{ Litman, /{Digital Copyright}/, pp. 144–45. }~ The DMCA represents a new frontier of proprietarian control — the sanctioning of technological locks that can unilaterally override the copyright bargain. Companies asked themselves, Why rely on copyrights alone when technology can embed even stricter controls into the very design of products? ={Litman, Jessica} -The year 1998 was an especially bad year for the public domain. Besides enacting the trademark dilution bill and DMCA, the Walt Disney Company and other large media corporations succeeded in their six-year campaign to enact the Sonny Bono Copyright Term Extension Act.~{ See Wikipedia entry for the Copyright Term Extension Act, at http://en .wikipedia.org/wiki/Sonny_Bono_Copyright_Term_Extension_Act. See also /{Eldred v. Ashcroft}/, 537 U.S. 186 (2003), F. 3d 849 (2001). }~ The legislation, named after the late House legislator and former husband of the singer Cher, retroactively extended the terms of existing copyrights by twenty years. As we will see in chapter 3, this law became the improbable catalyst for a new commons movement. +The year 1998 was an especially bad year for the public domain. 
Besides enacting the trademark dilution bill and DMCA, the Walt Disney Company and other large media corporations succeeded in their six-year campaign to enact the Sonny Bono Copyright Term Extension Act.~{ See Wikipedia entry for the Copyright Term Extension Act, at http://en.wikipedia.org/wiki/Sonny_Bono_Copyright_Term_Extension_Act. See also /{Eldred v. Ashcroft}/, 537 U.S. 186 (2003), F. 3d 849 (2001). }~ The legislation, named after the late House legislator and former husband of the singer Cher, retroactively extended the terms of existing copyrights by twenty years. As we will see in chapter 3, this law became the improbable catalyst for a new commons movement. ={trademarks:dilution of;Walt Disney Company;Copyright Term Extension Act} 2~ Confronting the Proprietarian Juggernaut @@ -568,7 +568,7 @@ A number of activist voices were also coming forward at this time to challenge t The organization was oriented to hackers and cyberlibertarians, who increasingly realized that they needed an organized presence to defend citizen freedoms in cyberspace. (Barlow adapted the term /{cyberspace}/ from science-fiction writer William Gibson in 1990 and applied it to the then-unnamed cultural life on the Internet.) Initially, the EFF was concerned with hacker freedom, individual privacy, and Internet censorship. It later went through some growing pains as it moved offices, changed directors, and sought to develop a strategic focus for its advocacy and litigation. In more recent years, EFF, now based in San Francisco, has become the leading litigator of copyright, trademark, and Internet free expression issues. It also has more than ten thousand members and spirited outreach programs to the press and public. ={Gibson, William;cyberspace:use of term} -John Perry Barlow was an important visionary and populizer of the time. 
His March 1994 article “The Economy of Ideas” is one of the most prophetic yet accessible accounts of how the Internet was changing the economics of information. He astutely realized that information is not a “product” like most physical property, but rather a social experience or form of life unto itself. “Information is a verb, not a noun,” he wrote. “Freed of its containers, information obviously is not a thing. In fact, it is something that happens in the field of interaction between minds or objects or other pieces of information. . . . Sharks are said to die of suffocation if they stop swimming, and the same is nearly true of information.”~{22. John Perry Barlow, “The Economy of Ideas,” /{Wired}/, March 1994, at http:// www.wired.com/wired/archive/2.03/economy.ideas.html.}~ +John Perry Barlow was an important visionary and populizer of the time. His March 1994 article “The Economy of Ideas” is one of the most prophetic yet accessible accounts of how the Internet was changing the economics of information. He astutely realized that information is not a “product” like most physical property, but rather a social experience or form of life unto itself. “Information is a verb, not a noun,” he wrote. “Freed of its containers, information obviously is not a thing. In fact, it is something that happens in the field of interaction between minds or objects or other pieces of information. . . . Sharks are said to die of suffocation if they stop swimming, and the same is nearly true of information.”~{22. John Perry Barlow, “The Economy of Ideas,” /{Wired}/, March 1994, at http://www.wired.com/wired/archive/2.03/economy.ideas.html. }~ Instead of the sober polemics of law professors, Barlow — a retired Wyoming cattle rancher who improbably doubled as a tech intellectual and rock hipster — spiced his analysis of information with colorful metaphors and poetic aphorisms. 
Comparing information to DNA helices, Barlow wrote, “Information replicates into the cracks of possibility, always seeking new opportunities for /{Lebensraum}/.” Digital information, he said, “is a continuing process more like the metaphorphosing tales of prehistory than anything that will fit in shrink-wrap.” @@ -634,7 +634,7 @@ As Litman unpacked the realities of “authorship,” she showed how the idea of English professor Martha Woodmansee and law professor Peter Jaszi helped expose many of the half-truths about “authorship” and “originality.” Their 1994 anthology of essays, /{The Construction of Authorship}/, showed how social context is an indispensable element of “authorship,” one that copyright law essentially ignores.~{ Martha Woodmansee and Peter Jaszi, eds., /{The Construction of Authorship: Textual Appropriation in Law and Literature}/ (Durham, NC: Duke University Press, 1994). }~ Thus, even though indigenous cultures collectively create stories, music, and designs, and folk cultures generate works in a collaborative fashion, copyright law simply does not recognize such acts of collective authorship. And so they go unprotected. They are vulnerable to private appropriation and enclosure, much as Stallman’s hacker community at MIT saw its commons of code destroyed by enclosure. ={Jaszi, Peter;Woodmansee, Martha;commons:enclosure of;enclosure movement} -Before the Internet, the collaborative dimensions of creativity were hardly given much thought. An “author” was self-evidently an individual endowed with unusual creative skills. As the World Wide Web and digital technologies have proliferated, however, copyright’s traditional notions of “authorship” and “originality” have come to seem terribly crude and limited. The individual creator still matters and deserves protection, of course. 
But when dozens of people contribute to a single entry of Wikipedia, or thousands contribute to an open-source software program, how then shall we determine who is the “author”?~{ Henry Miller writes: “We carry within us so many entities, so many voices, that rare indeed is the man who can say he speaks with his own voice. In the final analysis, is that iota of uniqueness which we boast of as ‘ours’ really ours? Whatever real or unique contribution we make stems from the same inscrutable source whence everything derives. We contribute nothing but our understanding, which is a way of saying — our acceptance.” Miller, /{The Books in My Life}/ (New York: New Directions), p. 198. }~ By the lights of copyright law, how shall the value of the public domain, reconstituted as a commons, be assessed?~{ Rufus Pollock, “The Value of the Public Domain,” report for Institute for Public Policy Research, London, July 2006, at http://www.rufuspollock.org/ economics/papers/value_of_public_domain.ippr.pdf. }~ +Before the Internet, the collaborative dimensions of creativity were hardly given much thought. An “author” was self-evidently an individual endowed with unusual creative skills. As the World Wide Web and digital technologies have proliferated, however, copyright’s traditional notions of “authorship” and “originality” have come to seem terribly crude and limited. The individual creator still matters and deserves protection, of course. But when dozens of people contribute to a single entry of Wikipedia, or thousands contribute to an open-source software program, how then shall we determine who is the “author”?~{ Henry Miller writes: “We carry within us so many entities, so many voices, that rare indeed is the man who can say he speaks with his own voice. In the final analysis, is that iota of uniqueness which we boast of as ‘ours’ really ours? Whatever real or unique contribution we make stems from the same inscrutable source whence everything derives. 
We contribute nothing but our understanding, which is a way of saying — our acceptance.” Miller, /{The Books in My Life}/ (New York: New Directions), p. 198. }~ By the lights of copyright law, how shall the value of the public domain, reconstituted as a commons, be assessed?~{ Rufus Pollock, “The Value of the Public Domain,” report for Institute for Public Policy Research, London, July 2006, at http://www.rufuspollock.org/economics/papers/value_of_public_domain.ippr.pdf. }~ ={World Wide Web:collective authorship, and;creativity:collaborative} The Bellagio Declaration, the outgrowth of a conference organized by Woodmansee and Jaszi in 1993, called attention to the sweeping deficiencies of copyright law as applied. One key point stated, “In general, systems built around the author paradigm tend to obscure or undervalue the importance of the ‘public domain,’ the intellectual and cultural commons from which future works will be constructed. Each intellectual property right, in effect, fences off some portion of the public domain, making it unavailable to future creators.”~{ See James Boyle, /{Shamans, Software, and Spleens: Law and the Construction of the Information Society}/ (Cambridge, MA: Harvard University Press, 1995), p. 192. }~ @@ -653,7 +653,7 @@ But as the millennium drew near, the tech-minded legal community — and law-min That task was made easier by the intensifying cultural squeeze. The proprietarian lockdown was starting to annoy and anger people in their everyday use of music, software, DVDs, and the Web. And the property claims were growing more extreme. The American Society of Composers, Authors and Publishers had demanded that Girl Scout camps pay a public performance license for singing around the campfire. Ralph Lauren challenged the U.S. Polo Association for ownership of the word /{polo}/. 
McDonald’s succeeded in controlling the Scottish prefix Mc as applied to restaurants and motels, such as “McVegan” and “McSleep.”~{ These examples can be found in Bollier, /{Brand Name Bullies}/. }~ ={Lauren, Ralph} -The mounting sense of frustration fueled a series of conferences between 1999 and 2001 that helped crystallize the disparate energies of legal scholarship into something resembling an intellectual movement. “A number of us [legal scholars] were still doing our own thing, but we were beginning to get a sense of something,” recalls Yochai Benkler, “It was no longer Becky Eisenberg working on DNA sequences and Pamela Samuelson on computer programs and Jamie Boyle on ‘environmentalism for the ’Net’ and me working on spectrum on First Amendment issues,” said Benkler. “There was a sense of movement.”~{ Interview with Yochai Benkler, February 7, 2006.}~ (“Environmentalism for the ’Net” was an influential piece that Boyle wrote in 1998, calling for the equivalent of an environmental movement to protect the openness and freedom of the Internet.)~{ James Boyle, “A Politics of Intellectual Property: Environmentalism for the Net,” /{Duke Law Journal}/ 47, no. 1 (October 1997), pp. 87–116, at http://www .law.duke.edu/boylesite/Intprop.htm. }~ +The mounting sense of frustration fueled a series of conferences between 1999 and 2001 that helped crystallize the disparate energies of legal scholarship into something resembling an intellectual movement. “A number of us [legal scholars] were still doing our own thing, but we were beginning to get a sense of something,” recalls Yochai Benkler, “It was no longer Becky Eisenberg working on DNA sequences and Pamela Samuelson on computer programs and Jamie Boyle on ‘environmentalism for the ’Net’ and me working on spectrum on First Amendment issues,” said Benkler. 
“There was a sense of movement.”~{ Interview with Yochai Benkler, February 7, 2006.}~ (“Environmentalism for the ’Net” was an influential piece that Boyle wrote in 1998, calling for the equivalent of an environmental movement to protect the openness and freedom of the Internet.)~{ James Boyle, “A Politics of Intellectual Property: Environmentalism for the Net,” /{Duke Law Journal}/ 47, no. 1 (October 1997), pp. 87–116, at http://www.law.duke.edu/boylesite/Intprop.htm. }~ ={Boyle, James+1;Benkler, Yochai+1;Eisenberg, Rebecca;Samuelson, Pamela} “The place where things started to get even crisper,” said Benkler, “was a conference at Yale that Jamie Boyle organized in April 1999, which was already planned as a movement-building event.” That conference, Private Censorship/Perfect Choice, looked at the threats to free speech on the Web and how the public might resist. It took inspiration from John Perry Barlow’s 1996 manifesto “A Declaration of the Independence of Cyberspace.” It is worth quoting at length from Barlow’s lyrical cri de coeur — first published in /{Wired}/ and widely cited — because it expresses the growing sense of thwarted idealism among Internet users, and a yearning for greater self-determination and self-governance among commoners. Barlow wrote: @@ -692,7 +692,7 @@ In the course of his frequent travels, he had a particularly significant rendezv Eldred was a book enthusiast and computer programmer who had reached the end of his rope. Three years earlier, in 1995, he had launched a simple but brilliant project: a free online archive of classic American literature. Using his PC and a server in his home in New Hampshire, Eldred posted the books of Nathaniel Hawthorne, Henry James, Wallace Stevens, and dozens of other great authors whose works were in the public domain. Eldred figured it would be a great service to humanity to post the texts on the World Wide Web, which was just beginning to go mainstream. 
-Eldred had previously worked for Apollo Computer and Hewlett-Packard and was experienced in many aspects of computers and software. In the late 1980s, in fact, he had developed a system that enabled users to post electronic text files and then browse and print them on demand. When the World Wide Web arrived, Eldred was understandably excited. “It seemed to me that there was a possibility of having a system for electronic books that was similar to what I had done before. I was interested in experimenting with this to see if it was possible.”~{ Interview with Eric Eldred, August 1, 2006; Daren Fonda, “Copyright Crusader,” /{Boston Globe Magazine}/, August 29, 1999, available at http://www .boston.com/globe/magazine/8-29/featurestory1.shtml; and Eric Eldred, “Battle of the Books: The Ebook vs. the Antibook,” November 15, 1998, at http://www.eldritchpress.org/battle.html. }~ +Eldred had previously worked for Apollo Computer and Hewlett-Packard and was experienced in many aspects of computers and software. In the late 1980s, in fact, he had developed a system that enabled users to post electronic text files and then browse and print them on demand. When the World Wide Web arrived, Eldred was understandably excited. “It seemed to me that there was a possibility of having a system for electronic books that was similar to what I had done before. I was interested in experimenting with this to see if it was possible.”~{ Interview with Eric Eldred, August 1, 2006; Daren Fonda, “Copyright Crusader,” /{Boston Globe Magazine}/, August 29, 1999, available at http://www.boston.com/globe/magazine/8-29/featurestory1.shtml; and Eric Eldred, “Battle of the Books: The Ebook vs. the Antibook,” November 15, 1998, at http://www.eldritchpress.org/battle.html. 
}~ So Eldred set out to build his own archive of public-domain books: “I got books from the library or wherever, and I learned how to do copyright research and how to scan books, do OCR [opticalcharacter recognition] and mark them up as HTML [the programming language used on the Web],” he said. “I just wanted to make books more accessible to readers.”~{ Interview with Eric Eldred, August 1, 2006. }~ @@ -724,7 +724,7 @@ At a more basic level, the copyright term extension showed contempt for the very The copyright term extension act privatized so many of the public domain books on the Eldritch Press Web site, and so offended Eldred’s sense of justice, that in November 1998 he decided to close his site in protest. The new law meant that he would not be able to add any works published since 1923 to his Web site until 2019. “I can no longer accomplish what I set out to do,” said Eldred.~{ Ibid. }~ ={Eldred, Eric:public domain, and|Lessig, and+3;Lessig, Lawrence+3:Eldred, and+3} -As luck had it, Larry Lessig was looking for an Everyman of the Internet. Lessig, then a thirty-seven-year-old professor at Harvard Law School, was looking for a suitable plaintiff for his envisioned constitutional test case. He had initially approached Michael S. Hart, the founder of Project Gutenberg, the first producer of free electronic books. At the time, the project had nearly six thousand public-domain books available online. (It now has twenty thousand books; about 3 million books are downloaded every month.) Hart was receptive to the case but had his own ideas about how the case should be argued. He wanted the legal complaint to include a stirring populist manifesto railing against rapacious copyright holders. Lessig demurred and went in search of another plaintiff.~{ Richard Poynder interview with Lawrence Lessig, “The Basement Interviews: Free Culture,” April 7, 2006, p. 26, available at http://poynder.blogspot.com/ 2006/03/basement-interviews.html. 
See also Steven Levy, “Lawrence Lessig’s Supreme Showdown,” /{Wired}/, October 2002, pp. 140–45, 154–56, available at http://www.wired.com/wired/archive/10.10/lessig.html. Project Gutenberg is at http://wwwgutenberg.org. }~ +As luck had it, Larry Lessig was looking for an Everyman of the Internet. Lessig, then a thirty-seven-year-old professor at Harvard Law School, was looking for a suitable plaintiff for his envisioned constitutional test case. He had initially approached Michael S. Hart, the founder of Project Gutenberg, the first producer of free electronic books. At the time, the project had nearly six thousand public-domain books available online. (It now has twenty thousand books; about 3 million books are downloaded every month.) Hart was receptive to the case but had his own ideas about how the case should be argued. He wanted the legal complaint to include a stirring populist manifesto railing against rapacious copyright holders. Lessig demurred and went in search of another plaintiff.~{ Richard Poynder interview with Lawrence Lessig, “The Basement Interviews: Free Culture,” April 7, 2006, p. 26, available at http://poynder.blogspot.com/2006/03/basement-interviews.html. See also Steven Levy, “Lawrence Lessig’s Supreme Showdown,” /{Wired}/, October 2002, pp. 140–45, 154–56, available at http://www.wired.com/wired/archive/10.10/lessig.html. Project Gutenberg is at http://wwwgutenberg.org. }~ ={Hart, Michael S.;Project Gutenberg} After reading about Eldred’s protests in the /{Boston Globe}/, and meeting with him over coffee, Lessig asked Eldred if he would be willing to be the plaintiff in his envisioned case. Eldred readily agreed. As a conscientious objector and draft resister during the Vietnam War, he was ready to go to great lengths to fight the Sonny Bono Act. “Initially, I volunteered to violate the law if necessary and get arrested and go to jail,” Eldred said. 
“But Larry told me that was not necessary.” A good thing, because under the No Electronic Theft Act, passed in 1997, Eldred could be charged with a felony. “I could face jail, fines, seizure of my computer, termination of my Internet service without notice — and so all the e-books on the Web site could be instantly lost,” he said. @@ -765,7 +765,7 @@ For Lessig, the LambdaMOO “rape” had an obvious resonance with Catherine Mac To explore the issues further, Lessig developed one of the first courses on the law of cyberspace. He taught it in the spring semester of 1995 at Yale Law School, where he was a visiting professor, and later at the University of Chicago and Harvard law schools. During the Yale class, an exchange with a student, Andrew Shapiro, jarred his thinking in a new direction: “I was constantly thinking about the way that changing suppositions of constitutional eras had to be accounted for in the interpretation of the Constitution across time. Andrew made this point about how there’s an equivalent in the technical infrastructure [of the Internet] that you have to think about. And then I began to think about how there were norms and law and infrastructure — and then I eventually added markets into this — which combine to frame what policymaking is in any particular context.”~{ Ibid. }~ ={Shapiro, Andrew} -This line of analysis became a central theme of Lessig’s startling first book, /{Code and Other Laws of Cyberspace}/, published in 1999.~{ Lessig, /{Code and Other Laws of Cyberspace}/ (New York: Basic Books, 1999). }~ /{Code}/ took on widespread assumptions that the Internet would usher in a new libertarian, free-market utopia. 
Cyberlibertarian futurists such as Alvin Toffler, Esther Dyson, George Gilder, and John Gilmore had routinely invoked cyberspace as a revolutionary force that would render government, regulation, and social welfare programs obsolete and unleash the transformative power of free markets.~{ Esther Dyson, George Gilder, George Keyworth, and Alvin Toffler, “Cyberspace and the American Dream: A Magna Carta for the Knowledge Age,” Progress and Freedom Foundation, August 1994, available at http://www.pff .org/issues-pubs/futureinsights/fil.2magnacarta.html. }~ In the libertarian scenario, individual freedom can flourish only if government gets the hell out of the way and lets individuals create, consume, and interact as they see fit, without any paternalistic or tyrannical constraints. Prosperity can prevail and scarcity disappear only if meddling bureaucrats and politicians leave the citizens of the Internet to their own devices. As Louis Rossetto, the founder and publisher of /{Wired}/, bluntly put it: “The idea that we need to worry about anyone being ‘left out’ is entirely atavistic to me, a product of that old economics of scarcity and the 19th century social thinking that grew out of it.”~{ David Hudson, interview with Louis Rossetto, “What Kind of Libertarian,” /{Rewired}/ (Macmillan, 1997), p. 255. }~ +This line of analysis became a central theme of Lessig’s startling first book, /{Code and Other Laws of Cyberspace}/, published in 1999.~{ Lessig, /{Code and Other Laws of Cyberspace}/ (New York: Basic Books, 1999). }~ /{Code}/ took on widespread assumptions that the Internet would usher in a new libertarian, free-market utopia. 
Cyberlibertarian futurists such as Alvin Toffler, Esther Dyson, George Gilder, and John Gilmore had routinely invoked cyberspace as a revolutionary force that would render government, regulation, and social welfare programs obsolete and unleash the transformative power of free markets.~{ Esther Dyson, George Gilder, George Keyworth, and Alvin Toffler, “Cyberspace and the American Dream: A Magna Carta for the Knowledge Age,” Progress and Freedom Foundation, August 1994, available at http://www.pff.org/issues-pubs/futureinsights/fil.2magnacarta.html. }~ In the libertarian scenario, individual freedom can flourish only if government gets the hell out of the way and lets individuals create, consume, and interact as they see fit, without any paternalistic or tyrannical constraints. Prosperity can prevail and scarcity disappear only if meddling bureaucrats and politicians leave the citizens of the Internet to their own devices. As Louis Rossetto, the founder and publisher of /{Wired}/, bluntly put it: “The idea that we need to worry about anyone being ‘left out’ is entirely atavistic to me, a product of that old economics of scarcity and the 19th century social thinking that grew out of it.”~{ David Hudson, interview with Louis Rossetto, “What Kind of Libertarian,” /{Rewired}/ (Macmillan, 1997), p. 255. }~ ={code:law, as+4;law:code as+4;Lessig, Lawrence:Code and Other Laws of Cyberspace+4;Dyson, Esther;Gilder, George;Gilmore, John;Rossetto, Louis;Toffler, Alvin;Internet:architecture of+2|freedom of+1;cyberspace: economic effects of} Lessig was more wary. In /{Code}/, he constructed a sweeping theoretical framework to show how freedom on the Internet must be actively, deliberately constructed; it won’t simply happen on its own. 
Inspired by conversations with computer programmer Mitch Kapor, who declared that “architecture is politics” in 1991, Lessig’s book showed how software code was supplanting the regulatory powers previously enjoyed by sovereign nation-states and governments. The design of the Internet and software applications was becoming more influential than conventional sources of policymaking — Congress, the courts, federal agencies. /{Code is law}/, as Lessig famously put it. @@ -793,7 +793,7 @@ Back at the Berkman Center, however, there were plenty of opportunities to influ While nourished by the work of his academic colleagues, Lessig was determined to come up with ingenious ways to /{do something}/ about the distressing drift of copyright law. It was important to take the offensive. Notwithstanding the pessimism of /{Code}/, Lessig’s decidedly optimistic answer was to gin up a constitutional challenge to copyright law. Many legal experts and even sympathetic colleagues were skeptical. Peter Jaszi, a leading intellectual law professor at American University, told a reporter at the time, “It’s not so much that we thought it was a terrible idea but that it was just unprecedented. Congress has been extending copyright for 180 years, and this is the first time someone said it violated the Constitution.”~{ David Streitfeld, “The Cultural Anarchist vs. the Hollywood Police State,” /{Los Angeles Times Magazine}/, September 22, 2002, p. 32. }~ Others worried that an adverse ruling could set back the larger cause of copyright reform. ={Jaszi, Peter;Lessig, Lawrence:Code and Other Laws of Cyberspace;law:social change, and+3;copyright law:expansion of} -In the spirit of the commons, Lessig and his Berkman Center colleagues decided that the very process for mounting the /{Eldred}/ lawsuit would be different: “Rather than the secret battles of lawyers going to war, we will argue this case in the open. This is a case about the commons; we will litigate it in the commons. 
Our arguments and strategy will be developed online, in a space called ‘openlaw.org.’ Key briefs will be drafted online, with participants given the opportunity to criticize the briefs and suggest other arguments. . . . Building on the model of open source software, we are working from the hypothesis that an open development process best harnesses the distributed resources of the Internet community. By using the Internet, we hope to enable the public interest to speak as loudly as the interests of corporations.”~{ Lawrence Lessig, “Commons Law,” June 24, 1999, posted on www.intellectu alcapital.com/issues/issue251/item5505.asp, and Open Law archive at http:// cyber.law.harvard.edu/openlaw. }~ +In the spirit of the commons, Lessig and his Berkman Center colleagues decided that the very process for mounting the /{Eldred}/ lawsuit would be different: “Rather than the secret battles of lawyers going to war, we will argue this case in the open. This is a case about the commons; we will litigate it in the commons. Our arguments and strategy will be developed online, in a space called ‘openlaw.org.’ Key briefs will be drafted online, with participants given the opportunity to criticize the briefs and suggest other arguments. . . . Building on the model of open source software, we are working from the hypothesis that an open development process best harnesses the distributed resources of the Internet community. By using the Internet, we hope to enable the public interest to speak as loudly as the interests of corporations.”~{ Lawrence Lessig, “Commons Law,” June 24, 1999, posted on www.intellectu alcapital.com/issues/issue251/item5505.asp, and Open Law archive at http://cyber.law.harvard.edu/openlaw. }~ ={Eldred v. Reno/Eldred v. Ashcroft+28;Lessig, Lawrence:Eldred v. 
Reno, and+28|law in contemporary context, and+1} Emulating the open-source development model was a nice touch, and perhaps useful; dozens of people around the world registered at the Openlaw site and posted suggestions. Some of the examples and legal critiques were used in developing the case, and the model was later used by lawyers in the so-called DeCSS case, in which a hacker broke the encryption of a DVD. But it turns out that open, distributed creativity has its limits in the baroque dance of litigation; it can’t work when secrecy and confidentiality are important, for example. @@ -810,18 +810,18 @@ Normally, this would have been the end of the road for a case. Very few appeals At this point, Lessig realized he needed the advice and support of some experienced Supreme Court litigators. He enlisted help from additional lawyers at Jones, Day; Alan Morrison of Public Citizen Litigation Group; Kathleen Sullivan, the dean of Stanford Law School; and Charles Fried, a former solicitor general under President Reagan. Professor Peter Jaszi and the students of his law clinic drafted an amicus brief. ={Morrison, Alan;Fried, Charles;Jaszi, Peter;Sullivan, Kathleen} -A key concern was how to frame the arguments. Attorney Don Ayer of Jones, Day repeatedly urged Lessig to stress the dramatic harm that the Bono Act was inflicting on free speech and free culture. But as Lessig later confessed, “I hate this view of the law. . . . I was not persuaded that we had to sell our case like soap.”~{ Lessig, “How I Lost the Big One,” /{Legal Affairs}/, March/April 2004, available at http://www.legalaffairs.org/issues/March-April-2004/story_lessig_marap r04.msp. }~ Lessig was convinced that the only way /{Eldred}/ could prevail at the Supreme Court would be to win over the conservative justices with a matter of principle. To Lessig, the harm was obvious; what needed emphasis was how the Sonny Bono Act violated “originalist” principles of jurisprudence.
(Originalist judges claim to interpret the Constitution based on its “original” meanings in 1791, which includes a belief that Congress has strictly enumerated powers, not broad legislative discretion.) +A key concern was how to frame the arguments. Attorney Don Ayer of Jones, Day repeatedly urged Lessig to stress the dramatic harm that the Bono Act was inflicting on free speech and free culture. But as Lessig later confessed, “I hate this view of the law. . . . I was not persuaded that we had to sell our case like soap.”~{ Lessig, “How I Lost the Big One,” /{Legal Affairs}/, March/April 2004, available at http://www.legalaffairs.org/issues/March-April-2004/story_lessig_marapr04.msp. }~ Lessig was convinced that the only way /{Eldred}/ could prevail at the Supreme Court would be to win over the conservative justices with a matter of principle. To Lessig, the harm was obvious; what needed emphasis was how the Sonny Bono Act violated “originalist” principles of jurisprudence. (Originalist judges claim to interpret the Constitution based on its “original” meanings in 1791, which includes a belief that Congress has strictly enumerated powers, not broad legislative discretion.) ={Ayer, Don;law:originalist principles of+2} “We tried to make an argument that if you were an originalist— in the way these conservative judges said they were in many other cases — then you should look to the original values in the Copyright Clause,” said Lessig. “And we argued that if you did that then you had to conclude that Congress had wildly overstepped its constitutional authority, and so the law should be struck down.”~{ Lessig interview with Richard Poynder, April 7, 2006, p. 25. }~ Flaunting the harm caused by the copyright term extension struck Lessig as showy and gratuitous; he considered the harm more or less self-evident.
In the aftermath of a public debate that Lessig once had with Jack Valenti, a questioner on Slashdot, a hacker Web site, suggested that Lessig would be more persuasive if he asserted “a clear conception of direct harm . . . than the secondary harm of the copyright holders getting a really sweet deal.” Lessig conceded that such a focus “has been a weakness of mine for a long time. In my way of looking at the world, the point is a matter of principle, not pragmatics. . . . There are many others who are better at this pragmatism stuff. To me, it just feels insulting.”~{ “Lawrence Lessig Answers Your Questions,” Slashdot.org, December 21, 2001, Question 1, “The question of harm,” posted by “caduguid,” with Lessig response, available at http://interviews.slashdot.org/article.pl?sid=01/12/21/155221. }~ ={copyright law:expansion of;Copyright Clause, U.S. Constitution;Valenti, Jack} -And so, despite warnings to the contrary, Lessig’s legal strategy relied on a call to uphold originalist principles. Having clerked for Justice Scalia and Judge Posner, Lessig felt that he understood the mind-set and sympathies of the conservative jurists. “If we get to the Supreme Court,” Lessig told Slashdot readers in December 2001, “I am certain that we will win. This is not a left/right issue. The conservatives on the Court will look at the framers’ Constitution— which requires that copyrights be granted for ‘limited times’ — and see that the current practice of Congress . . . makes a mockery of the framers’ plan. And the liberals will look at the effect of these never-ending copyrights on free speech, and conclude that Congress is not justified in this regulation of speech. The Supreme Court doesn’t give a hoot about Hollywood; they will follow the law.”~{ Lessig response to question 11, Slashdot.org, “Will the extension of copyright continue?” posed by “Artifice_Eternity,” available at http://interviews.slash dot.org/article.pl?sid=01/12/21/155221. 
}~ +And so, despite warnings to the contrary, Lessig’s legal strategy relied on a call to uphold originalist principles. Having clerked for Justice Scalia and Judge Posner, Lessig felt that he understood the mind-set and sympathies of the conservative jurists. “If we get to the Supreme Court,” Lessig told Slashdot readers in December 2001, “I am certain that we will win. This is not a left/right issue. The conservatives on the Court will look at the framers’ Constitution— which requires that copyrights be granted for ‘limited times’ — and see that the current practice of Congress . . . makes a mockery of the framers’ plan. And the liberals will look at the effect of these never-ending copyrights on free speech, and conclude that Congress is not justified in this regulation of speech. The Supreme Court doesn’t give a hoot about Hollywood; they will follow the law.”~{ Lessig response to question 11, Slashdot.org, “Will the extension of copyright continue?” posed by “Artifice_Eternity,” available at http://interviews.slashdot.org/article.pl?sid=01/12/21/155221. }~ ={Posner, Richard;Scalia, Antonin;Copyright Clause, U.S. Constitution;copyright law:expansion of+5;Copyright Term Extension Act+5} Lessig took pride in the fact that thirty-eight amicus briefs were filed on behalf of /{Eldred}/. They included a wide range of authors, computer and consumer electronics companies, and organizations devoted to arts, culture, education, and journalism. Besides the usual suspects like the Free Software Foundation, Electronic Frontier Foundation, and Public Knowledge, supporting briefs were filed by fifteen economists including Kenneth Arrow and Milton Friedman, Phyllis Schlafly of the Eagle Forum, and the Intel Corporation. -At oral arguments, Lessig immediately confronted a skeptical bench. Justice Sandra Day O’Connor worried about overturning years of previous copyright term extensions. Justice William Rehnquist proposed. 
“You want the right to copy verbatim other people’s books, don’t you?” And when Justice Anthony Kennedy invited Lessig to expound upon the great harm that the law was inflicting on free speech and culture, Lessig declined the opportunity. He instead restated his core constitutional argument, that copyright terms cannot be perpetual. “This was a correct answer, but it wasn’t the right answer,” Lessig later confessed in a candid postmortem of the case. “The right answer was to say that there was an obvious and profound harm. Any number of briefs had been written about it. Kennedy wanted to hear it. And here was where Don Ayer’s advice should have mattered. This was a softball; my answer was a swing and a miss.”~{ See http://www.supremecourtus.gov/oral_arguments/argument_transcripts/ 01-618.pdf. See also Lessig, “How I Lost the Big One,” and Linda Greenhouse, “Justices Hear Arguments in Challenge to Copyrights,” /{New York Times}/, October 10, 2002. A number of Supreme Court opinions in the /{Eldred}/ case can be found at the Openlaw archive at http://cyber.law.harvard.edu/openlaw/ eldredvreno. The /{Loyola Los Angeles Law Review}/ held a symposium on /{Eldred v. Ashcroft}/, available at http://llr.lls.edu/volumes/v36-issue1. }~ No justices spoke in defense of the Sonny Bono Act. +At oral arguments, Lessig immediately confronted a skeptical bench. Justice Sandra Day O’Connor worried about overturning years of previous copyright term extensions. Justice William Rehnquist proposed. “You want the right to copy verbatim other people’s books, don’t you?” And when Justice Anthony Kennedy invited Lessig to expound upon the great harm that the law was inflicting on free speech and culture, Lessig declined the opportunity. He instead restated his core constitutional argument, that copyright terms cannot be perpetual. “This was a correct answer, but it wasn’t the right answer,” Lessig later confessed in a candid postmortem of the case. 
“The right answer was to say that there was an obvious and profound harm. Any number of briefs had been written about it. Kennedy wanted to hear it. And here was where Don Ayer’s advice should have mattered. This was a softball; my answer was a swing and a miss.”~{ See http://www.supremecourtus.gov/oral_arguments/argument_transcripts/01-618.pdf. See also Lessig, “How I Lost the Big One,” and Linda Greenhouse, “Justices Hear Arguments in Challenge to Copyrights,” /{New York Times}/, October 10, 2002. A number of Supreme Court opinions in the /{Eldred}/ case can be found at the Openlaw archive at http://cyber.law.harvard.edu/openlaw/eldredvreno. The /{Loyola Los Angeles Law Review}/ held a symposium on /{Eldred v. Ashcroft}/, available at http://llr.lls.edu/volumes/v36-issue1. }~ No justices spoke in defense of the Sonny Bono Act. ={Ayer, Don;Kennedy, Anthony;O’Connor, Sandra Day;Rehnquist, William} Yet they had clear reservations about the Supreme Court’s authority to dictate the length of copyright terms. @@ -835,7 +835,7 @@ Justices Stephen Breyer and John Paul Stevens accepted Lessig’s arguments, and In assessing the broad impact of the /{Eldred}/ ruling, copyright scholar Siva Vaidhyanathan cited law professor Shubha Ghosh’s observation that the /{Eldred}/ ruling had effectively “deconstitutionalized” copyright law. /{Eldred}/ pushed copyright law ={Ghosh, Shubha;Vaidhyanathan, Siva+1} -_1 farther into the realm of policy and power battles and away from principles that have anchored the system for two centuries. That means public interest advocates and activists must take their battles to the public sphere and the halls of Congress. We can’t appeal to the Founders’ wishes or republican ideals. We will have to make pragmatic arguments in clear language about the effects of excessive copyright on research, teaching, art and journalism. 
And we will have to make naked mass power arguments with echoes of “we want our MP3” and “it takes an industry of billions to hold us back.”~{ Siva Vaidhyanathan, “After the Copyright Smackdown: What Next?” /{Salon}/, January 17, 2003, at http://www.salon.com/tech/feature/2003/01/17/copy right.print.html. }~ +_1 farther into the realm of policy and power battles and away from principles that have anchored the system for two centuries. That means public interest advocates and activists must take their battles to the public sphere and the halls of Congress. We can’t appeal to the Founders’ wishes or republican ideals. We will have to make pragmatic arguments in clear language about the effects of excessive copyright on research, teaching, art and journalism. And we will have to make naked mass power arguments with echoes of “we want our MP3” and “it takes an industry of billions to hold us back.”~{ Siva Vaidhyanathan, “After the Copyright Smackdown: What Next?” /{Salon}/, January 17, 2003, at http://www.salon.com/tech/feature/2003/01/17/copyright.print.html. }~ ={copyright law:balance of public and private rights} 2~ A Movement Is Born @@ -848,7 +848,7 @@ After four years of relentless work, Lessig was frustrated and dejected. “I ha Yet Lessig had certainly been correct that /{Eldred}/ would not succeed unless it convinced the Court’s conservative majority. The fact that the originalist gambit failed was perhaps the strongest message of all: /{nothing}/ would convince this Court to rein in the excesses of copyright law. -Even before the Supreme Court had delivered its ruling, Lessig admitted his misgivings about the power of law to solve copyright’s failings: “The more I’m in this battle, the less I believe that constitutional law on its own can solve the problem. 
If Americans can’t see the value of freedom without the help of lawyers, then we don’t deserve freedom.”~{ Lessig response to Question 11, “Cyberspace Amendment,” posed by “kzinti,” in Slashdot, available at http://interviews.slashdot.org/article.pl?sid=01/12/ 21/155221. }~ Yet mobilizing freedom-loving Americans to seek redress from Congress was also likely to be doomed. Hollywood film studios and record companies had showered some $16.6 million and $1.8 million, respectively, on federal candidates and parties in 1998. Legislators know who butters their bread, and the public was not an organized influence on this issue. No wonder a progressive copyright reform agenda was going nowhere. +Even before the Supreme Court had delivered its ruling, Lessig admitted his misgivings about the power of law to solve copyright’s failings: “The more I’m in this battle, the less I believe that constitutional law on its own can solve the problem. If Americans can’t see the value of freedom without the help of lawyers, then we don’t deserve freedom.”~{ Lessig response to Question 11, “Cyberspace Amendment,” posed by “kzinti,” in Slashdot, available at http://interviews.slashdot.org/article.pl?sid=01/12/21/155221. }~ Yet mobilizing freedom-loving Americans to seek redress from Congress was also likely to be doomed. Hollywood film studios and record companies had showered some $16.6 million and $1.8 million, respectively, on federal candidates and parties in 1998. Legislators know who butters their bread, and the public was not an organized influence on this issue. No wonder a progressive copyright reform agenda was going nowhere. ={Copyright Term Extension Act+1;Eldred v. Reno/Eldred v. Ashcroft:Supreme Court, and;law:limited power of;copyright law:expansion of+1} Four years after the /{Eldred}/ ruling, Lessig had some second thoughts about the “Mickey Mouse” messaging strategy. 
Opponents of the copyright term extension, including Lessig, had often flaunted Mickey motifs in their dealings with the press and railed at the “Mickey Mouse Protection Act.” Yet in 2006, Lessig lamented to one interviewer that “the case got framed as one about Mickey Mouse. Whereas the reality is, who gives a damn about Mickey Mouse? The really destructive feature of the Sonny Bono law is the way it locks up culture that has no continuing commercial value at all. It orphaned culture. So by focusing on Mickey Mouse, the Court thought this was an issue of whether you believed in property or not. If, however, we had focused people on all the culture that is being lost because it is locked up by copyright, we might have succeeded.”~{ Interview with Poynder, April 7, 2006, pp. 26–27. }~ @@ -856,7 +856,7 @@ Four years after the /{Eldred}/ ruling, Lessig had some second thoughts about th The lasting impact of the /{Eldred}/ case, ironically, may have less to do with the law than with the cultural movement it engendered. The lawsuit provided a powerful platform for educating the American people about copyright law. A subject long regarded as arcane and complicated was now the subject of prominent articles in the /{New York Times}/, /{Salon}/, computer magazines, wire services, and countless other publications and Web sites. A cover story for the /{Los Angeles Times}/'s Sunday magazine explained how the case could “change the way Hollywood makes money — and the way we experience art.” /{Wired}/ magazine headlined its profile of Lessig “The Great Liberator.” Lessig himself barnstormed the country giving dozens of presentations to librarians, technologists, computer programmers, filmmakers, college students, and many others. Even Lessig’s adversary at the district court level, Arthur R. 
Miller, a Harvard Law School professor, agreed, “The case has sparked a public discussion that wasn’t happening before.” ={Miller, Arthur R.} -Lessig’s orations often provoked the fervor of a revival meeting — and led to more than a few conversions. This may appear surprising because Lessig, with his receding hairline and wireframe glasses, strikes an unprepossessing pose. In the professorial tradition, he can sometimes be didactic and patronizing. But on the stage, Lessig is stylish, poised, and mesmerizing. His carefully crafted talks are intellectual but entertaining, sophisticated but plainspoken— and always simmering with moral passion. He typically uses a customized version of Keynote, a Macintosh-based program similar to PowerPoint, to punctuate his dramatic delivery with witty visuals and quick flashes of words. (Experts in professional presentations have dubbed this style the “Lessig Method,” and likened it to the Takahashi Method in Japan because slides often use a single word, short quote, or photo.)~{ Garr Reynolds’s blog on professional presentation design, “The ‘Lessig Method’ of Presentation,” October 5, 2005, available at http://presentationzen .blogs.com/presentationzen/2005/10/the_lessig_meth.html. }~ +Lessig’s orations often provoked the fervor of a revival meeting — and led to more than a few conversions. This may appear surprising because Lessig, with his receding hairline and wireframe glasses, strikes an unprepossessing pose. In the professorial tradition, he can sometimes be didactic and patronizing. But on the stage, Lessig is stylish, poised, and mesmerizing. His carefully crafted talks are intellectual but entertaining, sophisticated but plainspoken— and always simmering with moral passion. He typically uses a customized version of Keynote, a Macintosh-based program similar to PowerPoint, to punctuate his dramatic delivery with witty visuals and quick flashes of words. 
(Experts in professional presentations have dubbed this style the “Lessig Method,” and likened it to the Takahashi Method in Japan because slides often use a single word, short quote, or photo.)~{ Garr Reynolds’s blog on professional presentation design, “The ‘Lessig Method’ of Presentation,” October 5, 2005, available at http://presentationzen.blogs.com/presentationzen/2005/10/the_lessig_meth.html. }~ More than a sidebar, Lessig’s public speaking has been an important aspect of his leadership in building a commons movement. His talks have helped some fairly sequestered constituencies in technical fields — computer programming, library science, Internet policy, copyright law — understand the larger political and cultural significance of their work. The results have sometimes been galvanizing. As one veteran hacker told me in 2006, “There’s a whole connoisseurship of Lessig talks. He’s a little past his peak right now — but there was a period where, like when he gave the lecture at OSCON [a conference of open-source programmers], when he was done, they wanted to start a riot. People were literally milling around, looking for things to smash. He was saying to these people who worked on open source, ‘There’s a larger world context to your work. The government is doing things — and you can stop them!’ ”~{ Interview with Aaron Swartz, October 10, 2006. }~ ={Lessig, Lawrence:public speaker, as} @@ -910,7 +910,7 @@ Lessig told me that when he recognized Eldred’s Web site as a new type of soci It helps to remember that in 1998 and the following years, the legality of sharing online works and downloading them was highly ambiguous. Prevailing legal discourse set forth a rather stark, dualistic world: either a work is copyrighted with “all rights reserved,” or a work is in the public domain, available to anyone without restriction. The mental categories of the time offered no room for a “constituency of the reasonable,” in Lessig’s words. 
={copyright law:public domain vs.;public domain:copyright law, and} -Copyright law made nominal provisions for a middle ground in the form of the fair use doctrine and the public domain. But Lessig realized that fair use was “just a terrible structure on which to build freedom. There are basically no bright lines; everything is a constant debate. Of course, we don’t want to erase or compromise or weaken [these doctrines] in any sense. But it’s very important to build an infrastructure that doesn’t depend upon four years of litigation.” Or as Lessig was wont to put it in his impassioned performances on the stump: “Fuck fair use.”~{ Robert S. Boynton, “Righting Copyright: Fair Use and Digital Environmentalism,” /{Bookforum}/, February/March 2005, available at http://www.robert boynton.com/articleDisplay.php?article_id=1. }~ +Copyright law made nominal provisions for a middle ground in the form of the fair use doctrine and the public domain. But Lessig realized that fair use was “just a terrible structure on which to build freedom. There are basically no bright lines; everything is a constant debate. Of course, we don’t want to erase or compromise or weaken [these doctrines] in any sense. But it’s very important to build an infrastructure that doesn’t depend upon four years of litigation.” Or as Lessig was wont to put it in his impassioned performances on the stump: “Fuck fair use.”~{ Robert S. Boynton, “Righting Copyright: Fair Use and Digital Environmentalism,” /{Bookforum}/, February/March 2005, available at http://www.robertboynton.com/articleDisplay.php?article_id=1. }~ ={copyright law:fair use doctrine, and+2;fair use doctrine:copyright law, and+2;Lessig, Lawrence:fair use, on+2} This was a theatrical flourish, of course. Back in Palo Alto, Lessig in 2001 had launched the Center for Internet & Society at Stanford Law School, which actively takes on lawsuits seeking to vindicate the public’s fair use rights, among other things. 
One notable case was against Stephen Joyce, the grandson of novelist James Joyce. As executor of the Joyce literary estate, Stephen Joyce steadfastly prevented dozens of scholars from quoting from the great writer’s archive of unpublished letters.~{ See, e.g., D. T. Max, “The Injustice Collector,” /{New Yorker}/, June 19, 2006, pp. 34ff. }~ (After losing a key court ruling in February 2007, the Joyce estate settled the case on terms favorable to a scholar who had been denied access to the Joyce papers.) @@ -985,7 +985,7 @@ What ensued was a lengthy and irregular series of e-mail conversations and socia A digital archive for donated and public-domain works had great appeal. Just as land trusts acted as trustees of donated plots of land, so the Copyright’s Commons (as Lessig proposed that it be named) would be a “conservancy” for film, books, music, and other works that were either in the public domain or donated. Six weeks after Abelson’s original suggestion, Lessig produced a “Proposal for an Intellectual Property Conservancy” for discussion purposes.~{ Lawrence Lessig, “Proposal for the Intellectual Property Conservancy,” e-mail to ipcommons group, November 12, 2000. }~ He now called the concept “an IP commons” — “the establishment of an intellectual property conservancy to facilitate the collection and distribution under a GPL-like license of all forms of intellectual property.” As elaborated by two Harvard Law School students, Chris Babbitt and Claire Prestel, “The conservancy will attempt to bridge the gap between authors, corporate copyright holders and public domain advocates by providing a repository of donated works which we believe will create a more perfect ‘market’ for intellectual property.”~{ Chris Babbitt and Claire Prestel, “Memorandum to Michael Carroll, Wilmer Cutler Pickering, ‘IP Conservancy,’ ” October 24, 2000. 
}~ ={belson, Hal:copyright conservancy idea, and+2;Babbitt, Chris;Prestel, Claire;Copyright’s Commons+27;Creative Commons (CC):Copyright’s Commons, as+27;IP Commons+27;Lessig, Lawrence:Copyright’s Commons, and+27} -Friendly critiques started arriving immediately. Stallman considered the proposal a “good idea overall,” but as usual he objected to the words, such as “intellectual property” and “copyright protection,” which he considered “propaganda for the other side.”~{ E-mail from Richard Stallman to Lessig, September 11, 2000. See also http:// www.gnu.org/philosophy/words-to-avoid.html. Stallman suggested calling the project the “Copyright and Patent Conservancy.” }~ Abelson, a friend and colleague of Stallman’s at MIT, was not finicky about word choices, but he did believe that software donations should be directed to the Free Software Foundation, not to the envisioned project. FSF already existed, for one thing, but in addition, said Abelson, “It may be detrimental to have people initially associate this [new project] too closely with the FSF. . . . We need to craft a public position that will unify people. An FSF-style ‘let’s undo the effects of all those evil people licensing software’ is not what we want here.”~{ E-mail from Hal Abelson to Lessig, September 12, 2000. }~ Some people suggested attracting people to the conservancy by having “jewels” such as material from the estates of deceased artists. Another suggested hosting special licenses, such as the Open Audio License, a license issued by the Electronic Frontier Foundation in 2001 that lets musicians authorize the copying and reuse of their songs so long as credit is given and derivative songs can be shared. +Friendly critiques started arriving immediately. 
Stallman considered the proposal a “good idea overall,” but as usual he objected to the words, such as “intellectual property” and “copyright protection,” which he considered “propaganda for the other side.”~{ E-mail from Richard Stallman to Lessig, September 11, 2000. See also http://www.gnu.org/philosophy/words-to-avoid.html. Stallman suggested calling the project the “Copyright and Patent Conservancy.” }~ Abelson, a friend and colleague of Stallman’s at MIT, was not finicky about word choices, but he did believe that software donations should be directed to the Free Software Foundation, not to the envisioned project. FSF already existed, for one thing, but in addition, said Abelson, “It may be detrimental to have people initially associate this [new project] too closely with the FSF. . . . We need to craft a public position that will unify people. An FSF-style ‘let’s undo the effects of all those evil people licensing software’ is not what we want here.”~{ E-mail from Hal Abelson to Lessig, September 12, 2000. }~ Some people suggested attracting people to the conservancy by having “jewels” such as material from the estates of deceased artists. Another suggested hosting special licenses, such as the Open Audio License, a license issued by the Electronic Frontier Foundation in 2001 that lets musicians authorize the copying and reuse of their songs so long as credit is given and derivative songs can be shared. ={Stallman, Richard:Copyright’s Commons, and;Abelson, Hal:Free Software Foundation, and+1;Electronic Frontier Foundation (EFF);Free Software Foundation} The most difficult issue, said Abelson, was the economics of the project. The care and maintenance of donations, such as the master version of films, could be potentially huge expenses. Digitizing donated works could also be expensive. Finally, there were questions about the economic incentives to potential donors. Would people really wish to donate works that have significant cash value? 
@@ -1036,7 +1036,7 @@ Viewpoints quickly diverged on how a commons ought to be structured and what met For the next nine months, the group intensified its debate about how to build the envisioned conservancy. After law student Dotan Oliar sketched out possible “business models,” Saltzman persuaded a friend at McKinsey & Company, the consulting firm, to provide a pro bono assessment.~{ Dotan Oliar, “Memo on Creative Commons — Towards Formulating a Business Plan,” March 19, 2001. }~ “The McKinsey folks were very skeptical and, I think, had a hard time fitting this into their [business] framework,” recalled one student at the meeting, Chris Babbitt. After the meeting, he was convinced that Creative Commons could not possibly host a content commons: “It would just be huge amounts of material, huge costs, and we didn’t have the money for that.” ~{ Interview with Chris Babbitt, September 14, 2006. }~ ={Babbitt, Chris+1;McKinsey & Company;Oliar, Dotan} -Feeling the need to force some concrete decisions, Saltzman and Lessig convened twenty-eight people for an all-day meeting in Hauser Hall at Harvard Law School, on May 11, 2001, to hash out plans. “What we’re trying to do here is /{brand the public domain}/,” Lessig said. A briefing book prepared by Chris Babbitt posed a pivotal question to the group: Should Creative Commons be structured as a centralized Web site or as an distributed, open-source licensing protocol that would allow content to be spread across cyberspace? The centralized model could be “an eBay for opensource IP” or a more niche-based commons for out-of-print books, film, or poetry. A mock Web site was actually prepared to illustrate the scenario. The home page read: “The member sites listed on the CommonExchange have been certified by Creative Commons to offer high-quality, non-infringing content on an unrestricted basis. Please feel free to use and pass these works along to others. 
We invite you to donate works of your own to help maintain the digital Commons.”~{ The mock-up can be found at http://cyber.law.harvard.edu/creativecom mons/site.htm. }~ +Feeling the need to force some concrete decisions, Saltzman and Lessig convened twenty-eight people for an all-day meeting in Hauser Hall at Harvard Law School, on May 11, 2001, to hash out plans. “What we’re trying to do here is /{brand the public domain}/,” Lessig said. A briefing book prepared by Chris Babbitt posed a pivotal question to the group: Should Creative Commons be structured as a centralized Web site or as an distributed, open-source licensing protocol that would allow content to be spread across cyberspace? The centralized model could be “an eBay for opensource IP” or a more niche-based commons for out-of-print books, film, or poetry. A mock Web site was actually prepared to illustrate the scenario. The home page read: “The member sites listed on the CommonExchange have been certified by Creative Commons to offer high-quality, non-infringing content on an unrestricted basis. Please feel free to use and pass these works along to others. We invite you to donate works of your own to help maintain the digital Commons.”~{ The mock-up can be found at http://cyber.law.harvard.edu/creativecommons/site.htm. }~ ={public domain:branding of} The distributed commons model would resemble the Chicago Mercantile Exchange or the New York Stock Exchange — “a trusted matchmaker to facilitate the transaction of securing rights,” according to the briefing book. “Just as corporations or commodities producers must meet certain criteria before they are listed on the Exchange, we could condition ‘listing’ in the Commons on similar criteria, albeit reflecting open source rather than financial values.”~{ “Briefing Book for Creative Commons Inaugural Meeting,” May 7,2001, p.10. }~ The virtue of the distributed model was that it would shift costs, quality control, and digitization to users. 
Creative Commons would serve mostly as a credentialing service and facilitator. On the other hand, giving up control would be fraught with peril — and what if Creative Commons’ intentions were ignored? @@ -1068,7 +1068,7 @@ A classical composer said he “loved the idea of a Nigerian high school chamber In short, there was no stampede for starting a public-domain conservancy or a set of licenses. Some worried that the CC licenses would be a “case of innovation where’s there’s no current demand.” Another person pointed out, more hopefully, that it could be a case of “changing the market demand with a new model.”~{ Oren Bracha and Dotan Oliar, “Memo: May 7th Consensus Regarding the Creative Commons Project,” August 20, 2001, p. 3, note 9. }~ -The Lessig caucus was clearly struggling with how best to engage with the networked environment. Napster had demonstrated that, in the dawning Internet age, creativity would increasingly be born, distributed, and viewed on the Web; print and mass media would be secondary venues. For a society still deeply rooted in print and mass media, this was a difficult concept to grasp. But Michael Carroll, the Washington lawyer who had earlier vetted the conservancy’s liability issues, shrewdly saw network dynamics as a potentially powerful tool for building new types of digital commons. In 2001, he had noticed how a bit of Internet folk art had become an overnight sensation. Mike Collins, an amateur cartoonist from Elmira, New York, had posted the cartoon below on Taterbrains, a Web site.~{ http://politicalhumor.about.com/od/funnypictures/ig/100-Funny-Pictures/ Confusing-Florida-Ballot.htm. }~ The image suddenly rocketed throughout the cyberlandscape. Everyone was copying it and sharing it with friends. +The Lessig caucus was clearly struggling with how best to engage with the networked environment. 
Napster had demonstrated that, in the dawning Internet age, creativity would increasingly be born, distributed, and viewed on the Web; print and mass media would be secondary venues. For a society still deeply rooted in print and mass media, this was a difficult concept to grasp. But Michael Carroll, the Washington lawyer who had earlier vetted the conservancy’s liability issues, shrewdly saw network dynamics as a potentially powerful tool for building new types of digital commons. In 2001, he had noticed how a bit of Internet folk art had become an overnight sensation. Mike Collins, an amateur cartoonist from Elmira, New York, had posted the cartoon below on Taterbrains, a Web site.~{ http://politicalhumor.about.com/od/funnypictures/ig/100-Funny-Pictures/Confusing-Florida-Ballot.htm. }~ The image suddenly rocketed throughout the cyberlandscape. Everyone was copying it and sharing it with friends. ={Carroll, Michael W.+4;Collins, Mike+4;Napster} { vs_db_1.png }http://viralspiral.cc/ @@ -1113,7 +1113,7 @@ As the lawyers brooded and debated the licensing terms, another complicated deba At this time, in 2001, the founder of the World Wide Web, Tim Berners-Lee, and others at the World Wide Web Consortium, based at MIT, were trying to conceptualize the protocols for a new “logical layer” of code on top of the World Wide Web. They called it the Semantic Web. The idea is to enable people to identify and retrieve information that is strewn across the Internet but not readily located through conventional computer searches. Through a software format known as RDF/XML,~[* RDF, or Resource Description Framework, is a way to make a statement about content in a digital artifact. 
XML, or Extensible Markup Language, is a way to write a specialized document format to send across the Web, in which certain content can be marked up, or emphasized, so that other computers can “read” it.]~ digital content could be tagged with machine-readable statements that would in effect say, “This database contains information about x and y.” Through Semantic Web protocols and metatags on content, it would be possible to conduct searches across many types of digital content — Web pages, databases, software programs, even digital sensors — that could yield highly specific and useful results. ={Berners-Lee, Tim;Semantic Web+6;World Wide Web:Semantic Web+6|protocols for+6;RDF/XML} -Unfortunately, progress in developing the Semantic Web has been bogged down in years of technical disagreement and indifference among the larger Web community. Some critics argue that the project has stalled because it was being driven by a small corps of elite software theorists focused on databases, and not by a wider pool of decentralized Web practitioners. In any case, the Creative Commons became one of the first test cases of trying to implement RDF/XML for the Semantic Web.~{ For background, see “The Semantic Web: An Introduction,” at http://in fomesh.net/2001/swintro; Aaron Swartz and James Hendler, “The Semantic Web: A Network of Content for the Digital City,” at http://blogspace.com/ rdf/SwartzHendler; and John Markoff, “Entrepreneurs See a Web Guided by Common Sense,” /{New York Times}/, November 12, 2006. }~ The project was led initially by Lisa Rein, a thirty-three-year-old data modeler who met Lessig at an O’Reilly open-source software conference. Lessig hired her as CC’s first technical director in late 2001 to embed the CC legal licenses in machine-readable formats. +Unfortunately, progress in developing the Semantic Web has been bogged down in years of technical disagreement and indifference among the larger Web community. 
Some critics argue that the project has stalled because it was being driven by a small corps of elite software theorists focused on databases, and not by a wider pool of decentralized Web practitioners. In any case, the Creative Commons became one of the first test cases of trying to implement RDF/XML for the Semantic Web.~{ For background, see “The Semantic Web: An Introduction,” at http://infomesh.net/2001/swintro; Aaron Swartz and James Hendler, “The Semantic Web: A Network of Content for the Digital City,” at http://blogspace.com/rdf/SwartzHendler; and John Markoff, “Entrepreneurs See a Web Guided by Common Sense,” /{New York Times}/, November 12, 2006. }~ The project was led initially by Lisa Rein, a thirty-three-year-old data modeler who met Lessig at an O’Reilly open-source software conference. Lessig hired her as CC’s first technical director in late 2001 to embed the CC legal licenses in machine-readable formats. ={Rein, Lisa+2;Swartz, Aaron;Lessig, Lawrence:CC licenses, and} Writing the XML code was not so difficult, said Rein; the real challenge was “deciding what needed to be included and how you represent the licenses as simply as possible.”~{ Interview with Lisa Rein, December 20, 2006. }~ This required the lawyers and the techies to have intense dialogues about how the law should be faithfully translated into software code, and vice versa. Once again, there were complicated problems to sort through: Should there be a central database of CC-licensed content? How could machine-readable code be adapted if the legal licenses were later modified? @@ -1207,7 +1207,7 @@ It soon became clear that very few people were choosing any of the five licenses Still another choice was offered to copyright holders, a “public domain dedication,” which is not a license so much as “an overt act of relinquishment in perpetuity” of any rights in the work. The public domain dedication places no restrictions whatsoever on subsequent reuses of the work. 
={public domain dedication} -To the first-time user, the licenses may seem a little daunting.~{ A FAQ at the Creative Commons Web site answers the most frequent user questions about the licenses. It is available at http://wiki.creativecommons .org/. }~ The full implications of using one or another license are not immediately obvious. The tagline for the licenses, “Some Rights Reserved,” while catchy, was not really self-explanatory. This became the next big challenge to Creative Commons, as we see in chapter 6: how to educate creators about a solution when they may not have realized they even had a problem. +To the first-time user, the licenses may seem a little daunting.~{ A FAQ at the Creative Commons Web site answers the most frequent user questions about the licenses. It is available at http://wiki.creativecommons.org/. }~ The full implications of using one or another license are not immediately obvious. The tagline for the licenses, “Some Rights Reserved,” while catchy, was not really self-explanatory. This became the next big challenge to Creative Commons, as we see in chapter 6: how to educate creators about a solution when they may not have realized they even had a problem. By December 2002, the three levels of code — legal, digital, and human — had been coordinated and finalized as version 1.0. The organization was set to go public, which it did at a splashy coming-out party in San Francisco. The gala featured appearances by the likes of rapper DJ Spooky (an ardent advocate for remix culture) and a London multimedia jam group, People Like Us. Lessig proudly introduced the licenses as “delivering on our vision of promoting the innovative reuse of all types of intellectual works, unlocking the potential of sharing and transforming others’ work.”~{ http://creativecommons.org/press-releases/entry/3476. 
}~ ={DJ Spooky;People Like Us;code:levels of;Lessig, Lawrence:CC licenses, and+2} @@ -1255,7 +1255,7 @@ Junell designed the now-familiar CC logo as a deliberate counterpoint to the cop In promoting its licenses, Creative Commons fashioned itself as a neutral, respectable defender of individual choice. “Our tools are just that — tools,” said Haughey, who was then developing the CC Web site. “Our model intentionally depends on copyright holders to take responsibility for how they use those tools. Or how they don’t use them: If you’re unsure and want to keep your full copyright, fine. If you choose to allow others to re-use your work, great.”~{ Matthew Haughey, “Blogging in the Public Domain,” Creative Commons blog post, February 5, 2003, at http://creativecommons.org/weblog/entry/3601. }~ While many CC users were enthusiastically bashing copyright law, Lessig and the CC staff made it a point to defend the basic principles of copyright law — while extolling the value of collaborative creativity and sharing under CC licenses. ={Haughey, Matt} -Despite praise by the heads of the Motion Picture Association of America and the Recording Industry Association of America, the licenses nonetheless did attract critics. Some in the music industry regarded the licenses as a Trojan horse that would dupe unsuspecting artists. David Israelite, president and CEO of the National Music Publishers’ Association, told /{Billboard}/, “My concern is that many who support Creative Commons also support a point of view that would take away people’s choices about what to do with their own property.”~{ Susan Butler, “Movement to Share Creative Works Raises Concerns in Music Circles,” /{Billboard}/, May 28, 2005.}~ /{Billboard}/ went on to cite the cautionary tale of a songwriter who was being kept alive by his AIDS medications, thanks to the royalties from a highly successful song. “No one should let artists give up their rights,” said Andy Fraser of the rock group Free. 
Other critics, such as John Dvorak of /{PC Magazine}/, called the CC licenses “humbug” and accused them of adding “some artificial paperwork and complexity to the mechanism [of copyright],” while weakening the rights that an author would otherwise enjoy.~{ John C. Dvorak, “Creative Commons Humbug: This Scheme Doesn’t Seem to Benefit the Public,” PC Magazine, July 28, 2005. }~ Still others had cultural scores to settle and criticized “anything advocated by clever, sleek young lawyers.”~{ Researchers at the Economic Observatory of the University of Openness, “Commercial Commons,” on the online journal /{Metamute}/, at http://www .metamute.org/?q=en/Commercial-Commons. }~ +Despite praise by the heads of the Motion Picture Association of America and the Recording Industry Association of America, the licenses nonetheless did attract critics. Some in the music industry regarded the licenses as a Trojan horse that would dupe unsuspecting artists. David Israelite, president and CEO of the National Music Publishers’ Association, told /{Billboard}/, “My concern is that many who support Creative Commons also support a point of view that would take away people’s choices about what to do with their own property.”~{ Susan Butler, “Movement to Share Creative Works Raises Concerns in Music Circles,” /{Billboard}/, May 28, 2005.}~ /{Billboard}/ went on to cite the cautionary tale of a songwriter who was being kept alive by his AIDS medications, thanks to the royalties from a highly successful song. “No one should let artists give up their rights,” said Andy Fraser of the rock group Free. Other critics, such as John Dvorak of /{PC Magazine}/, called the CC licenses “humbug” and accused them of adding “some artificial paperwork and complexity to the mechanism [of copyright],” while weakening the rights that an author would otherwise enjoy.~{ John C. Dvorak, “Creative Commons Humbug: This Scheme Doesn’t Seem to Benefit the Public,” PC Magazine, July 28, 2005. 
}~ Still others had cultural scores to settle and criticized “anything advocated by clever, sleek young lawyers.”~{ Researchers at the Economic Observatory of the University of Openness, “Commercial Commons,” on the online journal /{Metamute}/, at http://www.metamute.org/?q=en/Commercial-Commons. }~ ={Creative Commons (CC) licenses:critics of;sraelite, David;Recording Industry Association of America (RIAA);Dvorak, John;Fraser, Andy} Putting aside such quibbles and prejudices, the CC licenses seemed a benign enough idea. Given its reliance on copyright law, how could any entertainment lawyer object? Yet the real significance of the licenses was only appreciated by those who realized that a Great Value Shift was kicking in. For them, the licenses were a useful legal tool and cultural flag for building a new sharing economy. @@ -1266,7 +1266,7 @@ Putting aside such quibbles and prejudices, the CC licenses seemed a benign enou In retrospect, the CC licenses could not have been launched at a more propitious moment. Networked culture was exploding in 2003. Broadband was rapidly supplanting dial-up Internet access, enabling users to navigate the Web and share information at much faster speeds. Prices for personal computers were dropping even as computing speeds and memory capacity were soaring. Sophisticated new software applications were enabling users to collaborate in more powerful, user-friendly ways. The infrastructure for sharing was reaching a flashpoint. -Put another way, the original promise of the Internet as a gift economy was coming into its own. Originally built as a platform for efficient sharing among academic researchers, the Internet by 2003 was being used by some 600 million people worldwide.~{ Nielsen/Net Ratings estimated 585 million Internet users in 2002; the International Telecommunications Union estimated 665 million. See http://www2 .sims.berkeley.edu/research/proiects/how-much-info-2003/internet.htm. 
}~ The open framework for sharing was no longer just a plaything of technophiles and academics; it was now insinuated into most significant corners of the economy and social life. As it scaled and grew new muscles and limbs, the Internet began to radically change the ways in which wealth is generated and allocated. +Put another way, the original promise of the Internet as a gift economy was coming into its own. Originally built as a platform for efficient sharing among academic researchers, the Internet by 2003 was being used by some 600 million people worldwide.~{ Nielsen/Net Ratings estimated 585 million Internet users in 2002; the International Telecommunications Union estimated 665 million. See http://www2.sims.berkeley.edu/research/projects/how-much-info-2003/internet.htm. }~ The open framework for sharing was no longer just a plaything of technophiles and academics; it was now insinuated into most significant corners of the economy and social life. As it scaled and grew new muscles and limbs, the Internet began to radically change the ways in which wealth is generated and allocated. ={Internet:gift economy of+1}
Amateur content on the Net may be raw and irregular, but it also tends to be more interesting and authentic than the highly produced, homogenized fare of commercial media. Some of it vastly outshines the lowest common denominator of mass media. Again, the cheap connectivity of the Internet has been key. It has made it possible for people with incredibly specialized interests to find one another and organize themselves into niche communities. For closeted homosexuals in repressive countries or isolated fans of the actor Wallace Beery, the Internet has enabled them to find one another and mutually feed their narrow interests. You name it, there are sites for it: the fans of obscure musicians, the collectors of beer cans, Iranian exiles, kite flyers. Freed of the economic imperative of attracting huge audiences with broad fare, niche-driven Internet content is able to connect with people’s personal passions and interests: a powerful foundation not just for social communities, but for durable markets. ={Internet:communication system, as+1} -This, truly, is one of the more profound effects of networking technologies: the subversion of the “blockbuster” economics of the mass media. It is becoming harder and more expensive for film studios and broadcast networks to amass the huge, cross-demographic audiences that they once could. In the networked environment, it turns out that a diversified set of niche markets can be eminently profitable with lower-volume sales. While Centralized Media require a supply-side “push” of content, the Internet enables a demand-side “pull” of content by users. This radically reduces transaction costs and enhances the economic appeal of niche production. It is easier and cheaper for a company (or single creator) to “pull” niche audiences through word of mouth than it is to pay for expensive “push” advertising campaigns. 
Specialty interests and products that once were dismissed as too marginal or idiosyncratic to be profitable can now flourish in small but robust “pull markets.”~{ David Bollier, “When Push Comes to Pull: The New Economy and Culture of Networking Technology” (Washington, DC: Aspen Institute, 2006), at http://www.aspeninstitute.org/atf/cf/%7BDEB6F227-659B-4EC8-8F84-8 DF23CA704F5%7D/2005InfoTechText.pdf. }~ +This, truly, is one of the more profound effects of networking technologies: the subversion of the “blockbuster” economics of the mass media. It is becoming harder and more expensive for film studios and broadcast networks to amass the huge, cross-demographic audiences that they once could. In the networked environment, it turns out that a diversified set of niche markets can be eminently profitable with lower-volume sales. While Centralized Media require a supply-side “push” of content, the Internet enables a demand-side “pull” of content by users. This radically reduces transaction costs and enhances the economic appeal of niche production. It is easier and cheaper for a company (or single creator) to “pull” niche audiences through word of mouth than it is to pay for expensive “push” advertising campaigns. Specialty interests and products that once were dismissed as too marginal or idiosyncratic to be profitable can now flourish in small but robust “pull markets.”~{ David Bollier, “When Push Comes to Pull: The New Economy and Culture of Networking Technology” (Washington, DC: Aspen Institute, 2006), at http://www.aspeninstitute.org/atf/cf/%7BDEB6F227-659B-4EC8-8F84-8DF23CA704F5%7D/2005InfoTechText.pdf. }~ ={Centralized Media:Internet vs.;Internet:Centralized Media vs.} The term associated with this phenomenon is the “Long Tail” — the title of a much-cited article by Chris Anderson in the October 2004 issue of /{Wired}/ magazine, later expanded into a book. 
Anderson explained the “grand transition” now under way: ={Anderson, Chris+2;Long Tail+3} -_1 For too long we’ve been suffering the tyranny of lowestcommon-denominator fare, subjected to brain-dead summer blockbusters and manufactured pop. Why? Economics. Many of our assumptions about popular taste are actually artifacts of poor supply-and-demand matching — a market response to inefficient distribution. . . . Hit-driven economics is a creation of an age without enough room to carry everything for everybody. Not enough shelf space for all the CDs, DVDs, and games produced. Not enough screens to show all the available movies. . . .~{ Chris Anderson, “The Long Tail,” /{Wired}/, October 2004, at http://www.wired .com/wired/archive/12.10/tail.html. }~ +_1 For too long we’ve been suffering the tyranny of lowestcommon-denominator fare, subjected to brain-dead summer blockbusters and manufactured pop. Why? Economics. Many of our assumptions about popular taste are actually artifacts of poor supply-and-demand matching — a market response to inefficient distribution. . . . Hit-driven economics is a creation of an age without enough room to carry everything for everybody. Not enough shelf space for all the CDs, DVDs, and games produced. Not enough screens to show all the available movies. . . .~{ Chris Anderson, “The Long Tail,” /{Wired}/, October 2004, at http://www.wired.com/wired/archive/12.10/tail.html. }~ The “Long Tail” refers to the huge potential markets that can be created for low-volume niche books, CD, DVDs, and other products. More than half of Amazon’s book sales, for example, come from books that rank below its top 130,000 titles. The implication is that “the market for books that are not even sold in the average bookstore is larger than the market for those that are,” writes Anderson. 
“In other words, the potential book market may be twice as big as it appears to be, if only we can get over the economics of scarcity.” ={Amazon} @@ -1463,11 +1463,11 @@ In January 2003, a month after the CC licenses were released, Doctorow published _1 Well, it’s a long story, but to shorten it up: first-time novelists have a tough row to hoe. Our publishers don’t have a lot of promotional budget to throw at unknown factors like us. Mostly, we rise and fall based on word-of-mouth. I’m not bad at word-of-mouth. I have a blog, Boing Boing (http://boingboingnet), where I do a /{lot}/ of word-ofmouthing. I compulsively tell friends and strangers about things I like. And telling people about stuff is /{way, way}/ easier if I can just send it to ’em. Way easier.~{ Cory Doctorow, “A Note About This Book,” February 12, 2004, and “A Note About This Book,” January 9, 2003, in /{Down and Out in the Magic Kingdom}/, available at http://www.craphound.com/down. }~ -A year later, Doctorow announced that his “grand experiment” was a success; in fact, he said, “my career is turning over like a goddamned locomotive engine.” More than thirty thousand people had downloaded the book within a day of its posting. He proceeded to release a collection of short stories and a second novel under a CC license. 
He also rereleased /{Down and Out in the Magic Kingdom}/ under a less restrictive CC license — an Attribution, NonCommercial, ShareAlike license (BY-NC-SA), which allows readers to make their own translations, radio and film adaptations, sequels, and other remixes of the novel, so long as they are made available on the same terms.~{ Anna Weinberg,“Buying the Cow, Though the Milk Is Free: Why Some Publishers are Digitizing Themselves,” June 24, 2005, /{Book Standard}/, June 24, 2005, available at http://www.thebookstandard.com/bookstandard/news/publisher/ article_display.jsp?vnu_content_id=1000968186.}~ +A year later, Doctorow announced that his “grand experiment” was a success; in fact, he said, “my career is turning over like a goddamned locomotive engine.” More than thirty thousand people had downloaded the book within a day of its posting. He proceeded to release a collection of short stories and a second novel under a CC license. He also rereleased /{Down and Out in the Magic Kingdom}/ under a less restrictive CC license — an Attribution, NonCommercial, ShareAlike license (BY-NC-SA), which allows readers to make their own translations, radio and film adaptations, sequels, and other remixes of the novel, so long as they are made available on the same terms.~{ Anna Weinberg,“Buying the Cow, Though the Milk Is Free: Why Some Publishers are Digitizing Themselves,” June 24, 2005, /{Book Standard}/, June 24, 2005, available at http://www.thebookstandard.com/bookstandard/news/publisher/article_display.jsp?vnu_content_id=1000968186. 
}~ With some sheepish candor, Doctorow conceded: “I wanted to see if the sky would fall: you see writers are routinely schooled by their peers that maximal copyright is the only thing that stands between us and penury, and so ingrained was this lesson in me that even though I had the intellectual intuition that a ‘some rights reserved’ regime would serve me well, I still couldn’t shake the atavistic fear that I was about to do something very foolish indeed.” -By June 2006, /{Down and Out in the Magic Kingdom}/ had been downloaded more than seven hundred thousand times. It had gone through six printings, many foreign translations, and two competing online audio adaptations made by fans. “Most people who download the book don’t end up buying it,” Doctorow conceded, “but they wouldn’t have bought it in any event, so I haven’t lost any sales. I’ve just won an audience. A tiny minority of downloaders treats the free e-book as a substitute for the printed book — those are the lost sales. But a much larger minority treats the e-book as an enticement to buy the printed book. They’re gained sales. As long as gained sales outnumber lost sales, I’m ahead of the game. After all, distributing nearly a million copies of my book has cost me nothing.”~{ Cory Doctorow, “Giving it Away,” Forbes.com, December 1, 2006, available at http://www.forbes.com/2006/11/30/cory-doctorow-copyright-tech-media _cz_cd_books06_1201doctorow.html. }~ In 2008, Doctorow’s marketing strategy of giving away online books to stimulate sales of physical books paid off in an even bigger way. His novel for teenagers, /{Little Brother}/, about a youthful hacker who takes on the U.S. government after it becomes a police state, spent weeks on the /{New York Times}/ bestseller list for children’s books. +By June 2006, /{Down and Out in the Magic Kingdom}/ had been downloaded more than seven hundred thousand times. 
It had gone through six printings, many foreign translations, and two competing online audio adaptations made by fans. “Most people who download the book don’t end up buying it,” Doctorow conceded, “but they wouldn’t have bought it in any event, so I haven’t lost any sales. I’ve just won an audience. A tiny minority of downloaders treats the free e-book as a substitute for the printed book — those are the lost sales. But a much larger minority treats the e-book as an enticement to buy the printed book. They’re gained sales. As long as gained sales outnumber lost sales, I’m ahead of the game. After all, distributing nearly a million copies of my book has cost me nothing.”~{ Cory Doctorow, “Giving it Away,” Forbes.com, December 1, 2006, available at http://www.forbes.com/2006/11/30/cory-doctorow-copyright-tech-media_cz_cd_books06_1201doctorow.html. }~ In 2008, Doctorow’s marketing strategy of giving away online books to stimulate sales of physical books paid off in an even bigger way. His novel for teenagers, /{Little Brother}/, about a youthful hacker who takes on the U.S. government after it becomes a police state, spent weeks on the /{New York Times}/ bestseller list for children’s books. It is perhaps easier for a sci-fi futurist like Doctorow than a publishing business to take such a wild leap into the unknown. But that, too, is an important insight: artists are more likely to lead the way into the sharing economy than entrenched industries. “I’d rather stake my future on a literature that people care about enough to steal,” said Doctorow, “than devote my life to a form that has no home in the dominant medium of the century.” Book lovers and authors will pioneer the future; corporate publishing will grudgingly follow, or be left behind. 
@@ -1485,10 +1485,10 @@ Free culture publishing models are popping up in many unusual quarters these day Founder Hugh McGuire said the inspiration for LibriVox was a distributed recording of Lessig’s book /{Free Culture}/ read by bloggers and podcasters, chapter by chapter. “After listening to that, it took me a while to figure out how to record things on my computer (which I finally did, thanks to free software Audacity). Brewster Kahle’s call for ‘Universal Access to all human knowledge’ was another inspiration, and the free hosting provided by archive.org and ibiblio.org meant that LibriVox was possible: there was no worry about bandwidth and storage. So the project was started with an investment of $0, which continues to be our global budget.” LibriVox’s mission, said McGuire, is the “acoustical liberation of books in the public domain.” ={Kahle, Brewster;LibriVox;McGuire, Hugh;blogging} -Several publishing businesses now revolve around CC licenses. Wikitravel is a collaborative Web site that amasses content about cities and regions around the world; content is licensed under the CC Attribution, ShareAlike license (BY-SA).~{ “Wikitravel Press launches,” Creative Commons blog, August 3, 2007, at http://creativecommons.org/weblog/entry/7596.
See also Mia Garlick, “Wikitravel,” Creative Commons blog, June 20, 2006, at http://creativecommons.org/text/wikitravel. }~ In 2007, its founder joined with a travel writer to start Wikitravel Press, which now publishes travel books in a number of languages. Like the Wikitravel Web pages, the text in the books can be freely copied and reused. ={Wikitravel Press} -Another new business using CC licenses is Lulu, a technology company started by Robert Young, the founder of the Linux vendor Red Hat and benefactor for the Center for the Public Domain.Lulu lets individuals publish and distribute their own books, which can be printed on demand or downloaded. Lulu handles all the details of the publishing process but lets people control their content and rights. Hundreds of people have licensed their works under the CC ShareAlike license and Public Domain Dedication, and under the GNU Project’s Free Documentation License.~{ Mia Garlick, “Lulu,” Creative Commons blog, May 17, 2006, at http://creative commons.org/text/lulu. }~ +Another new business using CC licenses is Lulu, a technology company started by Robert Young, the founder of the Linux vendor Red Hat and benefactor for the Center for the Public Domain. Lulu lets individuals publish and distribute their own books, which can be printed on demand or downloaded. Lulu handles all the details of the publishing process but lets people control their content and rights. Hundreds of people have licensed their works under the CC ShareAlike license and Public Domain Dedication, and under the GNU Project’s Free Documentation License.~{ Mia Garlick, “Lulu,” Creative Commons blog, May 17, 2006, at http://creativecommons.org/text/lulu.
}~ ={Lulu;Red Hat;Young, Robert;Center for the Public Domain;GNU Project:GNU FDL;public domain:Center for Public Domain} As more of culture and commerce move to the Internet, the question facing the book industry now is whether the text of a book is more valuable as a physical object (a codex) or as a digital file (intangible bits that can circulate freely), or some combination of the two. Kevin Kelly, the former editor of /{Wired}/ magazine, once explained: “In a regime of superabundant free copies, copies lose value. They are no longer the basis of wealth. Now relationships, links, connection and sharing are. Value has shifted away from a copy toward the many ways to recall, annotate, personalize, edit, authenticate, display, mark, transfer and engage a work.”~{ Kevin Kelly, “Scan This Book!” /{New York Times Magazine}/, May 14, 2006, p. 43. }~ @@ -1499,7 +1499,7 @@ What this means in practice, Kelly has pointed out, is that books become more va Needless to say, most book publishers and authors’ organizations are not yet prepared to embrace this newfangled value proposition. It seems way too iffy. A “sharing” business model would seemingly cannibalize their current revenues and copyright control with little guarantee of doing better in an open, online milieu. The bigger problem may be the cultural prejudice that an absolute right of control over any possible uses of a book is the best way to make money. ={open business models} -In general, the publishing trade remains skeptical of the Internet, clueless about how to harness its marketing power, and strangers to CC licenses. And it could be years before mainstream publishing accepts some of the counterintuitive notions that special-interest Internet communities will drive publishing in the future. In a presentation that caused a stir in the book industry, futurist Mike Shatzkin said in May 2007 that this is already happening in general trade publishing: “We’re close to a tipping point, or maybe we’re past it . . . 
where Web-based branding will have more credibility than print, because print, needing more horizontal reach to be viable, won’t deliver the attention of the real experts and megaphones in each field.”~{ Mike Shatzkin, “The End of General Trade Publishing Houses: Death or Rebirth in a Niche-by-Niche World,” presented to the Book Expo America, New York, May 31, 2007, available at http://www.idealog.com/speeches/ endoftrade.htm. }~ +In general, the publishing trade remains skeptical of the Internet, clueless about how to harness its marketing power, and strangers to CC licenses. And it could be years before mainstream publishing accepts some of the counterintuitive notions that special-interest Internet communities will drive publishing in the future. In a presentation that caused a stir in the book industry, futurist Mike Shatzkin said in May 2007 that this is already happening in general trade publishing: “We’re close to a tipping point, or maybe we’re past it . . . where Web-based branding will have more credibility than print, because print, needing more horizontal reach to be viable, won’t deliver the attention of the real experts and megaphones in each field.”~{ Mike Shatzkin, “The End of General Trade Publishing Houses: Death or Rebirth in a Niche-by-Niche World,” presented to the Book Expo America, New York, May 31, 2007, available at http://www.idealog.com/speeches/endoftrade.htm. }~ ={Shatzkin, Mike} 2~ DIY Videos and Film @@ -1528,7 +1528,7 @@ One of the more daring experiments in film production is being pioneered by the Ton Roosendaal, who directs the Blender Institute, is trying to demonstrate that a small studio can develop a virtuous cycle of economically sustainable creativity using open-source software, Creative Commons licenses, and talented programmers and artists from around the world. “We give programmers the freedom to do their best, and what they want to do is improve the technology,” he said. 
“The market is too hyper-rational and nailed down and filled with limits,” he argues, referring to his peers at major animation studios. “Open source is free of most of these constraints.”~{ Ton Roosendaal remarks at conference, “Economies of the Commons,” De Balie Centre for Culture and Politics, Amsterdam, April 10–12, 2008. }~ ={Roosendaal, Ton} -In April 2008, the Blender Institute released a ten-minute animated short, /{Big Buck Bunny}/, which features a kind-hearted, fat white bunny who endures the abuse of three stone-throwing rodents until they smash a beautiful butterfly with a rock — at which point the bunny rallies to teach the bullies a lesson.~{ The film can be downloaded at http://www.bigbuckbunny.org/index.php/ download. }~ The film uses cutting-edge computer-generated animation techniques that rival anything produced by Pixar, the Hollywood studio responsible for /{Toy Story}/, /{Cars}/, and /{Ratatouille}/. /{Big Buck Bunny}/ is licensed under a CC Attribution license, which means the digital content can be used by anyone for any purpose so long as credit is given to the Blender Institute. +In April 2008, the Blender Institute released a ten-minute animated short, /{Big Buck Bunny}/, which features a kind-hearted, fat white bunny who endures the abuse of three stone-throwing rodents until they smash a beautiful butterfly with a rock — at which point the bunny rallies to teach the bullies a lesson.~{ The film can be downloaded at http://www.bigbuckbunny.org/index.php/download. }~ The film uses cutting-edge computer-generated animation techniques that rival anything produced by Pixar, the Hollywood studio responsible for /{Toy Story}/, /{Cars}/, and /{Ratatouille}/. /{Big Buck Bunny}/ is licensed under a CC Attribution license, which means the digital content can be used by anyone for any purpose so long as credit is given to the Blender Institute. 
={Big Buck Bunny (animated short)+1} /{Big Buck Bunny}/ was initially distributed to upfront investors as a DVD set that includes extras such as interviews, outtakes, deleted scenes, and the entire database used in making the film. Then, to pique wider interest in sales of the DVD set, priced at thirty-four euros, a trailer was released on the Internet. This resulted in extensive international press coverage and blog exposure. Early signs are promising that Blender will be able to continue to make high-quality animation on a fairly modest budget without worries about illegal downloads or a digital rights management system. The Blender production model also has the virtue of enabling access to top creative talent and cutting-edge animation technologies as well as efficient distribution to paying audiences on a global scale. @@ -1548,7 +1548,7 @@ Media reform activist Harold Feld offers a succinct overview of why creativity i _1 The 1990s saw a number of factors that allowed the major labels to push out independents and dominate the market with their own outrageously priced and poorly produced products: consolidation in the music industry, the whole “studio system” of pumping a few big stars to the exclusion of others, the consolidation in music outlets from mom-and-pop record stores to chains like Tower Records and retail giants like Wal-Mart that exclude indies and push the recordings promoted by major labels, and the consolidation of radio — which further killed indie exposure and allowed the labels to artificially pump their selected “hits” through payola. All this created a cozy cartel that enjoyed monopoly profits. ={music:music industry+1} -_1 As a result, the major labels, the mainstream retailers, and the radio broadcasters grew increasingly out of touch with what listeners actually wanted. But as long as the music cartel controlled what the vast majority of people got to hear, it didn’t matter . . .
The music cartel remained the de facto only game in town.~{ Harold Feld, “CD Sales Dead? Not for Indies!” blog post on Public Knowledge Web site, March 27, 2007, at http://www.publicknowledge.org/node/ 890. }~ +_1 As a result, the major labels, the mainstream retailers, and the radio broadcasters grew increasingly out of touch with what listeners actually wanted. But as long as the music cartel controlled what the vast majority of people got to hear, it didn’t matter . . . The music cartel remained the de facto only game in town.~{ Harold Feld, “CD Sales Dead? Not for Indies!” blog post on Public Knowledge Web site, March 27, 2007, at http://www.publicknowledge.org/node/890. }~ Changing the music industry is obviously a major challenge that is not going to be solved overnight. Still, there is a growing effort led by indie musicians, small record labels, Internet music entrepreneurs, and advocacy groups such as the Future of Music Coalition to address these problems. Creative Commons is clearly sympathetic, but has largely focused on a more modest agenda — enabling a new universe of shareable music to arise. Its chief tools for this mission, beyond the CC licenses, are new software platforms for legal music remixes, online commons that legally share music, and new business models that respect the interests of both fans and artists. Ultimately, it is hoped that a global oeuvre of shareable music will emerge. Once this body of music matures, attracting more artists and fans in a self-sustaining viral spiral, the record industry may be forced to give up its dreams of perfect control of how music may circulate and adopt fan-friendly business practices. ={Future of Music Coalition} @@ -1556,7 +1556,7 @@ Changing the music industry is obviously a major challenge that is not going to This, at least, is the theory, as Lessig explains it. 
He calls it the “BMI strategy,” a reference to the strategy that broadcasters and musicians used to fight ASCAP’s monopoly control over radio music in the early 1940s. ASCAP, the American Society of Composers, Authors and Publishers, is a nonprofit organization that collects royalties for musical performances. At the time, ASCAP required artists to have five hits before it would serve as a collection agency for them, a rule that privileged the playing of pop music on the radio at the expense of rhythm and blues, jazz, hillbilly, and ethnic music. Then, over the course of eight years, ASCAP raised its rates by 450 percent between 1931 and 1939 — at which point, ASCAP then proposed /{doubling}/ its rates for 1940. In protest, many radio stations refused to play ASCAP-licensed music. They formed a new performance-rights body, BMI, or Broadcast Music, Inc., which sought to break the ASCAP monopoly by offering free arrangements of public-domain music to radio stations. They also charged lower rates than ASCAP for licensing music and offered better contracts for artists.~{ Donald Clarke, /{The Rise and Fall of Popular Music}/, chapter 11. }~ ={ASCAP+1;BMI (Broadcast Music, Inc.)+3;music:ASCAP+1;Lessig, Lawrence:CC licenses, and+2|music, and+2} -“The Internet is today’s broadcasters,” said Lessig in a 2006 speech. “They are facing the same struggle.”~{ Lessig explained his BMI strategy at a speech, “On Free, and the Differences Between Culture and Code,” at the 23d Chaos Communications Conference (23C3) in Berlin, Germany, December 30, 2006; video can be watched at http://video.google.com/videoplay?docid=7661663613180520595&q= 23c3. }~ Just as ASCAP used its monopoly power to control what music could be heard and at what prices, he said, so today’s media corporations want to leverage their control over content to gain control of the business models and technologies of digital environments.
When Google bought YouTube, one-third of the purchase price of $1.65 billion was allegedly a financial reserve to deal with any copyright litigation, said Lessig. This is how the incumbent media world is trying to stifle the emergence of free culture. +“The Internet is today’s broadcasters,” said Lessig in a 2006 speech. “They are facing the same struggle.”~{ Lessig explained his BMI strategy at a speech, “On Free, and the Differences Between Culture and Code,” at the 23d Chaos Communications Conference (23C3) in Berlin, Germany, December 30, 2006; video can be watched at http://video.google.com/videoplay?docid=7661663613180520595&q=23c3. }~ Just as ASCAP used its monopoly power to control what music could be heard and at what prices, he said, so today’s media corporations want to leverage their control over content to gain control of the business models and technologies of digital environments. When Google bought YouTube, one-third of the purchase price of $1.65 billion was allegedly a financial reserve to deal with any copyright litigation, said Lessig. This is how the incumbent media world is trying to stifle the emergence of free culture. ={Google;YouTube} The same questions that once confronted broadcasters are now facing Internet innovators, Lessig argues: “How do we free the future from the dead hand of the past? What do we do to make it so they can’t control how technology evolves?” With copyright terms lasting so long, it is not really feasible to try to use public-domain materials to compete with a commercial cartel. Lessig’s answer is a BMI-inspired solution that uses the CC licenses to create a new body of “free” works that, over time, can begin to compete with popular works. The legendary record producer Jerry Wexler recalled how ASCAP marginalized R & B, country, folk, and ethnic music, but “once the lid was lifted — which happened when BMI entered the picture — the vacuum was filled by all these archetypal musics. 
BMI turned out to be the mechanism that released all those primal American forms of music that fused and became rock-and-roll.”~{ From BMI, Inc., Web site, at http://www.bmi.com/genres/entry/533380. }~ Lessig clearly has similar ambitions for Creative Commons. @@ -1568,7 +1568,7 @@ For now, the subculture of CC-licensed music remains something of a fringe movem Creative Commons’s primary task is practical — to help musicians reach audiences directly and reap more of the financial rewards of their music. So far, a wide range of indie bands, hip-hop artists, and bohemian experimentalists of all stripes have used the licenses. One of the most popular is the Attribution, NonCommercial license, which lets artists share their works while getting credit and retaining commercial rights. A number of marquee songwriters and performers — David Byrne, Gilberto Gil, the Beastie Boys, Chuck D — have also used CC licenses as a gesture of solidarity with free culture artists and as an enlightened marketing strategy. Inviting people to remix your songs is a great way to engage your fan base and sell more records. And tagging your music with a CC license, at least for now, wraps an artist in a mantle of tech sophistication and artistic integrity. ={Beastie Boys;Byrne, David;Chuck D;Gil, Gilberto} -Guitarist Jake Shapiro was one of the first musicians to show the marketing potential of unleashing free music on the Internet. In 1995, Shapiro put MP3 files of music by his band, Two Ton Shoe, on the group’s Web site. Within a few years, Two Ton Shoe was one of the most-downloaded bands on the Internet, developing fan bases in Italy, Brazil, Russia, and South Korea. One day Shapiro received a phone call out of the blue from a South Korean concert promoter. He wanted to know if the band would fly over to Seoul to perform four concerts. It turned out that fans in South Korea, where fast broadband connections are the norm, had discovered Two Ton Shoe through file sharing.
A local CD retailer kept getting requests for the band’s music, which led him to contact a concert promoter. In August 2005, Shapiro and his buddies arrived in Seoul as conquering rock stars, selling out all four of their concerts. “The kids who showed up knew all the words to the songs,” Shapiro recalled. A year later, the band signed a deal to distribute a double CD to East Asia.~{ Shapiro described his experiences at the “Identity Mashup Conference,” June 19–21, 2006, hosted by the Berkman Center for Internet and Society at Harvard Law School, at http://blogs.law.harvard.edu/mediaberkman/2006/ 06/28/id-mashup-2006-day-two-the-commons-open-apis-meshups-andmashups. His band’s Web site is at http://www.twotonshoe.com. }~ +Guitarist Jake Shapiro was one of the first musicians to show the marketing potential of unleashing free music on the Internet. In 1995, Shapiro put MP3 files of music by his band, Two Ton Shoe, on the group’s Web site. Within a few years, Two Ton Shoe was one of the most-downloaded bands on the Internet, developing fan bases in Italy, Brazil, Russia, and South Korea. One day Shapiro received a phone call out of the blue from a South Korean concert promoter. He wanted to know if the band would fly over to Seoul to perform four concerts. It turned out that fans in South Korea, where fast broadband connections are the norm, had discovered Two Ton Shoe through file sharing. A local CD retailer kept getting requests for the band’s music, which led him to contact a concert promoter. In August 2005, Shapiro and his buddies arrived in Seoul as conquering rock stars, selling out all four of their concerts. “The kids who showed up knew all the words to the songs,” Shapiro recalled. 
A year later, the band signed a deal to distribute a double CD to East Asia.~{ Shapiro described his experiences at the “Identity Mashup Conference,” June 19–21, 2006, hosted by the Berkman Center for Internet and Society at Harvard Law School, at http://blogs.law.harvard.edu/mediaberkman/2006/06/28/id-mashup-2006-day-two-the-commons-open-apis-meshups-and-mashups. His band’s Web site is at http://www.twotonshoe.com. }~ ={Shapiro, Jake;Two Ton Shoe} While such stories of viral marketing success are not common, neither are they rare. Lots of bands now promote themselves, and find admiring (paying) fans, by posting their music, for free, on Web sites and file-sharing sites. Perhaps the most scrutinized example was Radiohead’s decision to release its album /{In Rainbows}/ for free online, while inviting fans to pay whatever they wanted. (The band did not release any numbers, but considered the move a success. They later released the album through conventional distribution channels as well.)~{ Jon Pareles, “Pay What You Want for This Article,” /{New York Times}/, December 9, 2007. }~ @@ -1580,7 +1580,7 @@ Just as previous generations of fans came together around FM radio or live perfo It is also why the Creative Commons licenses have acquired such cachet. They have come to be associated with musicians who honor the integrity of music making. They symbolize the collective nature of creativity and the importance of communing freely with one’s fans. Nimrod Lev, a prominent Israeli musician and supporter of the CC licenses, received considerable press coverage in his country for a speech that lamented the “cunning arrangement” (in Israeli slang, /{combina}/) by which the music industry has betrayed people’s love of music, making it “only a matter of business and commerce.” Said Lev: ={music:music industry+1;Lev, Nimrod+2} -_1 The music industry treats its consumer as a consumer of sex, not of love, the love of music. 
Just like everything else: a vacuum without values or meaning. But it is still love that everyone wants and seeks. . . . The music vendors knew then [a generation ago] what they have forgotten today, namely that we must have cultural heroes: artists that are not cloned in a manner out to get our money. There was an added value with a meaning: someone who spoke to our hearts in difficult moments, and with that someone, we would walk hand in hand for a while. We had loyalty and love, and it all meant something.~{ Nimrod Lev, “The Combina Industry,” November 16, 2004, at http://law .haifa.ac.il/techlaw/new/try/eng/nimrod.htm. }~ +_1 The music industry treats its consumer as a consumer of sex, not of love, the love of music. Just like everything else: a vacuum without values or meaning. But it is still love that everyone wants and seeks. . . . The music vendors knew then [a generation ago] what they have forgotten today, namely that we must have cultural heroes: artists that are not cloned in a manner out to get our money. There was an added value with a meaning: someone who spoke to our hearts in difficult moments, and with that someone, we would walk hand in hand for a while. We had loyalty and love, and it all meant something.~{ Nimrod Lev, “The Combina Industry,” November 16, 2004, at http://law.haifa.ac.il/techlaw/new/try/eng/nimrod.htm. }~ At the risk of sounding naïve, Lev said he wanted to stand up for the importance of “authenticity and empathy and my own truth” in making music. It is a complaint that echoes throughout the artistic community globally. A few years ago, Patti Smith, the punk rocker renowned for her artistic integrity, decried the “loss of our cultural voice” as the radio industry consolidated and as music television became a dominant force. She grieved for the scarcity of places for her to “feel connected” to a larger musical community of artists and fans.~{ Patti Smith at a panel at the National Conference for Media Reform, St. 
Louis, sponsored by Free Press, May 14, 2005. }~ ={Smith, Patti} @@ -1608,7 +1608,7 @@ The impetus for a solution to the sampling problem started with Negativland, an As an experienced sampler of music, Negativland and collagist People Like Us (aka Vicki Bennett) asked Creative Commons if it would develop and offer a music sampling license. Don Joyce of Negativland explained: ={Joyce, Don} -_1 This would be legally acknowledging the now obvious state of modern audio/visual creativity in which quoting, sampling, direct referencing, copying and collaging have become a major part of modern inspiration. [A sampling option would] stop legally suppressing it and start culturally encouraging it — because it’s here to stay. That’s our idea for encouraging a more democratic media for all of us, from corporations to the individual.~{ Glenn Otis Brown, “Mmm . . . Free Samples (Innovation la),” Creative Commons blog, March 11, 2003, at http://creativecommons.org/weblog/entry/ 3631. }~ +_1 This would be legally acknowledging the now obvious state of modern audio/visual creativity in which quoting, sampling, direct referencing, copying and collaging have become a major part of modern inspiration. [A sampling option would] stop legally suppressing it and start culturally encouraging it — because it’s here to stay. That’s our idea for encouraging a more democratic media for all of us, from corporations to the individual.~{ Glenn Otis Brown, “Mmm . . . Free Samples (Innovation la),” Creative Commons blog, March 11, 2003, at http://creativecommons.org/weblog/entry/3631. }~ With legal help from Cooley Godward Kronish and Wilson, Sonsini, Goodrich & Rosati, Creative Commons did just that. During its consultations with the remix community, Creative Commons learned that Gilberto Gil, the renowned /{tropicalismo}/ musician and at the time the Brazilian minister of culture, had been thinking along similar lines, and so it received valuable suggestions and support from him. 
={Cooley Godward Kronish;Wilson, Sonsini, Goodrich & Rosati;Gil, Gilberto} @@ -1618,26 +1618,26 @@ In 2005, Creative Commons issued the Sampling license as a way to let people tak The CC Sampling license only whetted the imagination of people who wanted to find new ways to sample, share, and transform music. Neeru Paharia, then the assistant director of the Creative Commons, came up with the idea of developing ccMixter, a software platform for remixing music on the Web.~{ See http://wiki.creativecommons.org/ccMixter. Interview with Mike Linksvayer, February 7, 2007, and Neeru Paharia, April 13, 2007. }~ Paharia realized one day that “this whole remixing and sharing ecology is about getting feedback on who’s using your work and how it’s evolving. That’s almost half the pleasure.”~{ Interview with Neeru Paharia, April 13, 2007. }~ So the organization developed a Web site that would allow people to upload music that could be sampled and remixed. The site has about five thousand registered users, which is not terribly large, but it is an enthusiastic and active community of remix artists that acts as a great proof of concept while promoting the CC licenses. There are other, much larger remix sites on the Internet, such as Sony’s ACIDplanet, but such sites are faux commons. They retain ownership in the sounds and remixes that users make, and no derivative or commercial versions are allowed. ={Paharia, Neeru} -One feature of viral spirals is their propensity to call forth a jumble of new projects and unexpected partners. The CC licenses have done just that for music. ccMixter has joined with Opsound to offer a joint “sound pool” of clips licensed under an Attribution ShareAlike license. 
It also supports Freesound, a repository of more than twenty thousand CC-licensed samples ranging from waterfalls to crickets to music.~{ Neeru Paharia, “Opsound’s Sal Randolph,” Creative Commons blog, October 1, 2005, at http://creativecommons.org/audio/opsound; Mike Linksvayer, “Freesound,” Creative Commons blog, October 1, 2005, at http://creative commons.org/audio/freesound; Matt Haughey, “Free Online Music Booms as SoundClick Offers Creative Commons Licenses,” Creative Commons blog, August 11, 2004. }~ +One feature of viral spirals is their propensity to call forth a jumble of new projects and unexpected partners. The CC licenses have done just that for music. ccMixter has joined with Opsound to offer a joint “sound pool” of clips licensed under an Attribution ShareAlike license. It also supports Freesound, a repository of more than twenty thousand CC-licensed samples ranging from waterfalls to crickets to music.~{ Neeru Paharia, “Opsound’s Sal Randolph,” Creative Commons blog, October 1, 2005, at http://creativecommons.org/audio/opsound; Mike Linksvayer, “Freesound,” Creative Commons blog, October 1, 2005, at http://creativecommons.org/audio/freesound; Matt Haughey, “Free Online Music Booms as SoundClick Offers Creative Commons Licenses,” Creative Commons blog, August 11, 2004. }~ Runoff Records, Inc., a record label, discovered a remix artist who teaches physics and calculus and goes by the name of Minus Kelvin. Runoff heard a podcast of Kelvin’s CC-licensed music, and signed him up, along with another ccMixter contributor, to do music for three seasons of the television show /{America’s Next Top Model}/.~{ Neeru Paharia, “Minus Kelvin Discovered on ccMixter,” Creative Commons blog, May 17, 2005, at http://creativecommons.org/weblog/archive/2005/5. 
}~ A few months later, two ccMixter fans based in Poland and Holland started an online record label, DiSfish, that gives 5 percent of all sale proceeds to CC, another 5 percent to charity, with the remainder split between the label and the artist. All music on the label is licensed under CC.~{ Cezary Ostrowski from Poland and Marco Raaphorst from Holland met online at ccMixter and decided to go into business together. They started an online label called DiSfish. }~ -The CC licenses are not just the province of daring remix artists and other experimentalists. Disappointed by its CD sales through traditional channels, the Philharmonia Baroque Orchestra released its performance of Handel’s 1736 opera, /{Atalanta}/, exclusively through the online record label Magnatune, using a CC license. Conductor Nicholas McGegan said the Internet “has potentially given the industry a tremendous shot in the arm,” letting orchestras reach “new audiences, including ones that are unlikely to hear you in person.”~{ Mia Garlick, “Classical Music Goes Digital (& CC),” May 3, 2006, at http://creativecommons.org/weblog/entry/5883. }~ A company that specializes in Catalan music collaborated with the Catalonian government to release two CDs full of CC-licensed music.~{ The Enderrock Group, a company that specializes in Catalan music and publishes three popular music magazines, released the two CDs, /{Música Lliure and Música Lliure II}/, free within the page of its magazines. See Margot Kaminski, “Enderrock,” Creative Commons Web site, January 17, 2007, at http://cre ativecommons.org/audio/enderrock. }~ A group of Gamelan musicians from central Java who perform in North Carolina decided to release their recordings under a CC license.~{ The group, Gamelan Nyai Saraswait, was blogged about by Matt Haughey on February 1, 2003, at http://creativecommons.org/weblog/entry/3599. }~ +The CC licenses are not just the province of daring remix artists and other experimentalists. 
Disappointed by its CD sales through traditional channels, the Philharmonia Baroque Orchestra released its performance of Handel’s 1736 opera, /{Atalanta}/, exclusively through the online record label Magnatune, using a CC license. Conductor Nicholas McGegan said the Internet “has potentially given the industry a tremendous shot in the arm,” letting orchestras reach “new audiences, including ones that are unlikely to hear you in person.”~{ Mia Garlick, “Classical Music Goes Digital (& CC),” May 3, 2006, at http://creativecommons.org/weblog/entry/5883. }~ A company that specializes in Catalan music collaborated with the Catalonian government to release two CDs full of CC-licensed music.~{ The Enderrock Group, a company that specializes in Catalan music and publishes three popular music magazines, released the two CDs, /{Música Lliure and Música Lliure II}/, free within the page of its magazines. See Margot Kaminski, “Enderrock,” Creative Commons Web site, January 17, 2007, at http://creativecommons.org/audio/enderrock. }~ A group of Gamelan musicians from central Java who perform in North Carolina decided to release their recordings under a CC license.~{ The group, Gamelan Nyai Saraswait, was blogged about by Matt Haughey on February 1, 2003, at http://creativecommons.org/weblog/entry/3599. }~ ={McGegan, Nicholas} Big-name artists have gotten into the licenses as well. DJ Vadim created a splash when he released all the original solo, individual instrumental, and a cappella studio tracks of his album /{The Sound Catcher}/ under an Attribution, NonCommercial license, so that remixers could have at it.~{ Victor Stone, “DJ Vadim Releases Album Tracks Under CC,” August 20, 2007, at http://creativecommons.org/weblog/entry/7619. }~ In 2004, /{Wired}/ magazine released a CD with sixteen tracks by the likes of David Byrne, Gilberto Gil, and the Beastie Boys. 
“By contributing a track to /{The Wired CD}/., these musicians acknowledge that for an art form to thrive, it needs to be open, fluid and alive,” wrote /{Wired}/. “These artists — and soon, perhaps, many more like them — would rather have people share their work than steal it.”~{ Thomas Goetz, “Sample the Future,” /{Wired}/, November 2004, pp. 181–83. }~ ={Byrne, David;Gil, Gilberto+1;DJ Vadim;Beastie Boys} -Soon thereafter, Byrne and Gil went so far as to host a gala benefit concert for Creative Commons in New York City. In a fitting fusion of styles, Gil sang a Brazilian arrangement of Cole Porter’s cowboy song, “Don’t Fence Me In.” The crowd of 1,500 was high on the transcultural symbolism, said Glenn Brown: “Musical superstars from North and South, jamming together, building earlier works into new creations, in real time. Lawyers on the sidelines and in the audience, where they belong. The big Creative Commons logo smiling overhead.”~{ Glenn Otis Brown, “WIRED Concert and CD: A Study in Collaboration,” September 24, 2004, available at http://creativecommons.org/weblog/entry/ 4415. }~ The description captures the CC enterprise to a fault: the fusion of some clap-your-hands populism and hardheaded legal tools, inflected with an idealistic call to action to build a better world. +Soon thereafter, Byrne and Gil went so far as to host a gala benefit concert for Creative Commons in New York City. In a fitting fusion of styles, Gil sang a Brazilian arrangement of Cole Porter’s cowboy song, “Don’t Fence Me In.” The crowd of 1,500 was high on the transcultural symbolism, said Glenn Brown: “Musical superstars from North and South, jamming together, building earlier works into new creations, in real time. Lawyers on the sidelines and in the audience, where they belong. The big Creative Commons logo smiling overhead.”~{ Glenn Otis Brown, “WIRED Concert and CD: A Study in Collaboration,” September 24, 2004, available at http://creativecommons.org/weblog/entry/4415. 
}~ The description captures the CC enterprise to a fault: the fusion of some clap-your-hands populism and hardheaded legal tools, inflected with an idealistic call to action to build a better world. ={Brown, Glenn Otis;Porter, Cole} -By 2008 the power of open networks had persuaded the major record labels to abandon digital rights management of music CDs, and more major artists were beginning to venture forth with their own direct distribution plans, bypassing the standard record label deals. Prince, Madonna, and others found it more lucrative to run their own business affairs and deal with concert venues and merchandisers. In a major experiment that suggests a new business model for major music acts, Nine Inch Nails released its album /{Ghosts I-IV}/ under a Creative Commons NonCommercial ShareAlike license, and posted audio files of the album on its official Web site, inviting free downloads. It did not do advertising or promotion. Despite the free distribution — or because of it — the group made money by selling 2,500 copies of an “Ultra-Deluxe Limited Edition” of the album for $300; the edition sold out in less than three days. There were also nonlimited sales of a “deluxe edition” for $75 and a $10 CD. The scheme showed how free access to the music can be used to drive sales for something that remains scarce, such as a “special edition” CD or a live performance. One week after the album’s release, the Nine Inch Nails’ Web site reported that the group had made over $1.6 million from over 750,000 purchase and download transactions. Considering that an artist generally makes only $1.60 on the sale of a $15.99 CD, Nine Inch Nails made a great deal more money from a “free” album distribution than it otherwise would have made through a standard record deal.~{ See, e.g., Wikipedia entry, “Ghosts I-IV,” at http://en.wikipedia.org/wiki/ Ghosts_I-IV. 
}~ +By 2008 the power of open networks had persuaded the major record labels to abandon digital rights management of music CDs, and more major artists were beginning to venture forth with their own direct distribution plans, bypassing the standard record label deals. Prince, Madonna, and others found it more lucrative to run their own business affairs and deal with concert venues and merchandisers. In a major experiment that suggests a new business model for major music acts, Nine Inch Nails released its album /{Ghosts I-IV}/ under a Creative Commons NonCommercial ShareAlike license, and posted audio files of the album on its official Web site, inviting free downloads. It did not do advertising or promotion. Despite the free distribution — or because of it — the group made money by selling 2,500 copies of an “Ultra-Deluxe Limited Edition” of the album for $300; the edition sold out in less than three days. There were also nonlimited sales of a “deluxe edition” for $75 and a $10 CD. The scheme showed how free access to the music can be used to drive sales for something that remains scarce, such as a “special edition” CD or a live performance. One week after the album’s release, the Nine Inch Nails’ Web site reported that the group had made over $1.6 million from over 750,000 purchase and download transactions. Considering that an artist generally makes only $1.60 on the sale of a $15.99 CD, Nine Inch Nails made a great deal more money from a “free” album distribution than it otherwise would have made through a standard record deal.~{ See, e.g., Wikipedia entry, “Ghosts I-IV,” at http://en.wikipedia.org/wiki/Ghosts_I-IV. }~ ={Nine Inch Nails} It is too early to know if Lessig’s “BMI strategy” will in fact catalyze a structural transformation in the entertainment industries. But Lessig apparently feels that it is the only feasible strategy. 
As he said in a 2006 speech, intensified hacking to break systems of proprietary control will not work; new campaigns to win progressive legislation won’t succeed within the next twenty years; and litigation is “a long-term losing strategy,” as the /{Eldred}/ case demonstrated. For Lessig and much of the free culture community, the long-term project of building one’s own open, commons-friendly infrastructure is the only enduring solution. ={BMI (Broadcast Music, Inc.);Eldred v. Reno/Eldred v. Ashcroft:effects of;Lessig, Lawrence:Eldred v. Reno, and|music, and+1} -In the music industry, the early signs seem to support this approach. When digital guru Don Tapscott surveyed the events of 2006, he concluded that “the losers built digital music stores and the winners built vibrant communities based on music. The losers built walled gardens while the winners built public squares. The losers were busy guarding their intellectual property while the winners were busy getting everyone’s attention.” In a penetrating analysis in 2007, music industry blogger Gerd Leonhard wrote: “In music, it’s always been about interaction, about sharing, about engaging — not Sell-Sell-Sell right from the start. Stop the sharing and you kill the music business — it’s that simple. When the fan/user/listener stops engaging with the music, it’s all over.”~{ Gerd Leonhard, “Open Letter to the Independent Music Industry: Music 2.0 and the Future of Music,” July 1, 2007, at http://www.gerdleonhard.net/ 2007/07/gerd-leonhards.html. }~ +In the music industry, the early signs seem to support this approach. When digital guru Don Tapscott surveyed the events of 2006, he concluded that “the losers built digital music stores and the winners built vibrant communities based on music. The losers built walled gardens while the winners built public squares. 
The losers were busy guarding their intellectual property while the winners were busy getting everyone’s attention.” In a penetrating analysis in 2007, music industry blogger Gerd Leonhard wrote: “In music, it’s always been about interaction, about sharing, about engaging — not Sell-Sell-Sell right from the start. Stop the sharing and you kill the music business — it’s that simple. When the fan/user/listener stops engaging with the music, it’s all over.”~{ Gerd Leonhard, “Open Letter to the Independent Music Industry: Music 2.0 and the Future of Music,” July 1, 2007, at http://www.gerdleonhard.net/2007/07/gerd-leonhards.html. }~ ={Leonhard, Gerd;Tapscott, Don} Serious change is in the air when the producer/consumer dichotomy is no longer the only paradigm, and a vast network of ordinary people and talented creators are becoming active participants in making their own culture. They are sharing and co-creating. Markets are no longer so separate from social communities; indeed, the two are blurring into each other. Although we may live in a complicated interregnum between Centralized Media and distributed media, the future is likely to favor those creators and businesses who build on open platforms. As Dan Hunter and F. Gregory Lastowka write: “It is clear that two parallel spheres of information production exist today. One is a traditional, copyright-based and profit-driven model that is struggling with technological change. The second is a newly enabled, decentralized amateur production sphere, in which individual authors or small groups freely release their work.”~{ Dan Hunter and F. Gregory Lastowka, “Amateur-to-Amateur,” /{William and Mary Law Review}/ 46, no. 951 (December 2004), pp. 1029–30. 
}~ @@ -1668,7 +1668,7 @@ Even as the machine was getting built, Lessig was taking steps to stoke up a mov Although /{Free Culture}/ repeats many of the fundamental arguments made in his earlier books, Lessig’s arguments this time did not sound like a law professor’s or academic’s, but more like an activist trying to rally a social movement. “This movement must begin in the streets,” he writes. “It must recruit a significant number of parents, teachers, librarians, creators, authors, musicians, filmmakers, scientists — all to tell their story in their own words, and to tell their neighbors why this battle is so important. . . . We will not reclaim a free culture by individual action alone. It will take important reforms of laws. We have a long way to go before the politicians will listen to these ideas and implement these reforms. But that also means that we have time to build awareness around the changes that we need.”~{ Lawrence Lessig, /{Free Culture}/ (New York: Penguin, 2004), pp. 275, 287. }~ The preeminent challenge for this would-be movement, Lessig wrote, is “rebuilding freedoms previously presumed” and “rebuilding free culture.” -Lessig had reason to think that his analysis and exhortations would find receptive ears. He was now a leading voice on copyright and Internet issues, and well known through his earlier books, public speaking, and /{Eldred}/ advocacy. The launch of the Creative Commons was thrusting him into the spotlight again. Adoption of the CC licenses was steadily growing in 2003 and 2004 based on the most comprehensive sources at the time, search engines. Yahoo was reporting in September 2004 that there were 4.7 million links to CC licenses on the Web. This number shot up to 14 million only six months later, and by August 2005 it had grown to 53 million.~{ CC license statistics, on CC wiki page, at http://wiki.creativecommons.org/ License_statistics. 
}~ These numbers offer only a crude estimate of actual license usage, but they nonetheless indicated a consistent trend. Usage was also being propelled by new types of Web 2.0 sites featuring usergenerated content. For example, Flickr, the photo-sharing site, had 4.1 million photos tagged with CC licenses at the end of 2004, a number that has soared to an estimated 75 million by 2008. +Lessig had reason to think that his analysis and exhortations would find receptive ears. He was now a leading voice on copyright and Internet issues, and well known through his earlier books, public speaking, and /{Eldred}/ advocacy. The launch of the Creative Commons was thrusting him into the spotlight again. Adoption of the CC licenses was steadily growing in 2003 and 2004 based on the most comprehensive sources at the time, search engines. Yahoo was reporting in September 2004 that there were 4.7 million links to CC licenses on the Web. This number shot up to 14 million only six months later, and by August 2005 it had grown to 53 million.~{ CC license statistics, on CC wiki page, at http://wiki.creativecommons.org/License_statistics. }~ These numbers offer only a crude estimate of actual license usage, but they nonetheless indicated a consistent trend. Usage was also being propelled by new types of Web 2.0 sites featuring user-generated content. For example, Flickr, the photo-sharing site, had 4.1 million photos tagged with CC licenses at the end of 2004, a number that has soared to an estimated 75 million by 2008.
The CC licenses have been no exception. As users have incorporated them into one medium after another, the unwitting omissions and infelicitous legal language of some parts of the licenses needed revisiting. After many months of discussions with many parts of the CC world, the Creative Commons issued a new set of 2.0 licenses in May 2004.~{ Glenn Otis Brown, “Announcing (and explaining) our new 2.0 licenses,” CC blog, May 25, 2004, at http://creativecommons.org/weblog/entry/4216. }~ They did not differ substantially from the original ones, and in fact the changes would probably bore most nonlawyers. For example, version 2.0 included a provision that allows a licensor to require licensees to provide a link back to the licensor’s work. The 2.0 licenses also clarify many complicated license options affecting music rights, and make clear that licensors make no warranties of title, merchantability, or fitness for use. Perhaps the biggest change in version 2.0 was the elimination of the choice of Attribution licenses. Since nearly 98 percent of all licensors chose Attribution, the Creative Commons decided to drop licenses without the Attribution requirement, thereby reducing the number of CC licenses from eleven to six. ={Creative Commons (CC) licenses:version 2.0 of} -Another set of major revisions to the licenses was taken up for discussion in 2006, and agreed upon in February 2007.~{ 7. Mia Garlick, “Version 3.0 Launched,” CC blog, http://creativecommons.org/ weblog/entry/7249. }~ Once again, the layperson would care little for the debates leading to the changes, but considerable, sometimes heated discussion went into the revisions. In general, the 3.0 tweaks sought to make the licenses clearer, more useful, and more enforceable. The issue of “moral rights” under copyright law — an issue in many European countries — is explicitly addressed, as are the complications of the CC licenses and collecting societies. 
New legal language was introduced to ensure that people who remix works under other licenses, such as the GNU Free Documentation License (FDL), would be able to also use CC-licensed materials in the same work — an important provision for preventing free culture from devolving into “autistic islands” of legally incomptabile material. Besides helping align the CC world with Wikipedia (which uses the GNU FDL license), the 3.0 revisions also made harmonizing legal changes to take account of MIT and the Debian software development community. +Another set of major revisions to the licenses was taken up for discussion in 2006, and agreed upon in February 2007.~{ Mia Garlick, “Version 3.0 Launched,” CC blog, http://creativecommons.org/weblog/entry/7249. }~ Once again, the layperson would care little for the debates leading to the changes, but considerable, sometimes heated discussion went into the revisions. In general, the 3.0 tweaks sought to make the licenses clearer, more useful, and more enforceable. The issue of “moral rights” under copyright law — an issue in many European countries — is explicitly addressed, as are the complications of the CC licenses and collecting societies. New legal language was introduced to ensure that people who remix works under other licenses, such as the GNU Free Documentation License (FDL), would be able to also use CC-licensed materials in the same work — an important provision for preventing free culture from devolving into “autistic islands” of legally incompatible material. Besides helping align the CC world with Wikipedia (which uses the GNU FDL license), the 3.0 revisions also made harmonizing legal changes to take account of MIT and the Debian software development community.
={GNU Project:GNU FDL;copyright law:moral rights, and;Creative Commons (CC) licenses:version 3.0 of} By getting the CC licenses integrated into so many types of software and Web services, and even leveraging market players to embrace the sharing ethic, Creative Commons has managed to kill at least three birds with one stone. It has enlarged the universe of shareable Internet content. It has educated people to consider how copyright law affects them personally. And it has given visibility to its larger vision of free culture. @@ -1757,7 +1757,7 @@ In a pre-Internet context, the whole idea of a creating a new international lice Going international with the licenses offered an appealing way to grow both simultaneously without forcing unpleasant trade-offs between the two, at least initially. Drafting the licenses for a country, for example, helps convene top lawyers committed to the idea of legal sharing and collaboration while also mobilizing diverse constituencies who are the potential leaders of a movement. -According to Jonathan Zittrain, an early collaborator on the project and a board member, Creative Commons at the international level is more of a “persuasive, communicative enterprise than a legal licensing one.”~{ Interview with Jonathan Zittrain, September 28, 2006. }~ It is a vehicle for starting a process for engaging public-spirited lawyers, law scholars, and all manner of creators. The licenses do have specific legal meanings in their respective legal jurisdictions, of course, or are believed to have legal application. (Only three courts, in the Netherlands and Spain, have ever ruled on the legal status of the CC licenses. In two instances the courts enforced the licenses; in the other case, in which the defendant lost, the validity of the licenses was not at issue.)~{ The most famous court case involving the CC licenses is /{A. Curry v. 
Audax/Weekend}/, in which Adam Curry sued the publishers of a Dutch tabloid magazine and two senior editors for using four photos of his family on his Flickr account that had been licensed under a BY-NC-SA license. See http://creativecommons.org/weblog/entry/5944 and http://creativecommons.org/weblog/entry/5823. A District Court of Amsterdam upheld Curry’s usage of the CC licenses in a March 9, 2006, decision; see http://mir rors.creativecommons.org/judgements/Curry-Audax-English.pdf. There have been two Spanish cases involving CC licenses. In both cases, a collecting society, the Sociedad General de Autores y Editores (SGAE), sued cafés for playing “free music” licensed under CC licenses; SGAE claimed that it was owed royalties for the public performance of music because artists cannot legally apply a CC license to their work (or even release it online) without the consent of their collecting society. In both instances, the cases turned on evidentiary issues, not on the enforceability of CC licenses. See http:// creativecommons.org/weblog/entry/5830 and http://creativecommons.org/ weblog/entry/7228. }~ Apart from their legal meaning, the licenses’ most important function may be as a social signaling device. They let people announce, “I participate in and celebrate the sharing economy.” The internationalization of the CC licenses has also been a way of “localizing” the free culture movement. +According to Jonathan Zittrain, an early collaborator on the project and a board member, Creative Commons at the international level is more of a “persuasive, communicative enterprise than a legal licensing one.”~{ Interview with Jonathan Zittrain, September 28, 2006. }~ It is a vehicle for starting a process for engaging public-spirited lawyers, law scholars, and all manner of creators. The licenses do have specific legal meanings in their respective legal jurisdictions, of course, or are believed to have legal application. 
(Only three courts, in the Netherlands and Spain, have ever ruled on the legal status of the CC licenses. In two instances the courts enforced the licenses; in the other case, in which the defendant lost, the validity of the licenses was not at issue.)~{ The most famous court case involving the CC licenses is /{A. Curry v. Audax/Weekend}/, in which Adam Curry sued the publishers of a Dutch tabloid magazine and two senior editors for using four photos of his family on his Flickr account that had been licensed under a BY-NC-SA license. See http://creativecommons.org/weblog/entry/5944 and http://creativecommons.org/weblog/entry/5823. A District Court of Amsterdam upheld Curry’s usage of the CC licenses in a March 9, 2006, decision; see http://mirrors.creativecommons.org/judgements/Curry-Audax-English.pdf. There have been two Spanish cases involving CC licenses. In both cases, a collecting society, the Sociedad General de Autores y Editores (SGAE), sued cafés for playing “free music” licensed under CC licenses; SGAE claimed that it was owed royalties for the public performance of music because artists cannot legally apply a CC license to their work (or even release it online) without the consent of their collecting society. In both instances, the cases turned on evidentiary issues, not on the enforceability of CC licenses. See http://creativecommons.org/weblog/entry/5830 and http://creativecommons.org/weblog/entry/7228. }~ Apart from their legal meaning, the licenses’ most important function may be as a social signaling device. They let people announce, “I participate in and celebrate the sharing economy.” The internationalization of the CC licenses has also been a way of “localizing” the free culture movement. ={Zittrain, Jonathan} The first nation to port the CC licenses was Japan. This was partly an outgrowth of a five-month sabbatical that Lessig had spent in Tokyo, from late 2002 through early 2003. 
There were already stirrings of dissatisfaction with copyright law in Japan. Koichiro Hayashi, a professor who had once worked for the telecom giant NTT, had once proposed a so-called d-mark system to allow copyright owners to forfeit the statutory term of copyright protection and voluntarily declare a shorter term for their works. In the spring of 2003, a team of Japanese lawyers associated with a technology research institute, the Global Communications Center (GLOCOM), working with CC International in Berlin, set about porting the licenses to Japanese law. @@ -1780,7 +1780,7 @@ As each jurisdiction introduces its licenses, it typically hosts a gala public e Luiz Inácio Lula da Silva had just been elected president of Brazil, and he was eager to stake out a new set of development policies to allow his nation to plot its own economic and cultural future. His government, reflecting his electoral mandate, resented the coercive effects of international copyright law and patent law. To tackle some of these issues on the copyright front, President Lula appointed Gilberto Gil, the renowned singer-songwriter, as his minister of culture. ={Lula da Silva, Luiz Inácio;Gil, Gilberto+11} -Gil became a revered cultural figure when he helped launch a new musical style, /{tropicalismo}/, in the late 1960s, giving Brazil a fresh, international cachet. The music blended national styles of music with pop culture and was inflected with political and moral themes. As one commentator put it, /{tropicalismo}/ was “a very ’60s attempt to capture the chaotic, swirling feel of Brazil’s perennially uneven modernization, its jumble of wealth and poverty, of rural and urban, of local and global. . . . They cut and pasted styles with an abandon that, amid today’s sample-happy music scene, sounds up-to-theminute.”~{ Wikipedia entry, “Tropicalismo,” at http://en.wikipedia.org/wiki/Tropical ismo. 
}~ The military dictatorship then running the government considered /{tropicalismo}/ sufficiently threatening that it imprisoned Gil for several months before forcing him into exile, in London. Gil continued writing and recording music, however, and eventually returned to Brazil.~{ For a history of Gil, see his personal Web site at http://www.gilbertogil .com.br/index.php?language=en; the Wikipedia entry on him at http: //en.wikipedia.org/wiki/Gilberto_Gil; and Larry Rohter, “Gilberto Gil Hears the Future, Some Rights Reserved,” /{New York Times}/, March 11, 2007. }~ +Gil became a revered cultural figure when he helped launch a new musical style, /{tropicalismo}/, in the late 1960s, giving Brazil a fresh, international cachet. The music blended national styles of music with pop culture and was inflected with political and moral themes. As one commentator put it, /{tropicalismo}/ was “a very ’60s attempt to capture the chaotic, swirling feel of Brazil’s perennially uneven modernization, its jumble of wealth and poverty, of rural and urban, of local and global. . . . They cut and pasted styles with an abandon that, amid today’s sample-happy music scene, sounds up-to-the-minute.”~{ Wikipedia entry, “Tropicalismo,” at http://en.wikipedia.org/wiki/Tropicalismo. }~ The military dictatorship then running the government considered /{tropicalismo}/ sufficiently threatening that it imprisoned Gil for several months before forcing him into exile, in London. Gil continued writing and recording music, however, and eventually returned to Brazil.~{ For a history of Gil, see his personal Web site at http://www.gilbertogil.com.br/index.php?language=en; the Wikipedia entry on him at http://en.wikipedia.org/wiki/Gilberto_Gil; and Larry Rohter, “Gilberto Gil Hears the Future, Some Rights Reserved,” /{New York Times}/, March 11, 2007. }~ This history matters, because when Gil was appointed culture minister, he brought with him a rare political sophistication and public veneration.
His moral stature and joyous humanity allowed him to transcend politics as conventionally practiced. “Gil wears shoulder-length dreadlocks and is apt to show up at his ministerial offices dressed in the simple white linens that identify him as a follower of the Afro-Brazilian religion /{candomblé}/,” wrote American journalist Julian Dibbell in 2004. “Slouching in and out of the elegant Barcelona chairs that furnish his office, taking the occasional sip from a cup of pinkish herbal tea, he looks — and talks — less like an elder statesman than the posthippie, multiculturalist, Taoist intellectual he is.”~{ Julian Dibbell, “We Pledge Allegiance to the Penguin,” /{Wired}/, November 2004, at http://www.wired.com/wired/archive/12.11/linux_pr.html. }~ ={Dibbell, Julian+1} @@ -1800,10 +1800,10 @@ This alignment of intellectual firepower, artistic authority, and political clou One of the first collaborations between Creative Commons and the Brazilian government involved the release of a special CC-GPL license in December 2003.~{ Creative Commons press release, “Brazilian Government First to Adopt New ‘CC-GPL,’ ” December 2, 2003. }~ This license adapted the General Public License for software by translating it into Portuguese and putting it into the CC’s customary “three layers” — a plain-language version, a lawyers’ version compatible with the national copyright law, and a machine-readable metadata expression of the license. The CC-GPL license, released in conjunction with the Free Software Foundation, was an important international event because it gave the imprimatur of a major world government to free software and the social ethic of sharing and reuse. Brazil has since become a champion of GNU/Linux and free software in government agencies and the judiciary. It regards free software and open standards as part of a larger fight for a “development agenda” at the World Intellectual Property Organization and the World Trade Organization. 
In a related vein, Brazil has famously challenged patent and trade policies that made HIV/AIDS drugs prohibitively expensive for thousands of sick Brazilians. ={free software:international licensing, and+1;GNU/Linux:Brazil, in;World Trade Organization;World Intellectual Property Organization;open networks:international} -When the full set of CC Brazil licenses was finally launched— at the Fifth International Free Software Forum, in Port Alegre on June 4, 2004 — it was a major national event. Brazilian celebrities, government officials, and an enthusiastic crowd of nearly two thousand people showed up. Gil, flying in from a cabinet meeting in Brasília, arrived late. When he walked into the auditorium, the panel discussion under way immediately stopped, and there was a spontaneous standing ovation.~{ A ten-minute video of the CC Brazil opening can be seen at http:// support.creativecommons.org/videos#brasil. }~ “It was like a boxer entering the arena for a heavyweight match,” recalled Glenn Otis Brown. “He had security guards on both sides of him as he walked up the middle aisle. There were flashbulbs, and admirers trailing him, and this wave of people in the audience cresting as he walked by.”~{ Interview with Glenn Otis Brown, August 10, 2006. }~ +When the full set of CC Brazil licenses was finally launched— at the Fifth International Free Software Forum, in Port Alegre on June 4, 2004 — it was a major national event. Brazilian celebrities, government officials, and an enthusiastic crowd of nearly two thousand people showed up. Gil, flying in from a cabinet meeting in Brasília, arrived late. When he walked into the auditorium, the panel discussion under way immediately stopped, and there was a spontaneous standing ovation.~{ A ten-minute video of the CC Brazil opening can be seen at http://support.creativecommons.org/videos#brasil. }~ “It was like a boxer entering the arena for a heavyweight match,” recalled Glenn Otis Brown. 
“He had security guards on both sides of him as he walked up the middle aisle. There were flashbulbs, and admirers trailing him, and this wave of people in the audience cresting as he walked by.”~{ Interview with Glenn Otis Brown, August 10, 2006. }~ ={Brown, Glenn Otis, CC International, and+1} -Gil originally planned to release three of his songs under the new CC Sampling license — dubbed the “Recombo” license — but his record label, Warner Bros., balked. He eventually released one song, “Oslodum,” that he had recorded for an indie label. “One way to think about it,” said Brown, “is that now, anybody in the world can jam with Gilberto Gil.”~{ Film about CC Brazil launch, at http://support.creativecommons.org/ videos#brasil. }~ +Gil originally planned to release three of his songs under the new CC Sampling license — dubbed the “Recombo” license — but his record label, Warner Bros., balked. He eventually released one song, “Oslodum,” that he had recorded for an indie label. “One way to think about it,” said Brown, “is that now, anybody in the world can jam with Gilberto Gil.”~{ Film about CC Brazil launch, at http://support.creativecommons.org/videos#brasil. }~ As culture minister, Gil released all materials from his agency under a CC license, and persuaded the Ministry of Education as well as Radiobrás, the government media agency, to do the same. He also initiated the Cultural Points (Pontos de Cultura) program, which has given small grants to scores of community centers in poor neighborhoods so that residents can learn how to produce their own music and video works. Since industry concentration and payola make it virtually impossible for newcomers to get radio play and commercially distribute their CDs, according to many observers, the project has been valuable in allowing a fresh wave of grassroots music to “go public” and reach new audiences. 
@@ -1816,7 +1816,7 @@ Since its launch in June 2004, Lemos and the CC Brazil office have instigated a In Brazil, there are open-publishing projects for scientific journals;~{ http://www.scielo.br. }~ a Web site that brings together a repository of short films;~{ http://www.portacurtas.com.br. }~ and Overmundo, a popular site for cultural commentary by Internet users.~{ http://www.overmundo.com.br }~ TramaVirtual, an open-platform record label that lets musicians upload their music and fans download it for free, now features more than thirty-five thousand artists.~{ http://tramavirtual.uol.com.br. }~ (By contrast, the largest commercial label in Brazil, Sony-BMG, released only twelve CDs of Brazilian music in 2006, according to Lemos.) -“Cultural production is becoming increasingly disconnected from traditional media forms,” said Lemos, because mass media institutions “are failing to provide the adequate incentives for culture to be produced and circulated. . . . Cultural production is migrating to civil society and/or the peripheries, which more or less already operate in a ‘social commons’ environment, and do not depend on intellectual property within their business models.”~{ Ronaldo Lemos, “From Legal Commons to Social Commons: Developing Countries and the Cultural Industry in the 21st Century,” http://icommons .org/banco/from-legal-commons-to-social-commons-brazil-and-the-culturalindustry-1. }~ +“Cultural production is becoming increasingly disconnected from traditional media forms,” said Lemos, because mass media institutions “are failing to provide the adequate incentives for culture to be produced and circulated. . . . 
Cultural production is migrating to civil society and/or the peripheries, which more or less already operate in a ‘social commons’ environment, and do not depend on intellectual property within their business models.”~{ Ronaldo Lemos, “From Legal Commons to Social Commons: Developing Countries and the Cultural Industry in the 21st Century,” http://icommons.org/banco/from-legal-commons-to-social-commons-brazil-and-the-culturalindustry-1. }~ As more people have adopted legal modes of copying and sharing under CC licenses, it is changing the social and political climate for copyright reform. Now that CC Brazil can cite all sorts of successful free culture ventures, it can more persuasively advocate for a Brazilian version of the fair use doctrine and press for greater photocopying privileges in educational settings (which are legally quite restrictive). ={free culture:international+2} @@ -1826,7 +1826,7 @@ Although the CC licenses are now familiar to many Brazilians, they have encounte As a unique global ambassador of creative sharing, Gilberto Gil did a lot to take the CC licenses to other nations and international forums such as the World Intellectual Property Organization. The day before his 2004 benefit concert for the Creative Commons in New York City with David Byrne, Gil delivered a powerful speech explaining the political implications of free culture: ={Byrne, David;Gil, Gilberto+3;World Intellectual Property Organization} -_1 A global movement has risen up in affirmation of digital culture. 
This movement bears the banners of free software and digital inclusion, as well as the banner of the endless expansion of the circulation of information and creation, and it is the perfect model for a Latin-American developmental cultural policy (other developments are possible) of the most anti-xenophobic, anti-authoritarian, anti-bureaucratizing, anti-centralizing, and for the very reason, profoundly democratic and transformative sort.~{ Gil remarks at New York University, September 19, 2004, at http://www .nyu.edu/fas/NewsEvents/Events/Minister_Gil_speech.pdf. }~ +_1 A global movement has risen up in affirmation of digital culture. This movement bears the banners of free software and digital inclusion, as well as the banner of the endless expansion of the circulation of information and creation, and it is the perfect model for a Latin-American developmental cultural policy (other developments are possible) of the most anti-xenophobic, anti-authoritarian, anti-bureaucratizing, anti-centralizing, and for the very reason, profoundly democratic and transformative sort.~{ Gil remarks at New York University, September 19, 2004, at http://www.nyu.edu/fas/NewsEvents/Events/Minister_Gil_speech.pdf. }~ The Brazilian government was making digital culture “one of its strategic public policies,” Gil said, because “the most important political battle that is being fought today in the technological, economic, social and cultural fields has to do with free software and with the method digital freedom has put in place for the production of shared knowledge. This battle may even signify a change in subjectivity, with critical consequences for the very concept of civilization we shall be using in the near future.”~{ Ibid. }~ @@ -1850,7 +1850,7 @@ In Scotland, government and other public-sector institutions have been huge fans The BBC was a pioneer in making its archived television and radio programs available to the public for free. 
In 2003, inspired by the CC licenses, the BBC drafted its own “Creative Archive” license as a way to open up its vast collection of taxpayer-financed television and radio programs.~{ See http://news.bbc.co.uk/2/hi/help/4527506.stm, and interview with Paula Le Dieu, joint director of the BBC Creative Archive project, May 28, 2004, at http://digital-lifestyles.info/2004/05/28/exclusive-providing-the-fuel-fora-creative-nation-an-interview-with-paula-le-dieu-joint-director-on-the-bbccreative-archive. }~ The license was later adopted by Channel 4, the Open University, the British Film Institute, and the Museum, Libraries and Archives Council. Although the Creative Archive license has similar goals as the CC licenses, it contains several significant differences: it restricts use of video programs to United Kingdom citizens only, and it prohibits use of materials for political or charitable campaigns and for any derogatory purposes. ={BBC} -The CC licenses have proven useful, also, to the British Museum and National Archives. In 2004, these and other British educational institutions were pondering how they should make their publicly funded digital resources available for reuse. A special government panel, the Common Information Environment, recommended usage of the CC licenses because they were already international in scope. The panel liked that the licenses allow Web links in licensed materials, which could help users avoid the complications of formal registration. The panel also cited the virtues of “human readable deeds” and machine-readable metadata.~{ Intrallect Ltd and AHRC Research Centre for Studies in Intellectual Property and Technology Law, University of Edinburgh, “The Common Information Environment and Creative Commons,” October 10, 2005, at http://www .intrallect.com/index.php/intrallect/content/download/632/2631/file/CIE _CC_Final_Report.pdf. }~ +The CC licenses have proven useful, also, to the British Museum and National Archives. 
In 2004, these and other British educational institutions were pondering how they should make their publicly funded digital resources available for reuse. A special government panel, the Common Information Environment, recommended usage of the CC licenses because they were already international in scope. The panel liked that the licenses allow Web links in licensed materials, which could help users avoid the complications of formal registration. The panel also cited the virtues of “human readable deeds” and machine-readable metadata.~{ Intrallect Ltd and AHRC Research Centre for Studies in Intellectual Property and Technology Law, University of Edinburgh, “The Common Information Environment and Creative Commons,” October 10, 2005, at http://www.intrallect.com/index.php/intrallect/content/download/632/2631/file/CIE_CC_Final_Report.pdf. }~ As it happened, a team of Scottish legal scholars led by a private attorney, Jonathan Mitchell, successfully ported the licenses and released them a few months later, in December 2005. The Scottish effort had been initiated a year earlier when Mitchell and his colleagues objected that the U.K. CC licenses then being drafted were too rooted in English law and not sufficiently attuned to Scottish law. Since the introduction of the CC Scotland licenses, public-sector institutions have enthusiastically embraced them. Museums use the licenses on MP3 files that contain audio tours, for example, as well as on Web pages, exhibition materials, and photographs of artworks. Interestingly, in England and Wales, individual artists and creative communities seem to be more active than public-sector institutions in using the licenses. ={Scotland:CC licenses in;Creative Commons International:Scotland;Mitchell, Jonathan} @@ -1858,7 +1858,7 @@ The use of CC licenses for government information and publicly funded materials is inspiring similar efforts in other countries. 
Governments are coming to realize that they are one of the primary stewards of intellectual property, and that the wide dissemination of their work — statistics, research, reports, legislation, judicial decisions — can stimulate economic innovation, scientific progress, education, and cultural development. Unfortunately, as Anne Fitzgerald, Brian Fitzgerald, and Jessica Coates of Australia have pointed out, “putting all such material into the public domain runs the risk that material which is essentially a public and national asset will be appropriated by the private sector, without any benefit to either the government or the taxpayers.”~{ iCommons annual report, 2007, http://www.icommons.org/annual07. }~ For example, the private sector may incorporate the public-domain material into a value-added proprietary model and find other means to take the information private. The classic instance of this is West Publishing’s dominance in the republishing of U.S. federal court decisions. Open-content licenses offer a solution by ensuring that taxpayer-financed works will be available to and benefit the general public. ={Coates, Jessica;Fitzgerald, Anne;Fitzgerald, Brian;West Publishing} -In the United States, the National Institutes of Health has pursued a version of this policy by requiring that federally funded research be placed in an open-access archive or journal within twelve months of its commercial publication. The European Commission announced in 2007 that it plans to build a major open-access digital repository for publicly funded research.~{ Michael Geist, “Push for Open Access to Research, BBC News, February 28, 2007, at http://news.bbc.co.uk/go/pr/fr/~/2/hi/technology/6404429. 
}~ In Mexico, the Sistema Internet de la Presidencia, or Presidency Internet System (SIP), decided in 2006 to adopt CC licenses for all content generated by the Mexican presidency on the Internet — chiefly the president’s various Web sites, Internet radio station, and documents.~{ Creative Commons blog, Alex Roberts, March 8, 2006, at http://creative commons.org/text/sip. }~ In Italy, CC Italy is exploring legislation to open up national and local government archives. It also wants new contract terms for those who develop publicly funded information so that it will automatically be available in the future.~{ Interview with Juan Carlos de Martin, CC Italy, July 17, 2007. }~ +In the United States, the National Institutes of Health has pursued a version of this policy by requiring that federally funded research be placed in an open-access archive or journal within twelve months of its commercial publication. The European Commission announced in 2007 that it plans to build a major open-access digital repository for publicly funded research.~{ Michael Geist, “Push for Open Access to Research, BBC News, February 28, 2007, at http://news.bbc.co.uk/go/pr/fr/~/2/hi/technology/6404429. }~ In Mexico, the Sistema Internet de la Presidencia, or Presidency Internet System (SIP), decided in 2006 to adopt CC licenses for all content generated by the Mexican presidency on the Internet — chiefly the president’s various Web sites, Internet radio station, and documents.~{ Creative Commons blog, Alex Roberts, March 8, 2006, at http://creativecommons.org/text/sip. }~ In Italy, CC Italy is exploring legislation to open up national and local government archives. It also wants new contract terms for those who develop publicly funded information so that it will automatically be available in the future.~{ Interview with Juan Carlos de Martin, CC Italy, July 17, 2007. 
}~ ={Creative Commons International:Italy|Mexico;Italy:CC licenses in;Mexico:CC licenses in} 2~ Laboratories of Free Culture @@ -1875,7 +1875,7 @@ Not surprisingly, the American CC licenses — a version of which was spun off a As a fledgling network, the international CC community is a rudimentary platform for change. Its members are still groping toward a shared understanding of their work and devising new systems of communication and collaboration. But a great deal of cross-border collaboration is occurring. A variety of free culture advocates have constituted themselves as the Asia Commons and met in Bangkok to collaborate on issues of free software, citizen access to government information, and industry antipiracy propaganda. CC Italy has invited leaders of neighboring countries— France, Switzerland, Austria, Croatia, and Slovenia — to share their experiences and work together. A CC Latin America project started /{Scripta}/, a new Spanish-language journal based in Ecuador, to discuss free software and free culture issues affecting the continent. ={Creative Commons International:cross-border collaboration+1} -CC leaders in Finland, France, and Australia have published books about their licensing projects.~{ The French book is Danièle Bourcier and Mélanie Dulong de Rosnay, eds., /{International Commons at the Digital Age}/ (Paris: Romillat, 2004), at http://fr.creativecommons.org/icommons_book.htm. The Finnish book is Herkko Hietanen et al., /{Community Created Content: Law, Business and Policy}/ (Turre Publishing, 2007), at http://www.turre.com/images/stories/books/webkirja_koko_ optimoitu2.pdf. The Australian book is Brian Fitzgerald, /{Open Content Licensing: Cultivating the Creative Commons}/ (Sydney: Sydney University Press, 2007). }~ CC Brazil and CC South Africa have collaborated on a project about copyright and developing nations. 
CC Canada is working with partners to develop an online, globally searchable database of Canadian works in the Canadian public domain. CC Salons have been held in Amsterdam, Toronto, Berlin, Beijing, London, Warsaw, Seoul, Taipei, and Johannesburg. +CC leaders in Finland, France, and Australia have published books about their licensing projects.~{ The French book is Danièle Bourcier and Mélanie Dulong de Rosnay, eds., /{International Commons at the Digital Age}/ (Paris: Romillat, 2004), at http://fr.creativecommons.org/icommons_book.htm. The Finnish book is Herkko Hietanen et al., /{Community Created Content: Law, Business and Policy}/ (Turre Publishing, 2007), at http://www.turre.com/images/stories/books/webkirja_koko_optimoitu2.pdf. The Australian book is Brian Fitzgerald, /{Open Content Licensing: Cultivating the Creative Commons}/ (Sydney: Sydney University Press, 2007). }~ CC Brazil and CC South Africa have collaborated on a project about copyright and developing nations. CC Canada is working with partners to develop an online, globally searchable database of Canadian works in the Canadian public domain. CC Salons have been held in Amsterdam, Toronto, Berlin, Beijing, London, Warsaw, Seoul, Taipei, and Johannesburg. In the Netherlands, CC project lead Paul Keller engineered a breakthrough that may overcome the persistent objections of European collecting societies to CC-licensed content. Collecting societies in Europe generally insist that any musician that they represent transfer all of their copyrights to the collective. This means that professional musicians cannot distribute their works under a CC license. Artists who are already using CC licenses cannot join the collecting societies in order to receive royalties for commercial uses of their works. In this manner, collecting societies in many European nations have effectively prevented many musicians from using the CC licenses. 
={Keller, Paul;collecting societies+1:see also ASCAP} @@ -1893,7 +1893,7 @@ Love was trying to do for books and journal articles what is already possible fo In the end, Creative Commons offered the Developing Nations license as a separate license, not a rider. It had simple terms: “You must attribute the work in the manner specified by the author or licensor (but not in any way that suggests that they endorse you or your use of the work)” — and the license was valid only in non–high income nations, as determined by United Nations’ statistics. Although the release of the license got considerable press coverage, actual usage of the license was extremely small. The most prominent use was totally unexpected — for architectural designs. Architecture for Humanity, a California nonprofit, used the license for its designs of low-cost housing and health centers. The organization wanted to give away its architectural plans to poor countries while not letting its competitors in the U.S. use them for free.~{ Creative Commons blog, Kathryn Frankel, “Commoners: Architecture for Humanity,” June 30, 2006, at http://creativecommons.org/education/architecture. }~ ={United Nations} -The expected uses of the Developing Nations license never materialized. In 2006, Love said, “The license is there, but people who might be willing to use it are not really aware of it.” He worried that the license “hasn’t really been explained in a way that would be obvious to them,” and ventured that there may be “a need for a re-marketing campaign.” By this time, however, the license had attracted the ire of Richard Stallman for its limitations on “freedom.”~{ See Lessig on Creative Commons blog, December 7, 2005, at http://cre ativecommons.org/weblog/archive/2005/12/page/3. }~ It prohibited copying of a work in certain circumstances (in high-income countries) even for noncommercial purposes, and so authorized only a partial grant of freedom, not a universal one. 
“Well, the whole point was /{not}/ to be universal,” said Love. “The license is for people that are unwilling to share with high-income countries, but are willing to share with developing countries. So it actually expands the commons, but only in developing countries.”~{ Interview with James Love, June 13, 2006. }~ +The expected uses of the Developing Nations license never materialized. In 2006, Love said, “The license is there, but people who might be willing to use it are not really aware of it.” He worried that the license “hasn’t really been explained in a way that would be obvious to them,” and ventured that there may be “a need for a re-marketing campaign.” By this time, however, the license had attracted the ire of Richard Stallman for its limitations on “freedom.”~{ See Lessig on Creative Commons blog, December 7, 2005, at http://creativecommons.org/weblog/archive/2005/12/page/3. }~ It prohibited copying of a work in certain circumstances (in high-income countries) even for noncommercial purposes, and so authorized only a partial grant of freedom, not a universal one. “Well, the whole point was /{not}/ to be universal,” said Love. “The license is for people that are unwilling to share with high-income countries, but are willing to share with developing countries. So it actually expands the commons, but only in developing countries.”~{ Interview with James Love, June 13, 2006. }~ ={Lessig, Lawrence:CC International, and+1;Stallman, Richard:freedom, and+2} The controversy that grew up around the Developing Nations license illuminates the different approaches to movement building that Lessig and Stallman represent. Lessig’s advocacy for free culture has been an exploratory journey in pragmatic idealism; Stallman’s advocacy for free software has been more of a crusade of true believers in a core philosophy. For Stallman, the principles of “freedom” are unitary and clear, and so the path forward is fairly self-evident and unassailable. 
For Lessig, the principles of freedom are more situational and evolving and subject to the consensus of key creative communities. The flexibility has enabled a broad-spectrum movement to emerge, but it does not have the ideological coherence of, say, the free software movement. @@ -1905,7 +1905,7 @@ Several factors converged to make it attractive for Creative Commons to revoke t Finally, many CC staff members regarded the Developing Nations and Sampling licenses as misbegotten experiments. Fewer than 0.01 percent of uses of CC licenses at the time involved the Developing Nations license, and the Sampling license was used by a relatively small community of remix artists and musicians. If eliminating two little-used niche licenses could neutralize objections from the open access and free software movements and achieve a greater philosophical and political solidarity in the “free world,” many CC partisans regarded a rescission of the licenses as a modest sacrifice, if not a net gain. ={remix works+1;music:remixes;Creative Commons (CC) licenses:music, for} -In June 2007, Creative Commons announced that it was officially retiring the two licenses.~{ Creative Commons “retired licenses page,” at http://creativecommons.org/ retiredlicenses. }~ In a formal statement, Lessig explained, “These licenses do not meet the minimum standards of the Open Access movement. Because this movement is so important to the spread of science and knowledge, we no longer believe it correct to promote a standalone version of this license.”~{ Lawrence Lessig, “Retiring standalone DevNations and One Sampling License,” message to CC International listserv, June 4, 2007. }~ The Creative Commons also revoked the Sampling license because it “only permits the remix of the licensed work, not the freedom to share it.” (Two other sampling licenses that permit noncommercial sharing— SamplingPlus and NonCommercial SamplingPlus — were retained.) 
+In June 2007, Creative Commons announced that it was officially retiring the two licenses.~{ Creative Commons “retired licenses page,” at http://creativecommons.org/retiredlicenses. }~ In a formal statement, Lessig explained, “These licenses do not meet the minimum standards of the Open Access movement. Because this movement is so important to the spread of science and knowledge, we no longer believe it correct to promote a standalone version of this license.”~{ Lawrence Lessig, “Retiring standalone DevNations and One Sampling License,” message to CC International listserv, June 4, 2007. }~ The Creative Commons also revoked the Sampling license because it “only permits the remix of the licensed work, not the freedom to share it.” (Two other sampling licenses that permit noncommercial sharing— SamplingPlus and NonCommercial SamplingPlus — were retained.) ={Lessig, Lawrence:CC International, and} Anyone could still use the Sampling or Developing Nations license if they wished; they still exist, after all. It’s just that the Creative Commons no longer supports them. While the actual impact of the license revocations was minor, it did have major symbolic and political significance in the commons world. It signaled that the Creative Commons was capitulating to objections by free software advocates and the concerns of open access publishing activists. @@ -2005,7 +2005,7 @@ Ironically, the Creative Commons is not itself a commons, nor do its licenses ne Is one type of commons superior to the others? Does one offer a superior vision of “freedom”? This philosophical issue has been a recurrent source of tension between the Free Software Foundation, the steward of the GPL, and the Creative Commons, whose licenses cater to individual choice. 
-Strictly speaking, a commons essentially offers a binary choice, explained Benkler: “You’re in the commons or you’re out of the commons.” By broadening that binary choice, the CC licenses make the commons a more complicated and ambiguous enterprise. This is precisely what some critics like Stallman have found objectionable about certain CC licenses. They don’t necessarily help forge a community of shared values and commitments. Or as two British critics, David Berry and Giles Moss, have put it, the CC licenses create “commons without commonality.”~{ David Berry and Giles Moss, “On the ‘Creative Commons’: A Critique of the Commons without Commonality,” Free Software Magazine, July 15, 2005, at http://www.freesoftwaremagazine.com/articles/commons_without_com monality. }~ +Strictly speaking, a commons essentially offers a binary choice, explained Benkler: “You’re in the commons or you’re out of the commons.” By broadening that binary choice, the CC licenses make the commons a more complicated and ambiguous enterprise. This is precisely what some critics like Stallman have found objectionable about certain CC licenses. They don’t necessarily help forge a community of shared values and commitments. Or as two British critics, David Berry and Giles Moss, have put it, the CC licenses create “commons without commonality.”~{ David Berry and Giles Moss, “On the ‘Creative Commons’: A Critique of the Commons without Commonality,” Free Software Magazine, July 15, 2005, at http://www.freesoftwaremagazine.com/articles/commons_without_commonality. }~ ={Benkler, Yochai:social movements, on;Berry, David;Moss, Giles;Stallman, Richard:criticisms by} Inviting authors to choose how their work may circulate can result in different types of “commons economies” that may or may not be interoperable. ShareAlike content is isolated from NoDerivatives content; NonCommercial content cannot be used for commercial purposes without explicit permission; and so on. 
CC-licensed works may themselves be incompatible with content licensed under other licenses, such as the GNU Free Documentation License. @@ -2024,7 +2024,7 @@ These are pivotal questions. The answers point toward different visions of free Some critics accuse Creative Commons of betraying the full potential of the commons because its licenses empower individual authors to decide how “shareable” their works can be. The licenses do not place the needs of the general culture or the commons first, as a matter of universal policy, and some licenses restrict how a work may be used. The lamentable result, say critics like Niva Elkin-Koren, is a segmented body of culture that encourages people to think of cultural works as property. People internalize the norms, such as “This is /{my work}/ and /{I’ll}/ decide how it shall be used by others.” ={Elkin-Koren, Niva;commoners:sharing by+1;Creative Commons (CC) licenses:critics of+2} -This can be seen in the actual choices that CC licensors tend to use. Some 67 percent of CC-licensed works do not allow commercial usage.~{ Based on Yahoo queries, June 13, 2006, at http://wiki.creativecommons.org/ License_Statistics. }~ Arguments go back and forth about whether the NC restriction enhances or shrinks freedom. Many musicians and writers want to promote their works on the Internet while retaining the possibility of commercial gain, however remote; this would seem a strike for freedom. Yet critics note that the NC license is often used indiscriminately, even when commercial sales are a remote possibility. This precludes even modest commercial reuses of a work, such as reposting of content on a blog with advertising.~{ Eric Muller, “The Case for Free Use: Reasons Not to Use a Creative Commons–NC License,” at http://freedomdefined.org/Licenses/NC. }~ +This can be seen in the actual choices that CC licensors tend to use. 
Some 67 percent of CC-licensed works do not allow commercial usage.~{ Based on Yahoo queries, June 13, 2006, at http://wiki.creativecommons.org/License_Statistics. }~ Arguments go back and forth about whether the NC restriction enhances or shrinks freedom. Many musicians and writers want to promote their works on the Internet while retaining the possibility of commercial gain, however remote; this would seem a strike for freedom. Yet critics note that the NC license is often used indiscriminately, even when commercial sales are a remote possibility. This precludes even modest commercial reuses of a work, such as reposting of content on a blog with advertising.~{ Eric Muller, “The Case for Free Use: Reasons Not to Use a Creative Commons–NC License,” at http://freedomdefined.org/Licenses/NC. }~ The larger point of criticism is that the Creative Commons licenses do not “draw a line in the sand” about what types of freedoms are inherent to the commons. In the interest of building a broad movement, Creative Commons does not insist upon a clear standard of freedom or prescribe how a commons should be structured. @@ -2045,9 +2045,9 @@ At one point, the philosophical disagreements between the Creative Commons and i Stallman objected to the Sampling license because, while it allowed a remix of a licensed work, it did not allow the freedom to share it. The Developing Nations license was objectionable because its freedoms to copy are limited to people in the developing world, and do not extend to everyone. Stallman also disliked the fact that the CC tag that licensors affix to their works did not specify /{which}/ license they were using. With no clear standard of “freedom” and now a mix of licenses that included two “non-free” licenses, Stallman regarded the CC tag as meaningless and the organization itself problematic. 
-“I used to support Creative Commons,” said Stallman on his blog in July 2005, “but then it adopted some additional licenses which do not give everyone that minimum freedom, and now I no longer endorse it as an activity. I agree with Mako Hill that they are taking the wrong approach by not insisting on any specific freedoms for the public.”~{ Richard Stallman, “Fireworks in Montreal,” at http://www.fsf.org/blogs/ rms/entry-20050920.html. }~ +“I used to support Creative Commons,” said Stallman on his blog in July 2005, “but then it adopted some additional licenses which do not give everyone that minimum freedom, and now I no longer endorse it as an activity. I agree with Mako Hill that they are taking the wrong approach by not insisting on any specific freedoms for the public.”~{ Richard Stallman, “Fireworks in Montreal,” at http://www.fsf.org/blogs/rms/entry-20050920.html. }~ -Mako Hill is a brilliant young hacker and Stallman acolyte who wrote a 2005 essay, “Towards a Standard of Freedom: Creative Commons and the Free Software Movement,”~{ Benjamin Mako Hill, “Towards a Standard of Freedom: Creative Commons and the Free Software Movement,” /{Advogato}/, July 29, 2005, at http://www .advogato.org/article/851.html. }~ a piece that shares Elkin-Koren’s complaint about the CC’s “ideological fuzziness.” Then enrolled in a graduate program at the MIT Media Lab, Hill has written a number of essays on the philosophy and social values of free software. (When he was an undergraduate at Hampshire College, I was an outside advisor for his senior thesis and remain friends with him.) +Mako Hill is a brilliant young hacker and Stallman acolyte who wrote a 2005 essay, “Towards a Standard of Freedom: Creative Commons and the Free Software Movement,”~{ Benjamin Mako Hill, “Towards a Standard of Freedom: Creative Commons and the Free Software Movement,” /{Advogato}/, July 29, 2005, at http://www.advogato.org/article/851.html. 
}~ a piece that shares Elkin-Koren’s complaint about the CC’s “ideological fuzziness.” Then enrolled in a graduate program at the MIT Media Lab, Hill has written a number of essays on the philosophy and social values of free software. (When he was an undergraduate at Hampshire College, I was an outside advisor for his senior thesis and remain friends with him.) ={Elkin-Koren, Niva;Hill, Benjamin Mako+2;free culture:differing visions of+31;free software:social movement, as+31} In his “Freedom’s Standard” essay, Hill wrote: “[D]espite CC’s stated desire to learn from and build upon the example of the free software movement, CC sets no defined limits and promises no freedoms, no rights, and no fixed qualities. Free software’s success is built on an ethical position. CC sets no such standard.” While CC prides itself on its more open-minded “some rights reserved” standard, Hill says that a real movement for freedom must make a bolder commitment to the rights of the audience and other creators— namely, that “essential rights are unreservable.”~{ Interview with Benjamin Mako Hill, June 1, 2007. }~ @@ -2063,7 +2063,7 @@ Lessig has argued many times that, just as the free software community decided f Elkin-Koren is not so sure we can segment the world according to creative sectors and let each determine how works shall circulate. “I don’t think we can separate the different sectors, as if we work in different sectors,” she told me. “We all work in the production of information. My ideas on copyright are really affected by the art that I use and the music that I listen to. . . . Information is essential not only for creating something functional or for selling a work of art, but for our citizenship and for our ability to participate in society. So it’s not as if we can say, ‘Well, this sector can decide for themselves.’”~{ Interview with Niva Elkin-Koren, January 30, 2007. 
}~ ={Elkin-Koren, Niva} -As Wikipedia began to take off in popularity, what might have been an unpleasant philosophical rift grew into a more serious fissure with potentially significant consequences. All Wikipedia content is licensed under the Free Software Foundation’s GNU Free Documentation License, or FDL,~{ Wikipedia entry on GNU Free Documentation license, at http://en.wiki pedia.org/wiki/GNU_Free_Documentation_License. }~ largely because the CC licenses did not exist when Wikipedia was launched in 2001. The FDL, originally intended for the documentation manuals that explicate software applications, is essentially the same as the CC ShareAlike license (any derivative works must also be released under the same license granting the freedom to share). But using the FDL can get cumbersome, especially as more video, audio, and photos are incorporated into a text; each artifact would require that the license be posted on it. As more content is shared, the potential for misuse of the content, and lawsuits over violations of licensing agreements, would grow.~{ Michael Fitzgerald, “Copyleft Hits a Snag,” /{Technology Review}/, December 21, 2005. }~ +As Wikipedia began to take off in popularity, what might have been an unpleasant philosophical rift grew into a more serious fissure with potentially significant consequences. All Wikipedia content is licensed under the Free Software Foundation’s GNU Free Documentation License, or FDL,~{ Wikipedia entry on GNU Free Documentation license, at http://en.wikipedia.org/wiki/GNU_Free_Documentation_License. }~ largely because the CC licenses did not exist when Wikipedia was launched in 2001. The FDL, originally intended for the documentation manuals that explicate software applications, is essentially the same as the CC ShareAlike license (any derivative works must also be released under the same license granting the freedom to share). 
But using the FDL can get cumbersome, especially as more video, audio, and photos are incorporated into a text; each artifact would require that the license be posted on it. As more content is shared, the potential for misuse of the content, and lawsuits over violations of licensing agreements, would grow.~{ Michael Fitzgerald, “Copyleft Hits a Snag,” /{Technology Review}/, December 21, 2005. }~ ={Free Documentation License+10;GNU Project+10;Wikipedia:GNU FDL, and+10|CC licenses, and+10} Unfortunately, as a legal matter, the FDL is incompatible with the CC licenses. This means that all content on Wikipedia and its sister Wikimedia projects (Wikispecies, Wikiquote, Wikinews, among other projects) cannot legally be combined with works licensed under CC licenses. Angered by the two “non-free” CC licenses, Stallman dug in his heels and defended Wikipedia’s use of the FDL. He also made it clear that he would remain a critic of Creative Commons unless it revoked or changed its licenses to conform with the Free Software Foundation’s standards of “freedom.” @@ -2097,7 +2097,7 @@ By May 2008 the details of the agreement to make Wikipedia’s entries, licensed As the Creative Commons has grown in popularity, a longer line has formed to take issue with some of its fundamental strategies. One line of criticism comes from anticapitalist ideologues, another from scholars of the underdeveloped nations of the South. -British academics Berry and Moss apparently hanker for a more bracing revolution in culture;they object to the commodification of culture in any form and to the role that copyright law plays in this drama. To them, Lessig is distressingly centrist. He is “always very keen to disassociate himself and the Creative Commons from the (diabolical) insinuation that he is (God forbid!) 
anti-market, anticapitalist, or communist,” Berry and Moss complain.~{ David Berry and Giles Moss, “On the ‘Creative Commons’: A Critique of the Commons Without Commonality,” /{Free Software Magazine}/, July 15, 2005, at http://www.freesoftwaremagagine.com/articles/commons_without_com monality }~ The gist of their objection: Why is Lessig collaborating with media corporations and neoclassical economists when there is a larger, more profound revolution that needs to be fought? A new social ethic and political struggle are needed, they write, “not lawyers exercising their legal vernacular and skills on complicated licenses, court cases and precedents.”
+British academics Berry and Moss apparently hanker for a more bracing revolution in culture; they object to the commodification of culture in any form and to the role that copyright law plays in this drama. To them, Lessig is distressingly centrist. He is “always very keen to disassociate himself and the Creative Commons from the (diabolical) insinuation that he is (God forbid!) anti-market, anticapitalist, or communist,” Berry and Moss complain.~{ David Berry and Giles Moss, “On the ‘Creative Commons’: A Critique of the Commons Without Commonality,” /{Free Software Magazine}/, July 15, 2005, at http://www.freesoftwaremagazine.com/articles/commons_without_commonality }~ The gist of their objection: Why is Lessig collaborating with media corporations and neoclassical economists when there is a larger, more profound revolution that needs to be fought? A new social ethic and political struggle are needed, they write, “not lawyers exercising their legal vernacular and skills on complicated licenses, court cases and precedents.”
={Berry, David;Moss, Giles;Lessig, Lawrence:CC licenses, and}

Dense diatribes against the antirevolutionary character of Creative Commons can be heard in various hacker venues and cultural blogs and Web sites.
The argument tends to go along the lines sketched here by Anna Nimus of Berlin, Germany: @@ -2125,7 +2125,7 @@ A more radical and profound critique of the commons came in an open letter to _1 We appreciate and admire the determination with which you nurture your garden of licenses. The proliferation and variety of flowering contracts and clauses in your hothouses is astounding. But we find the paradox of a space that is called a commons and yet so fenced in, and in so many ways, somewhat intriguing. The number of times we had to ask for permission, and the number of security check posts we had to negotiate to enter even a corner of your commons was impressive. . . . Sometimes we found that when people spoke of “Common Property” it was hard to know where the commons ended and where property began . . . -_1 Strangely, the capacity to name something as “mine,” even if in order to “share” it, requires a degree of attainments that is not in itself evenly distributed. Not everyone comes into the world with the confidence that anything is “theirs” to share. This means that the “commons,” in your parlance, consists of an arrangement wherein only those who are in the magic circle of confident owners effectively get a share in that which is essentially, still a configuration of different bits of fenced in property. What they do is basically effect a series of swaps, based on a mutual understanding of their exclusive property rights. So I give you something of what I own, in exchange for which, I get something of what you own. The good or item in question never exits the circuit of property, even, paradoxically, when it is shared. 
Goods that are not owned, or those that have been taken outside the circuit of ownership, effectively cannot be shared, or even circulated.~{ “A Letter to the Commons, from the participants of the ‘Shades of the Commons Workshop,’ ” in /{In the Shade of the Commons:Towards a Culture of Open Networks}/ (Amsterdam, Netherlands: Waag Society, 2006), at http://www3 .fis.utoronto.ca/research/iprp/cracin/publications/pdfs/final/werbin_InThe Shade.pdf. }~
+_1 Strangely, the capacity to name something as “mine,” even if in order to “share” it, requires a degree of attainments that is not in itself evenly distributed. Not everyone comes into the world with the confidence that anything is “theirs” to share. This means that the “commons,” in your parlance, consists of an arrangement wherein only those who are in the magic circle of confident owners effectively get a share in that which is essentially, still a configuration of different bits of fenced in property. What they do is basically effect a series of swaps, based on a mutual understanding of their exclusive property rights. So I give you something of what I own, in exchange for which, I get something of what you own. The good or item in question never exits the circuit of property, even, paradoxically, when it is shared. Goods that are not owned, or those that have been taken outside the circuit of ownership, effectively cannot be shared, or even circulated.~{ “A Letter to the Commons, from the participants of the ‘Shades of the Commons Workshop,’ ” in /{In the Shade of the Commons:Towards a Culture of Open Networks}/ (Amsterdam, Netherlands: Waag Society, 2006), at http://www3.fis.utoronto.ca/research/iprp/cracin/publications/pdfs/final/werbin_InTheShade.pdf. }~

The letter invites a deeper consideration of how humans form commons.
However ingenious and useful the jerry-rigged legal mechanisms of the GPL and Creative Commons, the disembodied voice of the Non Legal Commons speaks, as if through the sewer grate, to remind us that the commons is about much more than law and civil society. It is part of the human condition. Yet the chaotic Asiatic street is not likely to yield conventional economic development without the rule of law, civil institutions, and some forms of legal property. The question posed by the informal commons remains a necessary one to ponder: What balance of commons and property rights, and in what forms, is best for a society? @@ -2137,10 +2137,10 @@ Walk through the blossoming schools of commons thought and it quickly becomes cl It is a compelling argument, but in fact only an indirect criticism of Creative Commons. For filmmakers who need to use film clips from existing films and musicians who want to use a riff from another performer, the fair use doctrine is indeed more important than any CC license. Peter Jaszi, the law professor at American University’s Washington School of Law, believes that even with growing bodies of CC-licensed content, “teachers, filmmakers, editors, freelance critics and others need to do things with proprietary content.” As a practical matter, they need a strong, clear set of fair use guidelines. ={Jaszi, Peter+2} -Jaszi and his colleague Pat Aufderheide, a communications professor who runs the Center for Social Media at American University, have dedicated themselves to clarifying the scope and certainty of fair use. 
They have launched a major fair use project to get specific creative communities to define their “best practices in fair use.” If filmmakers, for example, can articulate their own artistic needs and professional interests in copying and sharing, then the courts are more likely to take those standards into consideration when they rule what is protected under the fair use doctrine.~{ Center for Social Media, at http://www.centerforsocialmedia.org/fairuse. See also Pat Aufderheide and Peter Jaszi, “Fair Use and Best Practices: Surprising Success,” /{Intellectual Property Today}/, October 2007, at http://www.iptoday .com/articles/2007-10-aufderheide.asp; and Peter Jaszi, “Copyright, Fair Use and Motion Pictures,” /{Utah Law Review}/ 3, no. 715 (2007), and which also appeared in R. Kolker, ed., /{Oxford Handbook of Film and Media Studies}/ (2007), at http://www.centerforsocialmedia.org/files/pdf/fairuse_motionpictures.pdf. }~ A set of respectable standards for a given field can help stabilize and expand the application of fair use. +Jaszi and his colleague Pat Aufderheide, a communications professor who runs the Center for Social Media at American University, have dedicated themselves to clarifying the scope and certainty of fair use. They have launched a major fair use project to get specific creative communities to define their “best practices in fair use.” If filmmakers, for example, can articulate their own artistic needs and professional interests in copying and sharing, then the courts are more likely to take those standards into consideration when they rule what is protected under the fair use doctrine.~{ Center for Social Media, at http://www.centerforsocialmedia.org/fairuse. See also Pat Aufderheide and Peter Jaszi, “Fair Use and Best Practices: Surprising Success,” /{Intellectual Property Today}/, October 2007, at http://www.iptoday.com/articles/2007-10-aufderheide.asp; and Peter Jaszi, “Copyright, Fair Use and Motion Pictures,” /{Utah Law Review}/ 3, no. 
715 (2007), and which also appeared in R. Kolker, ed., /{Oxford Handbook of Film and Media Studies}/ (2007), at http://www.centerforsocialmedia.org/files/pdf/fairuse_motionpictures.pdf. }~ A set of respectable standards for a given field can help stabilize and expand the application of fair use. ={Aufderheide, Pat+1} -Inspired in part by a professional code developed by news broadcasters, some of the nation’s most respected filmmakers prepared the Documentary Filmmakers’ Statement of Best Practices in Fair Use, which was released in November 2005. The guidelines have since been embraced by the film industry, television programmers, and insurance companies (who insure against copyright violations) as a default definition about what constitutes fair use in documentary filmmaking.~{ Aufderheide and Jaszi, /{Intellectual Property Today}/, October 2007, at http:// www.iptoday.com/articles/2007-10-aufderheide.asp. }~ Aufderheide and Jaszi are currently exploring fair use projects for other fields, such as teaching, as a way to make fair use a more reliable legal tool for sharing and reuse of works. +Inspired in part by a professional code developed by news broadcasters, some of the nation’s most respected filmmakers prepared the Documentary Filmmakers’ Statement of Best Practices in Fair Use, which was released in November 2005. The guidelines have since been embraced by the film industry, television programmers, and insurance companies (who insure against copyright violations) as a default definition about what constitutes fair use in documentary filmmaking.~{ Aufderheide and Jaszi, /{Intellectual Property Today}/, October 2007, at http://www.iptoday.com/articles/2007-10-aufderheide.asp. }~ Aufderheide and Jaszi are currently exploring fair use projects for other fields, such as teaching, as a way to make fair use a more reliable legal tool for sharing and reuse of works. 
Lessig has been highly supportive of the fair use project and, indeed, he oversees his own fair use law clinic at Stanford Law School, which litigates cases frequently. “It’s not as if I don’t think fair use is important,” said Lessig, “but I do think that if the movement focuses on fair use, we don’t attract the people we need. . . . From my perspective, long-term success in changing the fundamental perspectives around copyright depends on something like Creative Commons as opposed to legal action, and even quasi-legal action, like the Fair Use Project.” ={Lessig, Lawrence:fair use, on+5} @@ -2199,11 +2199,11 @@ For the short term, the fledgling models in these fields are likely to be seen a Entrepreneur John Buckman concedes that his Internet record label, Magnatune, amounts to “building a business model on top of chaos.”~{ John Buckman presentation at iCommons Summit, Dubrovnik, Croatia, June 15, 2007. }~ That is to say, he makes money by honoring open networks and people’s natural social inclinations. The company rejects the proprietary muscle games used by its mainstream rivals, and instead holds itself to an ethical standard that verges on the sanctimonious: “We are not evil.” In the music industry these days, a straight shooter apparently has to be that blunt. ={Buckman, John+4;Magnatune+8;music:CC licenses for+8;Creative Commons (CC) licenses:music, for+8} -Magnatune is a four-person enterprise based in Berkeley, California, that since 2003 has been pioneering a new open business model for identifying and distributing high-quality new music. It does not lock up the music with anticopying technology or digital rights management. It does not exploit its artists with coercive, unfair contracts. It does not harass its customers for making unauthorized copies. Internet users can in fact listen to all of Magnatune’s music for free (not just music snippets) via online streaming.~{ John Buckman entry in Wikipedia, at http://en.wikipedia.org/wiki/John_ Buckman. 
}~ +Magnatune is a four-person enterprise based in Berkeley, California, that since 2003 has been pioneering a new open business model for identifying and distributing high-quality new music. It does not lock up the music with anticopying technology or digital rights management. It does not exploit its artists with coercive, unfair contracts. It does not harass its customers for making unauthorized copies. Internet users can in fact listen to all of Magnatune’s music for free (not just music snippets) via online streaming.~{ John Buckman entry in Wikipedia, at http://en.wikipedia.org/wiki/John_Buckman. }~ -Buckman, a former software programmer turned entrepreneur in his thirties, previously founded and ran Lyris Technologies, an e-mail list management company that he sold in 2005. In deciding to start Magnatune, he took note of the obvious realities that the music industry has tried to ignore: radio is boring, CDs cost too much, record labels exploit their artists, file sharing is not going to go away, people love to share music, and listening to music on the Internet is too much work. “I thought, why not make a record label that has a clue?” said Buckman.~{ John Buckman at Magnatune home page, at http://www.magnatune.com/ info/why. }~ +Buckman, a former software programmer turned entrepreneur in his thirties, previously founded and ran Lyris Technologies, an e-mail list management company that he sold in 2005. In deciding to start Magnatune, he took note of the obvious realities that the music industry has tried to ignore: radio is boring, CDs cost too much, record labels exploit their artists, file sharing is not going to go away, people love to share music, and listening to music on the Internet is too much work. “I thought, why not make a record label that has a clue?” said Buckman.~{ John Buckman at Magnatune home page, at http://www.magnatune.com/info/why. 
}~

-Well before the band Radiohead released its In /{Rainbows}/ album with a “pay what you want” experiment, Magnatune was inviting its customers to choose the amount they would be willing to pay, from $5 to $18, for any of Magnatune’s 547 albums. Buckman explains that the arrangement signals a respect for customers who, after all, have lots of free music choices. It also gives them a chance to express their appreciation for artists, who receive 50 percent of the sales price. “It turns out that people are quite generous and they pay on average about $8.40, and they really don’t get anything more for paying more other than feeling like they’re doing the right thing,” said Buckman.~{ John Buckman, interview with Matthew Magee of Out-Law.com, radio podcast, September 13, 2007, at http://www.out-law.com/page-8468. }~ About 20 percent pay more than $12.~{ John Buckman at iCommons, June 15, 2007. For an extensive profile of Buckman and Magnatune, see http://www.openrightsgroup.org/creative business/index.php/John_Buckman:_Magnatune. }~
+Well before the band Radiohead released its /{In Rainbows}/ album with a “pay what you want” experiment, Magnatune was inviting its customers to choose the amount they would be willing to pay, from $5 to $18, for any of Magnatune’s 547 albums. Buckman explains that the arrangement signals a respect for customers who, after all, have lots of free music choices. It also gives them a chance to express their appreciation for artists, who receive 50 percent of the sales price. “It turns out that people are quite generous and they pay on average about $8.40, and they really don’t get anything more for paying more other than feeling like they’re doing the right thing,” said Buckman.~{ John Buckman, interview with Matthew Magee of Out-Law.com, radio podcast, September 13, 2007, at http://www.out-law.com/page-8468. }~ About 20 percent pay more than $12.~{ John Buckman at iCommons, June 15, 2007.
For an extensive profile of Buckman and Magnatune, see http://www.openrightsgroup.org/creativebusiness/index.php/John_Buckman:_Magnatune. }~ ={Radiohead} “The reality is today nobody really needs to pay for music at all,” he acknowledges. “If you choose to hit the ‘buy’ button at Magnatune then you’re one of the people who has decided to actually pay for music. Shouldn’t we reflect that honest behavior back and say, well, if you’re one of the honest people how much do you want to pay?”~{ John Buckman, interview with Matthew Magee, September 13, 2007. }~ The set-your-own-price approach is part of Magnatune’s larger strategy of building the business by cultivating open, interactive relationships with its customers and artists. “If you set up a trusting world,” explains Buckman, “you can be rewarded.” @@ -2231,16 +2231,16 @@ Even as broadcast networks decry the posting of copyrighted television programs Why this inexorable trend toward openness? Because on open networks, excessive control can be counterproductive. The overall value that can be created through interoperability is usually greater than the value that any single player may reap from maintaining its own “walled network.”~{ See Elliot E. Maxwell, “Open Standards, Open Source, and Open Innovation: Harnessing the Benefits of Openness,” /{Innovations:Technology, Governance, Globalization}/ 1, no. 3 (Summer 2006), at http://www.emaxwell.net. }~ For a company to reap value from interoperability, however, it must be willing to compete on an open platform and it must be willing to share technical standards, infrastructure, or content with others. Once this occurs, proprietary gains come from competing to find more sophisticated ways to add value in the production chain, rather than fighting to monopolize basic resources. Advantage also accrues to the company that develops trusting relationships with a community of customers. 
={open business models:value created in+9;value:creation of+9} -Free software was one of the earliest demonstrations of the power of online commons as a way to create value. In his classic 1997 essay “The Cathedral and the Bazaar,” hacker Eric S. Raymond provided a seminal analysis explaining how open networks make software development more cost-effective and innovative than software developed by a single firm.~{ Eric Raymond, “The Cathedral and the Bazaar,” May 1997, at http:// www.catb.org/~esr/writings/cathedral-bazaar. The essay has been translated into nineteen languages to date. }~ A wide-open “bazaar” such as the global Linux community can construct a more versatile operating system than one designed by a closed “cathedral” such as Microsoft. “With enough eyes, all bugs are shallow,” Raymond famously declared. Yochai Benkler gave a more formal economic reckoning of the value proposition of open networks in his pioneering 2002 essay “Coase’s Penguin, or, Linux and the Nature of the Firm.”~{ Yochai Benkler, “Coase’s Penguin, or, Linux and the Nature of the Firm,” /{Yale Law Journal}/ 112, no. 369 (2002), at http://www.benkler.org/CoasesPen guin.html. }~ The title is a puckish commentary on how GNU/Linux, whose mascot is a penguin, poses an empirical challenge to economist Ronald Coase’s celebrated “transaction cost” theory of the firm. In 1937, Coase stated that the economic rationale for forming a business enterprise is its ability to assert clear property rights and manage employees and production more efficiently than contracting out to the marketplace. +Free software was one of the earliest demonstrations of the power of online commons as a way to create value. In his classic 1997 essay “The Cathedral and the Bazaar,” hacker Eric S. 
Raymond provided a seminal analysis explaining how open networks make software development more cost-effective and innovative than software developed by a single firm.~{ Eric Raymond, “The Cathedral and the Bazaar,” May 1997, at http://www.catb.org/~esr/writings/cathedral-bazaar. The essay has been translated into nineteen languages to date. }~ A wide-open “bazaar” such as the global Linux community can construct a more versatile operating system than one designed by a closed “cathedral” such as Microsoft. “With enough eyes, all bugs are shallow,” Raymond famously declared. Yochai Benkler gave a more formal economic reckoning of the value proposition of open networks in his pioneering 2002 essay “Coase’s Penguin, or, Linux and the Nature of the Firm.”~{ Yochai Benkler, “Coase’s Penguin, or, Linux and the Nature of the Firm,” /{Yale Law Journal}/ 112, no. 369 (2002), at http://www.benkler.org/CoasesPenguin.html. }~ The title is a puckish commentary on how GNU/Linux, whose mascot is a penguin, poses an empirical challenge to economist Ronald Coase’s celebrated “transaction cost” theory of the firm. In 1937, Coase stated that the economic rationale for forming a business enterprise is its ability to assert clear property rights and manage employees and production more efficiently than contracting out to the marketplace. ={Benkler, Yochai:open networks, on+3;Raymond, Eric S.:“The Cathedral and the Bazaar”;free software:creation of value, and;Linux:open business models, and;Microsoft:competition against;Coase, Ronald;GNU/Linux:open business models, and;transaction costs:theory of;open business models:“transaction cost” theory, and} What is remarkable about peer production on open networks, said Benkler, is that it undercuts the economic rationale for the firm; commons-based peer production can perform certain tasks more efficiently than a corporation. 
Those tasks must be modular and divisible into small components and capable of being efficiently integrated, Benkler stipulated. The larger point is that value is created on open networks in very different ways than in conventional markets. Asserting proprietary control on network platforms may prevent huge numbers of people from giving your work (free) social visibility, contributing new value to it, or remixing it. “The only thing worse than being sampled on the Internet,” said Siva Vaidhyanathan, with apologies to Oscar Wilde, “is not being sampled on the Internet.” ={Vaidhyanathan, Siva} -The /{New York Times}/'s experience with its paid subscription service, TimesSelect, offers a great example. The /{Times}/ once charged about fifty dollars a year for online access to its premier columnists and news archives. Despite attracting more than 227,000 subscribers and generating about $10 million a year in revenue, the /{Times}/ discontinued the service in 2007.~{ Richard Pérez-Peña, “Times to Stop Charging for Parts of Its Web Site,” /{New York Times}/, September 18, 2007. }~ A /{Times}/ executive explained that lost subscription revenues would be more than offset by advertising to a much larger online readership with free access. The /{Financial Times}/ and the /{Economist}/ have dropped their paywalls, and the /{Wall Street Journal}/ in effect has done so by allowing free access via search engines and link sites. From some leading citadels of capitalism, a rough consensus had emerged: exclusivity can /{decrease}/ the value of online content.~{ Frank Ahrens, “Web Sites, Tear Down That Wall,” /{Washington Post}/, November 16, 2007, p. D1. See also Farhad Manjoo, “The Wall Street Journal’s Website Is Already (Secretly) Free,” /{Salon}/, March 21, 2008, at http://machinist.salon .com/blog/2008/03/21/wsj/index.html. }~ +The /{New York Times}/'s experience with its paid subscription service, TimesSelect, offers a great example. 
The /{Times}/ once charged about fifty dollars a year for online access to its premier columnists and news archives. Despite attracting more than 227,000 subscribers and generating about $10 million a year in revenue, the /{Times}/ discontinued the service in 2007.~{ Richard Pérez-Peña, “Times to Stop Charging for Parts of Its Web Site,” /{New York Times}/, September 18, 2007. }~ A /{Times}/ executive explained that lost subscription revenues would be more than offset by advertising to a much larger online readership with free access. The /{Financial Times}/ and the /{Economist}/ have dropped their paywalls, and the /{Wall Street Journal}/ in effect has done so by allowing free access via search engines and link sites. From some leading citadels of capitalism, a rough consensus had emerged: exclusivity can /{decrease}/ the value of online content.~{ Frank Ahrens, “Web Sites, Tear Down That Wall,” /{Washington Post}/, November 16, 2007, p. D1. See also Farhad Manjoo, “The Wall Street Journal’s Website Is Already (Secretly) Free,” /{Salon}/, March 21, 2008, at http://machinist.salon.com/blog/2008/03/21/wsj/index.html. }~ ={New York Times} -While enormous value can be created on open networks, it can take different forms, notes David P. Reed, who studies information architectures.~{ David P. Reed, “The Sneaky Exponential — Beyond Metcalfe’s Law to the Power of Community Building,” at http://www.reed.com/Papers/GFN/ reedslaw.html. }~ One of the most powerful types of network value is what Reed calls “Group-Forming Networks,” or GFNs — or what Benkler might call commons-based peer production and I would call, less precisely, the commons. Reed talks about “scale-driven value shifts” that occur as a network grows in size. Greater value is created as a network moves from a broadcast model (where “content is king”) to peer production (where transactions dominate) and finally, to a group-forming network or commons (where jointly constructed value is produced and shared). 
+While enormous value can be created on open networks, it can take different forms, notes David P. Reed, who studies information architectures.~{ David P. Reed, “The Sneaky Exponential — Beyond Metcalfe’s Law to the Power of Community Building,” at http://www.reed.com/Papers/GFN/reedslaw.html. }~ One of the most powerful types of network value is what Reed calls “Group-Forming Networks,” or GFNs — or what Benkler might call commons-based peer production and I would call, less precisely, the commons. Reed talks about “scale-driven value shifts” that occur as a network grows in size. Greater value is created as a network moves from a broadcast model (where “content is king”) to peer production (where transactions dominate) and finally, to a group-forming network or commons (where jointly constructed value is produced and shared). ={Reed, David P.;Benkler, Yochai:The Wealth of Networks;commons-based peer production+3;group-forming networks (GFNs)} It is unclear, as a theoretical matter, how to characterize the size and behavior of various “value networks” on the Web today. For simplicity’s stake — and because Web platforms are evolving so rapidly — I refer to two general value propositions, Web 2.0 and the commons. Web 2.0 is about creating new types of value through participation in distributed open networks; the commons is a subset of Web 2.0 that describes fairly distinct, self-governed communities that focus on their own interests, which usually do not involve moneymaking. @@ -2276,7 +2276,7 @@ Netscape was one of the first to demonstrate the power of this model with its re Today, sharing and openness are key to many business strategies. “Open Source: Now It’s an Ecosystem,” wrote /{BusinessWeek}/ in 2005, describing the “gold rush” of venture capital firms investing in startups with open-source products. 
Most of them planned to give away their software via the Web and charge for premium versions or for training, maintenance, and support.~{ “Open Source: Now It’s an Ecosystem,” BusinessWeek Online, October 3, 2005. }~ -The pioneers in using open platforms to develop commercial ecosystems on the Internet are Amazon, Google, Yahoo, and eBay. Each has devised systems that let third-party software developers and businesses extend their platform with new applications and business synergies. Each uses systems that dynamically leverage users’ social behaviors and so stimulate business — for example, customer recommendations about books, search algorithms that identify the most popular Web sites, and reputation systems that enhance consumer confidence in sellers. Even Microsoft, eager to expand the ecology of developers using its products, has released 150 of its source code distributions under three “Shared Source” licenses, two of which meet the Free Software Foundation’s definition of “free.”~{ Microsoft’s Shared Source Licenses, at http://www.microsoft.com/resources/ sharedsource/licensingbasics/sharedsourcelicenses.mspx; see also Lessig blog, “Microsoft Releases Under ShareAlike,” June 24, 2005, at http://lessig .org/blog/2005/06/microsoft_releases_under_share.html. }~ +The pioneers in using open platforms to develop commercial ecosystems on the Internet are Amazon, Google, Yahoo, and eBay. Each has devised systems that let third-party software developers and businesses extend their platform with new applications and business synergies. Each uses systems that dynamically leverage users’ social behaviors and so stimulate business — for example, customer recommendations about books, search algorithms that identify the most popular Web sites, and reputation systems that enhance consumer confidence in sellers. 
Even Microsoft, eager to expand the ecology of developers using its products, has released 150 of its source code distributions under three “Shared Source” licenses, two of which meet the Free Software Foundation’s definition of “free.”~{ Microsoft’s Shared Source Licenses, at http://www.microsoft.com/resources/sharedsource/licensingbasics/sharedsourcelicenses.mspx; see also Lessig blog, “Microsoft Releases Under ShareAlike,” June 24, 2005, at http://lessig.org/blog/2005/06/microsoft_releases_under_share.html. }~ ={Amazon;eBay;Microsoft:“Shared Source” licenses of;Yahoo;Google;World Wide Web:social activity on} More recently, Facebook has used its phenomenal reach — more than 80 million active users worldwide — as a platform for growing a diversified ecology of applications. The company allows software developers to create custom software programs that do such things as let users share reviews of favorite books, play Scrabble or poker with others online, or send virtual gifts to friends. Some apps are just for fun; others are the infrastructure for independent businesses that sell products and services or advertise. In September 2007, Facebook had more than two thousand software applications being used by at least one hundred people.~{ Vauhini Vara, “Facebook Gets Help from Its Friends,” Wall Street Journal, June 22, 2007. See also Riva Richmond, “Why So Many Want to Create Facebook Applications,” /{Wall Street Journal}/, September 4, 2007. }~ @@ -2307,10 +2307,10 @@ The rise of CC+ and associated companies brings to mind Niva Elkin-Koren’s war Revver is another company that has developed an ingenious way to promote the sharing of content, yet still monetize it based on the scale of its circulation. Revver is a Los Angeles–based startup that hosts user-generated video. All videos are embedded with a special tracking tag that displays an ad at the end. 
Like Google’s AdWords system, which charges advertisers for user “click-throughs” on ad links adjacent to Web content, Revver charges advertisers for every time a viewer clicks on an ad. The number of ad views can be tabulated, and Revver splits ad revenues 50-50 with video creators. Key to the whole business model is the use of the CC AttributionNonCommercial-No Derivatives license. The license allows the videos to be legally shared, but prohibits anyone from modifying them or using them for commercial purposes. ={Revver+2;Google;videos and film+2;Internet:videos and films on+2;World Wide Web:videos and film on+2} -One of the most-viewed videos on Revver sparked a minor pop trend. It showed kids dropping Mentos candies into bottles of CocaCola, which produces an explosive chemical reaction. The video is said to have generated around $30,000.~{ Revver entry at Wikipedia, at http://en.wikipedia.org/wiki/Revver. }~ So is new media going to feature silly cat videos and stupid stunts? Steven Starr, a co-founder of Revver, concedes the ubiquity of such videos, but is quick to point to “budding auteurs like Goodnight Burbank, Happy Slip, Studio8 and LoadingReadyRun, all building audiences.” He also notes that online, creators “can take incredible risks with format and genre, can grow their own audience at a fraction of network costs, can enjoy free syndication, hosting, audience-building and ad services at their disposal.”~{ Interview with Steven Starr, “Is Web TV a Threat to TV?” Wall Street Journal, August 7, 2007, at http://online.wsj.com/article/SB118530221391976425 .html. }~ +One of the most-viewed videos on Revver sparked a minor pop trend. It showed kids dropping Mentos candies into bottles of CocaCola, which produces an explosive chemical reaction. The video is said to have generated around $30,000.~{ Revver entry at Wikipedia, at http://en.wikipedia.org/wiki/Revver. }~ So is new media going to feature silly cat videos and stupid stunts? 
Steven Starr, a co-founder of Revver, concedes the ubiquity of such videos, but is quick to point to “budding auteurs like Goodnight Burbank, Happy Slip, Studio8 and LoadingReadyRun, all building audiences.” He also notes that online, creators “can take incredible risks with format and genre, can grow their own audience at a fraction of network costs, can enjoy free syndication, hosting, audience-building and ad services at their disposal.”~{ Interview with Steven Starr, “Is Web TV a Threat to TV?” Wall Street Journal, August 7, 2007, at http://online.wsj.com/article/SB118530221391976425.html. }~ ={Starr, Steven} -Blip.tv is another video content-sharing Web site that splits ad revenues with video creators (although it is not automatic; users must “opt in”). Unlike many videos on YouTube and Revver, blip.tv tends to feature more professional-quality productions and serialized episodes, in part because its founders grew out of the “videoblogging” community. Blip.tv espouses an open business ethic, with shout-outs to “democratization, openness, and sustainability.” While there is a tradition for companies to spout their high-minded principles, blip.tv puts some bite into this claim by offering an open platform that supports many video formats and open metadata standards. And it allows content to be downloaded and shared on other sites. Users can also apply Creative Commons licenses to their videos, which can then be identified by CC-friendly search engines. For all these reasons, Lessig has singled out blip.tv as a “true sharing site,” in contrast to YouTube, which he calls a “faking sharing site” that “gives you tools to /{make}/ it seem as if there’s sharing, but in fact, all the tools drive traffic and control back to a single site.”~{ Lessig blog post, “The Ethics of Web 2.0,” October 20, 2006, at http:// www.lessig.org/blog/archives/003570.shtml. 
}~ +Blip.tv is another video content-sharing Web site that splits ad revenues with video creators (although it is not automatic; users must “opt in”). Unlike many videos on YouTube and Revver, blip.tv tends to feature more professional-quality productions and serialized episodes, in part because its founders grew out of the “videoblogging” community. Blip.tv espouses an open business ethic, with shout-outs to “democratization, openness, and sustainability.” While there is a tradition for companies to spout their high-minded principles, blip.tv puts some bite into this claim by offering an open platform that supports many video formats and open metadata standards. And it allows content to be downloaded and shared on other sites. Users can also apply Creative Commons licenses to their videos, which can then be identified by CC-friendly search engines. For all these reasons, Lessig has singled out blip.tv as a “true sharing site,” in contrast to YouTube, which he calls a “faking sharing site” that “gives you tools to /{make}/ it seem as if there’s sharing, but in fact, all the tools drive traffic and control back to a single site.”~{ Lessig blog post, “The Ethics of Web 2.0,” October 20, 2006, at http://www.lessig.org/blog/archives/003570.shtml. }~ ={blip.tv+1;YouTube+1;Web 2.0:open business, and+3;open business models:open networks and;Lessig, Lawrence:open business sites, and+4} Lessig’s blog post on blip.tv provoked a heated response from blogger Nicholas Carr, a former executive editor of the /{Harvard Business Review}/. The contretemps is worth a close look because it illuminates the tensions between Web 2.0 as a business platform and Web 2.0 as a commons platform. 
In castigating YouTube as a “fake sharing site,” Carr accused Lessig of sounding like Chairman Mao trying to root out counterrevolutionary forces (that is, capitalism) with “the ideology of digital communalism.” @@ -2318,7 +2318,7 @@ Lessig’s blog post on blip.tv provoked a heated response from blogger Nicholas _1 Like Mao, Lessig and his comrades are not only on the wrong side of human nature and the wrong side of culture; they’re also on the wrong side of history. They fooled themselves into believing that Web 2.0 was introducing a new economic system — a system of “social production” — that would serve as the foundation of a democratic, utopian model of culture creation. They were wrong. Web 2.0’s economic system has turned out to be, in effect if not intent, a system of exploitation rather than a system of emancipation. By putting the means of production into the hands of the masses but withholding from those same masses any ownership over the product of their work, Web 2.0 provides an incredibly efficient mechanism to harvest the economic value of the free labor provided by the very, very many and concentrate it into the hands of the very, very few. -_1 The Cultural Revolution is over. It ended before it even began. The victors are the counterrevolutionaries. And they have $1.65 billion [a reference to the sale price of YouTube to Google] to prove it.~{ Nicholas G. Carr, “Web 2.0lier than Thou,” Rough Type blog, October 23, 2006. Joichi Ito has a thoughtful response in his blog, “Is YouTube Web 2.0?” October 22, 2006, at http://joi.ito.com/archives/2006/10/22/is_youtube _web_20.html; and Lessig responded to Carr in his blog, at http://lessig .org/blog/2006/10/stuck_in_the_20th_century_or_t.html. The “communism discourse” persists, and not just among critics of free culture. 
Lawrence Liang of CC India used this epigraph in a book on open-content licenses: “There is a specter haunting cultural production, the specter of open content licensing.” which he attributes to “Karl Marx (reworked for the digital era).” From Liang, /{Guide to Open Content Licenses}/ (Rotterdam, Netherlands: Piet Zwart Institute, Institute for Postgraduate Studies and Research, Willem de Kooning Academy Hogeschool, 2004). }~ +_1 The Cultural Revolution is over. It ended before it even began. The victors are the counterrevolutionaries. And they have $1.65 billion [a reference to the sale price of YouTube to Google] to prove it.~{ Nicholas G. Carr, “Web 2.0lier than Thou,” Rough Type blog, October 23, 2006. Joichi Ito has a thoughtful response in his blog, “Is YouTube Web 2.0?” October 22, 2006, at http://joi.ito.com/archives/2006/10/22/is_youtube_web_20.html; and Lessig responded to Carr in his blog, at http://lessig.org/blog/2006/10/stuck_in_the_20th_century_or_t.html. The “communism discourse” persists, and not just among critics of free culture. Lawrence Liang of CC India used this epigraph in a book on open-content licenses: “There is a specter haunting cultural production, the specter of open content licensing.” which he attributes to “Karl Marx (reworked for the digital era).” From Liang, /{Guide to Open Content Licenses}/ (Rotterdam, Netherlands: Piet Zwart Institute, Institute for Postgraduate Studies and Research, Willem de Kooning Academy Hogeschool, 2004). }~ Lessig’s response, a warm-up for a new book, /{Remix}/, released in late 2008, pointed out that there are really /{three}/ different economies on the Internet — commercial, sharing, and hybrid. 
The hybrid economy now emerging is difficult to understand, he suggested, because it “neither gives away everything, nor does it keep everything.” The challenge of open business models, Lessig argues, is to discover the “golden mean.” ={Lessig, Lawrence:Remix;Internet:hybrid economy enabled by+1|sharing economy of+1|commercial economy of+1} @@ -2336,7 +2336,7 @@ The Brazilian /{tecnobrega}/ music scene discussed briefly in chapter 7 is anoth Artists make most of their money from these live performances, not from CDs, said Lemos. Bands earn an average of $1,100 per solo performance at these events, and $700 when playing with other bands — this, in a region where the average monthly income is $350. Altogether, Lemos estimates that the sound system parties as a business sector earn $1.5 million per month, on fixed assets of $8 million. -“The band Calypso has been approached several times by traditional record labels,” said Lemos, “but they turned down all the offers. The reason is that they make more money by means of the existing business model. In an interview with the largest Brazilian newspaper, the singer of the band said, ‘We do not fight the pirates. We have become big /{because}/ of piracy, which has taken our music to cities where they would never have been.’ ” Calypso has sold more than 5 million albums in Brazil and is known for attracting as many as fifty thousand people to its concerts, Lemos said.~{ Ronaldo Lemos, “From Legal Commons to Social Commons: Developing Countries and the Cultural Industry in the 21st Century,” 2006, at http://www.icommons.org/resources/from-legal-commons-to-social-comm ons-brazil-and-the-cultural-industry-1. See Paula Martini post on iCommons blog, “Over the Top: The New (and Bigger) Cultural Industry in Brazil,” September 28, 2007, at http://www.icommons.org/articles/over-the-top-thenew-and-bigger-cultural-industry-in-brazil. 
}~ +“The band Calypso has been approached several times by traditional record labels,” said Lemos, “but they turned down all the offers. The reason is that they make more money by means of the existing business model. In an interview with the largest Brazilian newspaper, the singer of the band said, ‘We do not fight the pirates. We have become big /{because}/ of piracy, which has taken our music to cities where they would never have been.’ ” Calypso has sold more than 5 million albums in Brazil and is known for attracting as many as fifty thousand people to its concerts, Lemos said.~{ Ronaldo Lemos, “From Legal Commons to Social Commons: Developing Countries and the Cultural Industry in the 21st Century,” 2006, at http://www.icommons.org/resources/from-legal-commons-to-social-commons-brazil-and-the-cultural-industry-1. See Paula Martini post on iCommons blog, “Over the Top: The New (and Bigger) Cultural Industry in Brazil,” September 28, 2007, at http://www.icommons.org/articles/over-the-top-thenew-and-bigger-cultural-industry-in-brazil. }~ ={piracy} Another highly successful open business model in the Brazilian music scene is TramaVirtual, an open platform on which more than 15,000 musicians have uploaded some 35,000 albums. Fans can then download the music for free. While this does not sound like a promising business proposition, it makes a lot of sense in the context of Brazil’s music marketplace. Major record labels release a minuscule number of new Brazilian music CDs each year, and they sell for about $10 to $15.~{ Ibid. }~ Only the cultured elite can afford music CDs, and the native musical talent — which is plentiful in Brazil — has no place to go. With such a constricted marketplace, TramaVirtual has become hugely popular by showcasing new and interesting music. 
@@ -2363,7 +2363,7 @@ Virtually all the albums on Jamendo use one or more of the six basic CC licenses For businesses operating on open networks, it is a mistake to regard people merely as customers; they are collaborators and even coinvestors. As more companies learn to interact closely with their customers, it is only natural that conversations about the product or service become more intimate and collaborative. The roles of the “consumer” and “producer” are starting to blur, leading to what some business analysts call the “prosumer”~{ Don Tapscott and Anthony D. Williams, /{Wikinomics: How Mass Collaboration Changes Everything}/ (New York Portfolio, 2006), chapter 5, “The Prosumers.” }~ and the “decentralized co-creation of value.”~{ David Bollier, /{The Rise of Collective Intelligence: Decentralized Co-creation of Value as a New Paradigm of Commerce and Culture}/ (Washington, DC: Aspen Institute Communications and Society Program, 2008).}~ The basic idea is that online social communities are becoming staging areas for the advancement of business objectives. Businesses see these communities as cost-effective ways to identify promising innovations, commercialize them more rapidly, tap into more reliable market intelligence, and nurture customer goodwill. -Amateurs who share with one another through a loose social commons have always been a source of fresh ideas. Tech analyst Elliot Maxwell (citing Lessig) notes how volunteers helped compile the /{Oxford English Dictionary}/ by contributing examples of vernacular usage; how the Homebrew Computer Club in the San Francisco Bay area developed many elements of the first successful personal computer; and how sharing among auto enthusiasts helped generate many of the most important early automotive innovations.~{ Elliot Maxwell, “Open Standards, Open Source, and Open Innovation: Harnessing the Benefits of Openness,” /{Innovations:Technology, Governance, Globalization}/ 1, no. 
3 (Summer 2006), at http://www.emaxwell.net, p. 150. }~ In our time, hackers were the ones who developed ingenious ways to use unlicensed electromagnetic spectrum as a commons, which we now know as Wi-Fi. They tinkered with the iPod to come up with podcasts, a new genre of broadcasting that commercial broadcasters now emulate.~{ Elliot E. Maxwell drew my attention to these examples in his excellent essay “Open Standards, Open Source, and Open Innovation.” }~ Numerous self-organized commons have incubated profitable businesses. Two movie buffs created the Internet Movie Database as separate Usenet newsgroups in 1989; six years later they had grown so large that they had merged and converted into a business that was later sold to Amazon.~{ Wikipedia entry, IMDB, at http://en.wikipedia.org/wiki/Internet_Movie _Database. }~ The Compact Disc Database was a free database of software applications that looks up information about audio CDs via the Internet. It was originally developed by a community of music fans as a shared database, but in 2000 it had grown big enough that it was sold and renamed Gracenote.~{ Wikipedia entry, CDDB, at http://en.wikipedia.org/wiki/CDDB. }~ +Amateurs who share with one another through a loose social commons have always been a source of fresh ideas. Tech analyst Elliot Maxwell (citing Lessig) notes how volunteers helped compile the /{Oxford English Dictionary}/ by contributing examples of vernacular usage; how the Homebrew Computer Club in the San Francisco Bay area developed many elements of the first successful personal computer; and how sharing among auto enthusiasts helped generate many of the most important early automotive innovations.~{ Elliot Maxwell, “Open Standards, Open Source, and Open Innovation: Harnessing the Benefits of Openness,” /{Innovations:Technology, Governance, Globalization}/ 1, no. 3 (Summer 2006), at http://www.emaxwell.net, p. 150. 
}~ In our time, hackers were the ones who developed ingenious ways to use unlicensed electromagnetic spectrum as a commons, which we now know as Wi-Fi. They tinkered with the iPod to come up with podcasts, a new genre of broadcasting that commercial broadcasters now emulate.~{ Elliot E. Maxwell drew my attention to these examples in his excellent essay “Open Standards, Open Source, and Open Innovation.” }~ Numerous self-organized commons have incubated profitable businesses. Two movie buffs created the Internet Movie Database as separate Usenet newsgroups in 1989; six years later they had grown so large that they had merged and converted into a business that was later sold to Amazon.~{ Wikipedia entry, IMDB, at http://en.wikipedia.org/wiki/Internet_Movie_Database. }~ The Compact Disc Database was a free database of software applications that looks up information about audio CDs via the Internet. It was originally developed by a community of music fans as a shared database, but in 2000 it had grown big enough that it was sold and renamed Gracenote.~{ Wikipedia entry, CDDB, at http://en.wikipedia.org/wiki/CDDB. }~ ={Amazon;Gracenote;Homebrew Computer Club;iPod;Maxwell, Elliot;Oxford English Dictionary;Wi-Fi;hackers:community of;commons:sources of new ideas, as+11} A commons can be highly generative because its participants are tinkering and innovating for their own sake — for fun, to meet a challenge, to help someone out. Amateurs are not constrained by conventional business ideas about what may be marketable and profitable. They do not have to meet the investment expectations of venture capitalists and Wall Street. Yet once promising new ideas do surface in the commons, market players can play a useful role in supplying capital and management expertise to develop, improve, and commercialize an invention. 
@@ -2383,7 +2383,7 @@ Lego decided to write a “right to hack” provision into the Mindstorms softwa Another improbable success in distributed, user-driven innovation is Threadless, a Chicago-based t-shirt company. Threadless sells hundreds of original t-shirt designs, each of which is selected by the user community from among more than eight hundred designs submitted every week. The proposed designs are rated on a scale of one to five by the Web site’s more than 600,000 active users. Winners receive cash awards, recognition on the Web site, and their names on the t-shirt label. Every week, Threadless offers six to ten new t-shirts featuring the winning designs. ={Threadless+1} -In 2006, the company sold more than 1.5 million t-shirts without any traditional kind of marketing. Its business model is so rooted in the user community that Threadless co-founders Jake Nickell and Jacob DeHart have declined offers to sell their t-shirts through conventional, big-name retailers. Threadless’s business model has helped it overcome two major challenges in the apparel industry, write Harvard Business School professor Karim R. Lakhani and consultant Jill A. Panetta — the ability “to attract the right design talent at the right time to create recurring fashion hits,” and the ability “to forecast sales so as to be better able to match production cycles with demand cycles.”~{ Karim R. Lakhani and Jill A. Panetta, “The Principles of Distributed Innovation,” Research Publication No. 2007-7, Berkman Center for Internet & Society, Harvard Law School, October 2007, at http://papers.ssrn.com/abstract _id=1021034. See also Darren Dahl, “Nice Threads,” /{Southwest Airlines Spirit}/, December 2006. }~ +In 2006, the company sold more than 1.5 million t-shirts without any traditional kind of marketing. 
Its business model is so rooted in the user community that Threadless co-founders Jake Nickell and Jacob DeHart have declined offers to sell their t-shirts through conventional, big-name retailers. Threadless’s business model has helped it overcome two major challenges in the apparel industry, write Harvard Business School professor Karim R. Lakhani and consultant Jill A. Panetta — the ability “to attract the right design talent at the right time to create recurring fashion hits,” and the ability “to forecast sales so as to be better able to match production cycles with demand cycles.”~{ Karim R. Lakhani and Jill A. Panetta, “The Principles of Distributed Innovation,” Research Publication No. 2007-7, Berkman Center for Internet & Society, Harvard Law School, October 2007, at http://papers.ssrn.com/abstract_id=1021034. See also Darren Dahl, “Nice Threads,” /{Southwest Airlines Spirit}/, December 2006. }~ ={DeHart, Jacob;Nickell, Jake;Lakhani, Karim R.;Panetta, Jill A.} A number of companies have started successful enterprises based on the use of wikis, the open Web platforms that allow anyone to contribute and edit content and collaborate. Evan Prodromou, the founder of Wikitravel, a free set of worldwide travel guides, has identified four major types of wiki businesses: service providers who sell access to wikis (Wikispace, wetpaint, PBwiki); content hosters of wikis (wikiHow, Wikitravel, Wikia); consultants who advise companies how to run their own wikis (Socialtext); and content developers (WikiBiz, an offshoot of Wikipedia). @@ -2412,7 +2412,7 @@ What has changed in recent years is our perceptions. 
The actual role of the comm /{Web 2.0 tools, open access, and CC licenses are helping to accelerate scientific discovery.}/ -It was one of those embarrassing episodes in science: Two sets of researchers published papers in a German organic chemistry journal, /{Angewandte Chemie}/, announcing that they had synthesized a strange new substance with “12-membered rings.” Then, as blogger and chemist Derek Lowe tells the story, “Professor Manfred Cristl of Wurzburg, who apparently knows his pyridinium chemistry pretty well, recognized this as an old way to make further pyridinium salts, not funky twelve-membered rings. He recounts how over the last couple of months he exchanged awkward emails with the two sets of authors, pointing out that they seem to have rediscovered a 100-year-old reaction. . . .”~{ Derek Lowe, “Neat! Wish It Were True!” /{In the Pipeline}/ [blog], November 29, 2007, at http://pipeline.corante.com. See also, Donna Wentworth, “Why We Need to Figure Out What We Already Know,” Science Commons blog, January 4, 2008, at http://sciencecommons.org/weblog/archives/2008/01/04/ why-we-need-to-figure-out-what-we-already-know. }~ +It was one of those embarrassing episodes in science: Two sets of researchers published papers in a German organic chemistry journal, /{Angewandte Chemie}/, announcing that they had synthesized a strange new substance with “12-membered rings.” Then, as blogger and chemist Derek Lowe tells the story, “Professor Manfred Cristl of Wurzburg, who apparently knows his pyridinium chemistry pretty well, recognized this as an old way to make further pyridinium salts, not funky twelve-membered rings. He recounts how over the last couple of months he exchanged awkward emails with the two sets of authors, pointing out that they seem to have rediscovered a 100-year-old reaction. . . .”~{ Derek Lowe, “Neat! Wish It Were True!” /{In the Pipeline}/ [blog], November 29, 2007, at http://pipeline.corante.com. 
See also, Donna Wentworth, “Why We Need to Figure Out What We Already Know,” Science Commons blog, January 4, 2008, at http://sciencecommons.org/weblog/archives/2008/01/04/why-we-need-to-figure-out-what-we-already-know. }~ ={Lowe, Derek} In the Internet age, people generally assume that these kinds of things can’t happen. All you have to do is run a Web search for “pyridinium,” right? But as scientists in every field are discovering, the existence of some shard of highly specialized knowledge does not necessarily mean that it can be located or understood. After all, a Google search for “pyridinium” turns up 393,000 results. And even peer reviewers for journals (who may have been partly at fault in this instance) have the same problem as any researcher: the unfathomable vastness of the scientific and technical literature makes it difficult to know what humankind has already discovered. @@ -2438,12 +2438,12 @@ Perhaps the most salient example of the power of open science was the Human Geno A 2008 report by the Committee for Economic Development identified a number of other notable open research projects.~{ Committee for Economic Development, /{Harnessing Openness to Transform American Health Care}/ (Washington, DC: CED, 2008). }~ There is the PubChem database, which amasses data on chemical genomics from a network of researchers; the Cancer Biomedical Informatics Grid, a network of several dozen cancer research centers and other organizations that shares data, research tools, and software applications; and TDR Targets, a Web clearinghouse sponsored by the World Health Organization that lets researchers share genetic data on neglected diseases such as malaria and sleeping sickness. It is telling that Bill Gates, who in his commercial life is a staunch advocate of proprietary control of information, has been a leader, through his Bill & Melinda Gates Foundation, in requiring research grantees to share their data.
={Gates, Bill} -There has even been the emergence of open-source biotechnology, which is applying the principles of free software development to agricultural biotech and pharmaceutical development.~{ See, e.g., Rockefeller Foundation, “2005 Bellagio Meeting on Open Source Models of Collaborative Innovation in the Life Sciences” [report], Bellagio, Italy, September 2005. See also Janet Elizabeth Hope, “Open Source Biotechnology,” Ph.D. diss., Australian National University, December 2004. }~ Richard Jefferson, the founder of Cambia, a nonprofit research institute in Australia, launched the “kernel” of what he calls the first opensource biotech toolkit. It includes patented technologies such as TransBacter, which is a method for transferring genes to plants, and GUSPlus, which is a tool for visualizing genes and understanding their functions.~{ Interview with Richard Jefferson, September 7, 2006. See also http://www .cambia.org. }~ By licensing these patented research tools for open use, Jefferson hopes to enable researchers anywhere in the world— not just at large biotech companies or universities — to develop their own crop improvement technologies. +There has even been the emergence of open-source biotechnology, which is applying the principles of free software development to agricultural biotech and pharmaceutical development.~{ See, e.g., Rockefeller Foundation, “2005 Bellagio Meeting on Open Source Models of Collaborative Innovation in the Life Sciences” [report], Bellagio, Italy, September 2005. See also Janet Elizabeth Hope, “Open Source Biotechnology,” Ph.D. diss., Australian National University, December 2004. }~ Richard Jefferson, the founder of Cambia, a nonprofit research institute in Australia, launched the “kernel” of what he calls the first opensource biotech toolkit. 
It includes patented technologies such as TransBacter, which is a method for transferring genes to plants, and GUSPlus, which is a tool for visualizing genes and understanding their functions.~{ Interview with Richard Jefferson, September 7, 2006. See also http://www.cambia.org. }~ By licensing these patented research tools for open use, Jefferson hopes to enable researchers anywhere in the world— not just at large biotech companies or universities — to develop their own crop improvement technologies. ={Jefferson, Richard} 2~ The Viral Spiral in Science -Sociologist Robert Merton is often credited with identifying the social values and norms that make science such a creative, productive enterprise. In a notable 1942 essay, Merton described scientific knowledge as “common property” that depends critically upon an open, ethical, peer-driven process.~{ Robert Merton, “Science and Democratic Social Structure,” in /{Social Theory and Social Structure}/, 3d ed. (New York: Free Press, 1968), pp. 604–15. }~ Science is an engine of discovery precisely because research is available for all to see and replicate. It has historically tried to keep some distance from the marketplace for fear that corporate copyrights, patents, or contractual agreements will lock up knowledge that should be available to everyone, especially future scientists.~{ Richard R. Nelson, “The Market Economy and the Scientific Commons,” /{Research Policy}/ 33, no. 3 (April 2004), pp. 455–71. See also Karim R. Lakhani et al., “The Value of Openness in Scientific Problem Solving,” Harvard Business School Working Paper 07-050, January 2007, at http://www.hbs.edu/ research/pdf/07-050.pdf. }~ Secrecy can also make it difficult for the scientific community to verify research results. +Sociologist Robert Merton is often credited with identifying the social values and norms that make science such a creative, productive enterprise. 
In a notable 1942 essay, Merton described scientific knowledge as “common property” that depends critically upon an open, ethical, peer-driven process.~{ Robert Merton, “Science and Democratic Social Structure,” in /{Social Theory and Social Structure}/, 3d ed. (New York: Free Press, 1968), pp. 604–15. }~ Science is an engine of discovery precisely because research is available for all to see and replicate. It has historically tried to keep some distance from the marketplace for fear that corporate copyrights, patents, or contractual agreements will lock up knowledge that should be available to everyone, especially future scientists.~{ Richard R. Nelson, “The Market Economy and the Scientific Commons,” /{Research Policy}/ 33, no. 3 (April 2004), pp. 455–71. See also Karim R. Lakhani et al., “The Value of Openness in Scientific Problem Solving,” Harvard Business School Working Paper 07-050, January 2007, at http://www.hbs.edu/research/pdf/07-050.pdf. }~ Secrecy can also make it difficult for the scientific community to verify research results. ={Merton, Robert;science:scientific knowledge+2} Although scientific knowledge eventually becomes publicly available, it usually flows in semi-restricted ways, at least initially, because scientists usually like to claim personal credit for their discoveries. They may refuse to share their latest research lest a rival team of scientists gain a competitive advantage. They may wish to claim patent rights in their discoveries. @@ -2451,13 +2451,13 @@ Although scientific knowledge eventually becomes publicly available, it usually So scientific knowledge is not born into the public sphere, but there is a strong presumption that it ought to be treated as a shared resource as quickly as possible. As law scholar Robert Merges noted in 1996, “Science is not so much given freely to the public as shared under a largely implicit code of conduct among a more or less well identified circle of similarly situated scientists. In other words . . 
. science is more like a limited-access commons than a truly open public domain.”~{ Robert Merges, “Property Rights Theory and the Commons: The Case of Scientific Research,” /{Social Philosophy and Policy}/ 13, no. 2 (Summer 1996), pp. 145–61. }~ In certain disciplines, especially those involving large capital equipment such as telescopes and particle accelerators, the sharing of research is regarded as a kind of membership rule for belonging to a club. ={Merges, Robert} -As Web 2.0 innovations have demonstrated the power of the Great Value Shift, the convergence of open source, open access, and open science has steadily gained momentum.~{ John Willinsky, “The Unacknowledged Convergence of Open Source, Open Access and Open Science,” /{First Monday}/ 10, no. 8 (August 2005), at http:// firstmonday.org/issues/issue10_8/willinsky/index.html. }~ Creative Commons was mindful of this convergence from its beginnings, but it faced formidable practical challenges in doing anything about it. “From the very first meetings of Creative Commons,” recalled law professor James Boyle, a CC board member, “we thought that science could be the killer app. We thought that science could be the place where Creative Commons could really make a difference, save lives, and have a dramatic impact on the world. There is massive, unnecessary friction in science and we think we can deal with it. Plus, there’s the Mertonian ideal of science, with which Creative Commons couldn’t fit more perfectly.”~{ Interview with James Boyle, August 15, 2006. }~ +As Web 2.0 innovations have demonstrated the power of the Great Value Shift, the convergence of open source, open access, and open science has steadily gained momentum.~{ John Willinsky, “The Unacknowledged Convergence of Open Source, Open Access and Open Science,” /{First Monday}/ 10, no. 8 (August 2005), at http://firstmonday.org/issues/issue10_8/willinsky/index.html. 
}~ Creative Commons was mindful of this convergence from its beginnings, but it faced formidable practical challenges in doing anything about it. “From the very first meetings of Creative Commons,” recalled law professor James Boyle, a CC board member, “we thought that science could be the killer app. We thought that science could be the place where Creative Commons could really make a difference, save lives, and have a dramatic impact on the world. There is massive, unnecessary friction in science and we think we can deal with it. Plus, there’s the Mertonian ideal of science, with which Creative Commons couldn’t fit more perfectly.”~{ Interview with James Boyle, August 15, 2006. }~ ={Merton, Robert;Boyle, James:Science Commons, and+1;Great Value Shift;Web 2.0:Great Value Shift, and} But despite its early interest in making the Web more research-friendly, Creative Commons realized that science is a special culture unto itself, one that has so many major players and niche variations that it would be foolhardy for an upstart nonprofit to try to engage with it. So in 2002 Creative Commons shelved its ambitions to grapple with science as a commons, and focused instead on artistic and cultural sectors. By January 2005, however, the success of the CC licenses emboldened the organization to revisit its initial idea. As a result of deep personal engagement by several Creative Commons board members — computer scientist Hal Abelson, law professors James Boyle and Michael Carroll, and film producer Eric Saltzman — Creative Commons decided to launch a spin-off project, Science Commons. 
The new initiative would work closely with scientific disciplines and organizations to try to build what it now calls “the Research Web.” ={Abelson, Hal:CC board, on;Carroll, Michael W.;Saltzman, Eric;Science Commons:CC Commons spinoff, and+5} -Science Commons aims to redesign the “information space” — the technologies, legal rules, institutional practices, and social norms — so that researchers can more easily share their articles, datasets, and other resources. The idea is to reimagine and reinvent the “cognitive infrastructures” that are so critical to scientific inquiry. Dismayed by the pressures exerted by commercial journal publishers, open-access publishing advocate Jean-Claude Guédon has called on librarians to become “epistemological engineers.”~{ Jean-Claude Guédon, “In Oldenburg’s Long Shadow: Librarians, Research Scientists, Publishers and the Control of Scientific Publishing,” at http:// www.arl.org/resources/pubs/mmproceedings/138guedon.shtml. }~ They need to design better systems (technical, institutional, legal, and social) for identifying, organizing, and using knowledge. The payoff? Speedier research and greater scientific discovery and innovation. It turns out that every scientific discipline has its own special set of impediments to address. The recurring problem is massive, unnecessary transaction costs. There is an enormous waste of time, expense, bureaucracy, and logistics in acquiring journal articles, datasets, presentations, and physical specimens. +Science Commons aims to redesign the “information space” — the technologies, legal rules, institutional practices, and social norms — so that researchers can more easily share their articles, datasets, and other resources. The idea is to reimagine and reinvent the “cognitive infrastructures” that are so critical to scientific inquiry. 
Dismayed by the pressures exerted by commercial journal publishers, open-access publishing advocate Jean-Claude Guédon has called on librarians to become “epistemological engineers.”~{ Jean-Claude Guédon, “In Oldenburg’s Long Shadow: Librarians, Research Scientists, Publishers and the Control of Scientific Publishing,” at http://www.arl.org/resources/pubs/mmproceedings/138guedon.shtml. }~ They need to design better systems (technical, institutional, legal, and social) for identifying, organizing, and using knowledge. The payoff? Speedier research and greater scientific discovery and innovation. It turns out that every scientific discipline has its own special set of impediments to address. The recurring problem is massive, unnecessary transaction costs. There is an enormous waste of time, expense, bureaucracy, and logistics in acquiring journal articles, datasets, presentations, and physical specimens. ={Science Commons:libraries, and+5;science:transaction costs in+1;transaction costs:in science+1;libraries:Science Commons, and} If transaction costs could be overcome, scientists could vastly accelerate their research cycles. They could seek answers in unfamiliar bodies of research literature. They could avoid duplicating other people’s flawed research strategies. They could formulate more imaginative hypotheses and test them more rapidly. They could benefit from a broader, more robust conversation (as in free software — “with enough eyes, all bugs are shallow”) and use computer networks to augment and accelerate the entire scientific process. @@ -2505,7 +2505,7 @@ Not surprisingly, many commercial publishers regard OA publishing as a disruptiv It remains an open question whether the OA business model will work in fields where little research is directly funded (and thus upfront payments are not easily made). 
As Suber reports, “There are hundreds of OA journals in the humanities, but very, very few of them charge a fee on the author’s side; most of them have institutional subsidies from a university say, or a learned society.”~{ Interview with Peter Suber, June 28, 2006. }~ Yet such subsidies, in the overall scheme of things, may be more attractive to universities or learned societies than paying high subscription fees for journals or online access. ={Suber, Peter+1} -The tension between commercial publishers and academic authors has intensified over the past decade, fueling interest in OA alternatives. The most salient point of tension is the so-called “serials crisis.” From 1986 to 2006, libraries that belong to the Association of Research Libraries saw the cost of serial journals rise 321 percent, or about 7.5 percent a year for twenty consecutive years.~{ Association of Research Libraries, /{ARL Statistics}/ 2005–06, at http://www.arl .org/stats/annualsurveys/ar/stats/arlstats06.shtml. }~ This rate is four times higher than the inflation rate for those years. Some commercial journal publishers reap profits of nearly 40 percent a year.~{ Peter Suber, “Creating an Intellectual Commons through Open Access,” in Charlotte Hess and Elinor Ostrom, eds., /{Understanding Knowledge as a Commons: From Theory to Practice}/ (Cambridge, MA: MIT Press, 2007), p. 175. }~ By 2000 subscription rates were so crushing that the Association of American Universities and the Association of Research Libraries issued a joint statement that warned, “The current system of scholarly publishing has become too costly for the academic community to sustain.”~{ Association of Research Libraries, “Tempe Principles for Emerging Systems of Scholarly Publishing,” May 10, 2000, at http://www.arl.org/resources/pubs/ tempe/index.shtml. 
}~ Three years later, the high price of journals prompted Harvard, the University of California, Cornell, MIT, Duke, and other elite research universities to cancel hundreds of journal subscriptions — a conspicuous act of rebellion by the library community. +The tension between commercial publishers and academic authors has intensified over the past decade, fueling interest in OA alternatives. The most salient point of tension is the so-called “serials crisis.” From 1986 to 2006, libraries that belong to the Association of Research Libraries saw the cost of serial journals rise 321 percent, or about 7.5 percent a year for twenty consecutive years.~{ Association of Research Libraries, /{ARL Statistics}/ 2005–06, at http://www.arl.org/stats/annualsurveys/ar/stats/arlstats06.shtml. }~ This rate is four times higher than the inflation rate for those years. Some commercial journal publishers reap profits of nearly 40 percent a year.~{ Peter Suber, “Creating an Intellectual Commons through Open Access,” in Charlotte Hess and Elinor Ostrom, eds., /{Understanding Knowledge as a Commons: From Theory to Practice}/ (Cambridge, MA: MIT Press, 2007), p. 175. }~ By 2000 subscription rates were so crushing that the Association of American Universities and the Association of Research Libraries issued a joint statement that warned, “The current system of scholarly publishing has become too costly for the academic community to sustain.”~{ Association of Research Libraries, “Tempe Principles for Emerging Systems of Scholarly Publishing,” May 10, 2000, at http://www.arl.org/resources/pubs/tempe/index.shtml. }~ Three years later, the high price of journals prompted Harvard, the University of California, Cornell, MIT, Duke, and other elite research universities to cancel hundreds of journal subscriptions — a conspicuous act of rebellion by the library community. 
={libraries:“serials crisis”, and|Science Commons, and;Science Commons:libraries, and} As journal prices have risen, the appeal of OA publishing has only intensified. Unfortunately, migrating to OA journals is not simply an economic issue. Within academia, the reputation of a journal is deeply entwined with promotion and tenure decisions. A scientist who publishes an article in /{Cell}/ or /{Nature}/ earns far more prestige than she might for publishing in a little-known OA journal. @@ -2516,14 +2516,14 @@ So while publishing in OA journals may be economically attractive, it flouts the One of the first major salvos of the movement came in 2000, when biomedical scientists Harold E. Varmus, Patrick O. Brown, and Michael B. Eisen called on scientific publishers to make their literature available through free online public archives such as the U.S. National Library of Medicine’s PubMed Central. Despite garnering support from nearly 34,000 scientists in 180 countries, the measure did not stimulate the change sought. It did alert the scientific world, governments, and publishers about the virtues of OA publishing, however, and galvanized scientists to explore next steps. ={Brown, Patrick O.;Varmus, Harold E.} -At the time, a number of free, online peer-reviewed journals and free online archives were under way.~{ http://www.earlham.edu/~peters/fos/timeline.htm. }~ But much of the momentum for organized OA movement began in 2001, when the Open Society Institute convened a group of leading librarians, scientists, and other academics in Hungary. In February 2002 the group released the Budapest Open Access Initiative, a statement that formally describes “open access” as the freedom of users to “read, download, copy, distribute, print, search or link to the full texts of . . . 
articles, crawl them for indexing, pass them as data to software, or use them for any other lawful purpose, without financial, legal or technical barriers other than those inseparable from gaining access to the Internet itself.”~{ The Budapest Open Access Initiative can be found at http://www.soros.org/ openaccess. }~ Two subsequent statements, the Bethesda Declaration and the Berlin Declaration, in June 2003 and October 2003, respectively, expanded upon the definitions of open access and gave the idea new prominence. (Suber calls the three documents the “BBB definition” of open access.)~{ http://www.earlham.edu/~peters/fos/overview.htm. }~ +At the time, a number of free, online peer-reviewed journals and free online archives were under way.~{ http://www.earlham.edu/~peters/fos/timeline.htm. }~ But much of the momentum for organized OA movement began in 2001, when the Open Society Institute convened a group of leading librarians, scientists, and other academics in Hungary. In February 2002 the group released the Budapest Open Access Initiative, a statement that formally describes “open access” as the freedom of users to “read, download, copy, distribute, print, search or link to the full texts of . . . articles, crawl them for indexing, pass them as data to software, or use them for any other lawful purpose, without financial, legal or technical barriers other than those inseparable from gaining access to the Internet itself.”~{ The Budapest Open Access Initiative can be found at http://www.soros.org/openaccess. }~ Two subsequent statements, the Bethesda Declaration and the Berlin Declaration, in June 2003 and October 2003, respectively, expanded upon the definitions of open access and gave the idea new prominence. (Suber calls the three documents the “BBB definition” of open access.)~{ http://www.earlham.edu/~peters/fos/overview.htm. 
}~ ={Suber, Peter;Budapest Open Access Initiative (2002);libraries:open access movement, and} Creative Commons licenses have been critical tools in the evolution of OA publishing because they enable scientists and scholars to authorize in advance the sharing, copying, and reuse of their work, compatible with the BBB definition. The Attribution (BY) and Attribution-Non-Commercial (BY-NC) licenses are frequently used; many OA advocates regard the Attribution license as the preferred choice. The protocols for “metadata harvesting” issued by the Open Archives Initiative are another useful set of tools in OA publishing. When adopted by an OA journal, these standardized protocols help users more easily find research materials without knowing in advance which archives they reside in, or what they contain. There is no question that OA is transforming the market for scholarly publishing, especially as pioneering models develop. The Public Library of Science announced its first two open-access journals in December 2002. The journals represented a bold, high-profile challenge by highly respected scientists to the subscription-based model that has long dominated scientific publishing. Although Elsevier and other publishers scoffed at the economic model, the project has expanded and now publishes seven OA journals, for biology, computational biology, genetics, pathogens, and neglected tropical diseases, among others. -OA received another big boost in 2004 when the National Institutes for Health proposed that all NIH-funded research be made available for free one year after its publication in a commercial journal. The $28 billion that the NIH spends on research each year (more than the domestic budget of 142 nations!) results in about 65,000 peer-reviewed articles, or 178 every day. Unfortunately, commercial journal publishers succeeded in making the proposed OA policy voluntary. The battle continued in Congress, but it became clear that the voluntary approach was not working. 
Only 4 percent of researchers published their work under OA standards, largely because busy, working scientists did not consider it a priority and their publishers were not especially eager to help. So Congress in December 2007 required NIH to mandate open access for its research within a year of publication.~{ Peter Suber has an excellent account of the final OA legislation in /{SPARC Open Access Newsletter}/, no. 17, January 2, 2008, at http://www.earlham.edu/ ~peters/fos/newsletter/01-02-08.htm. }~ +OA received another big boost in 2004 when the National Institutes for Health proposed that all NIH-funded research be made available for free one year after its publication in a commercial journal. The $28 billion that the NIH spends on research each year (more than the domestic budget of 142 nations!) results in about 65,000 peer-reviewed articles, or 178 every day. Unfortunately, commercial journal publishers succeeded in making the proposed OA policy voluntary. The battle continued in Congress, but it became clear that the voluntary approach was not working. Only 4 percent of researchers published their work under OA standards, largely because busy, working scientists did not consider it a priority and their publishers were not especially eager to help. So Congress in December 2007 required NIH to mandate open access for its research within a year of publication.~{ Peter Suber has an excellent account of the final OA legislation in /{SPARC Open Access Newsletter}/, no. 17, January 2, 2008, at http://www.earlham.edu/~peters/fos/newsletter/01-02-08.htm. }~ ={National Institutes for Health (NIH)} What may sound like an arcane policy battle in fact has serious implications for ordinary Americans. The breast cancer patient seeking the best peer-reviewed articles online, or the family of a person with Huntington’s disease, can clearly benefit if they can acquire, for free, the latest medical research. 
Scientists, journalists, health-care workers, physicians, patients, and many others cannot access the vast literature of publicly funded scientific knowledge because of high subscription rates or per-article fees. A freely available body of online literature is the best, most efficient way to help science generate more reliable answers, new discoveries, and commercial innovations. @@ -2587,7 +2587,7 @@ It is still too early to judge how well the CC0 program is working, but initial 2~ The Neurocommons -Every day there is so much new scientific literature generated that it would take a single person 106 years to read it all.~{ Brian Athey, University of Michigan, presentation at Commons of Science conference, National Academy of Science, Washington, DC, October 3, 2006. }~ In a single year, over twenty-four thousand peer-reviewed journals publish about 2.5 million research articles.~{ Stevan Harnad, “Maximizing Research Impact Through Institutional and National Open-Access Self-Archiving Mandates,” /{Electronics & Computer Science E-Prints Repository}/, May 2006, available at http://eprints.ecs.soron.ac.uk/ 12093/02/harnad-crisrey.pdf. }~ Our ability to generate content has far outstripped our ability to comprehend it. We are suffering from a cognitive overload — one that can only be addressed by using software and computer networks in innovative ways to organize, search, and access information. For many years, Sir Tim Berners-Lee, the celebrated inventor of the World Wide Web, and his colleagues at the World Wide Web Consortium (W3C), based at MIT, have been trying to solve the problem of information overload by developing a “new layer” of code for the Web. +Every day there is so much new scientific literature generated that it would take a single person 106 years to read it all.~{ Brian Athey, University of Michigan, presentation at Commons of Science conference, National Academy of Science, Washington, DC, October 3, 2006. 
}~ In a single year, over twenty-four thousand peer-reviewed journals publish about 2.5 million research articles.~{ Stevan Harnad, “Maximizing Research Impact Through Institutional and National Open-Access Self-Archiving Mandates,” /{Electronics & Computer Science E-Prints Repository}/, May 2006, available at http://eprints.ecs.soron.ac.uk/12093/02/harnad-crisrey.pdf. }~ Our ability to generate content has far outstripped our ability to comprehend it. We are suffering from a cognitive overload — one that can only be addressed by using software and computer networks in innovative ways to organize, search, and access information. For many years, Sir Tim Berners-Lee, the celebrated inventor of the World Wide Web, and his colleagues at the World Wide Web Consortium (W3C), based at MIT, have been trying to solve the problem of information overload by developing a “new layer” of code for the Web. ={Berners-Lee, Tim;World Wide Web Consortium (W3C)} This visionary project, the so-called Semantic Web, aspires to develop a framework for integrating a variety of systems, so they can communicate with one another, machine to machine. The goal is to enable computers to identify and capture information from anywhere on the Web, and then organize the results in sophisticated and customized ways. “If you search for ‘signal transduction genes in parameter neurons,’ ” said John Wilbanks of Science Commons, “Google sucks. It will get you 190,000 Web pages.” The goal of the Semantic Web is to deliver a far more targeted and useful body of specialized information. @@ -2623,7 +2623,7 @@ The problem with a field like neuroscience, which has so many exploding frontier Science is not just about text and data, of course. It also involves lots of tangible /{stuff}/ needed to conduct experiments. Typical materials include cell lines, monoclonal antibodies, reagents, animal models, synthetic materials, nano-materials, clones, laboratory equipment, and much else. 
Here, too, sharing and collaboration are important to the advance of science. But unlike digital bits, which are highly malleable, the physical materials needed for experiments have to be located, approved for use, and shipped. Therein lies another tale of high transaction costs impeding the progress of science. As Thinh Nguyen, counsel for Science Commons, describes the problem: ={Nguyen, Thinh+1} -_1 The ability to locate materials based on their descriptions in journal articles is often limited by lack of sufficient information about origin and availability, and there is no standard citation for such materials. In addition, the process of legal negotiation that may follow can be lengthy and unpredictable. This can have important implications for science policy, especially when delays or inability to obtain research materials result in lost time, productivity and research opportunities.~{ Thinh Nguyen, “Science Commons: Material Transfer Agreement Project,” /{Innovations}/, Summer 2007, pp. 137–43, at http://www.mitpressjournals.org/ doi/pdf/10.1162/itgg.2007.2.3.137. }~ +_1 The ability to locate materials based on their descriptions in journal articles is often limited by lack of sufficient information about origin and availability, and there is no standard citation for such materials. In addition, the process of legal negotiation that may follow can be lengthy and unpredictable. This can have important implications for science policy, especially when delays or inability to obtain research materials result in lost time, productivity and research opportunities.~{ Thinh Nguyen, “Science Commons: Material Transfer Agreement Project,” /{Innovations}/, Summer 2007, pp. 137–43, at http://www.mitpressjournals.org/doi/pdf/10.1162/itgg.2007.2.3.137. }~ To the nonscientist, this transactional subculture is largely invisible. But to scientists whose lab work requires access to certain physical materials, the uncertainties, variations, and delays can be crippling. 
Normally, the transfer of materials from one scientist to another occurs through a Material Transfer Agreement, or MTA. The technology transfer office at one research university will grant, or not grant, an MTA so that a cell line or tissue specimen can be shipped to a researcher at another university. Typically, permission must be granted for the researcher to publish, disseminate, or use research results, and to license their use for commercialization. ={Material Transfer Agreements (MTAs)+7;science:Material Transfer Agreements (MTAs)+7} @@ -2706,7 +2706,7 @@ MIT also realized the dangers of propertizing college courses and teaching mater School officials stressed that using MIT courseware on the Web is not the same as an MIT education. Indeed, the free materials underscore the fact that what really distinguishes an MIT education is one’s participation in a learning community. Unlike the Connexions content, MIT’s OpenCourseWare is a fairly static set of course materials; they are not modular or constantly updated. In addition, they are licensed under a CC BY-NC-SA (AttributionNonCommercial-ShareAlike.) license. While this prevents businesses from profiting from MIT course materials, it also prevents other educational institutions from remixing them into new courses or textbooks. ={communities:learning;education:learning community, in a} -Despite these limitations, MIT’s OCW materials have been profoundly influential. The course Laboratory in Software Engineering, for example, has been used by students in Karachi, Pakistan; the island of Mauritius; Vienna, Austria; and Kansas City, Missouri, among scores of other places around the world.~{ David Diamond, “MIT Everyware,” /{Wired}/, September 2003. }~ Ten of the leading Chinese universities now use hundreds of MIT courses, leading three noted OER experts, Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, to conclude that MIT’s OCW “has had a major impact on Chinese education.”~{ Daniel E. 
Atkins, John Seely Brown, and Allen L. Hammond, “A Review of the Open Educational Resources (OER) Movement: Achievements, Challenges and New Opportunities,” February 2007, at http://www.oerderves .org/wp-content/uploads/2007/03/a-review-of-the-open-educational-re sources-oer-movement_final.pdf, p. 23. }~ Noting the life-changing impact that OCW has had on students in rural villages in China and West Africa, Atkins and his co-authors cite “the power of the OCW as a means for cross-cultural engagement.” Over the course of four years, from October 2003 through 2007, the OCW site received nearly 16 million visits; half were newcomers and half were repeat visits. +Despite these limitations, MIT’s OCW materials have been profoundly influential. The course Laboratory in Software Engineering, for example, has been used by students in Karachi, Pakistan; the island of Mauritius; Vienna, Austria; and Kansas City, Missouri, among scores of other places around the world.~{ David Diamond, “MIT Everyware,” /{Wired}/, September 2003. }~ Ten of the leading Chinese universities now use hundreds of MIT courses, leading three noted OER experts, Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, to conclude that MIT’s OCW “has had a major impact on Chinese education.”~{ Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, “A Review of the Open Educational Resources (OER) Movement: Achievements, Challenges and New Opportunities,” February 2007, at http://www.oerderves.org/wp-content/uploads/2007/03/a-review-of-the-open-educational-resources-oer-movement_final.pdf, p. 23. }~ Noting the life-changing impact that OCW has had on students in rural villages in China and West Africa, Atkins and his co-authors cite “the power of the OCW as a means for cross-cultural engagement.” Over the course of four years, from October 2003 through 2007, the OCW site received nearly 16 million visits; half were newcomers and half were repeat visits. 
={Atkins, Daniel E.;Brown, John Seely;Hammond, Allen L.;education:OER movement;Open Educational Resources (OER) movement} OCW is becoming a more pervasive international ethic now that more than 120 educational institutions in twenty nations have banded together to form the OpenCourseWare Consortium. Its goal is to create “a broad and deep body of open educational content using a shared model.”~{ OpenCourseWare Consortium, at http://www.ocwconsortium.org. }~ Although plenty of universities are still trying to make money from distance education courses, a growing number of colleges and universities realize that OCW helps faculty connect with other interested faculty around the world, build a college’s public recognition and recruitment, and advance knowledge as a public good. @@ -2751,7 +2751,7 @@ Next, Delia Browne of the National Education Access Licence for Schools, or NEAL /{Tweet! Tweet!}/ Neeru Paharia, a former executive director of the Creative Commons, introduced her fledgling project, AcaWiki. Paharia is concerned that too many academic articles are locked behind paywalls and are not readily accessible to everyone. AcaWiki plans to recruit graduate students, academics, and citizens to write summaries of academic papers. Since many grad students make abstracts as part of their routine research, it would not be difficult to pool thousands of summaries into a highly useful, searchable Web collection. ={Paharia, Neeru} -The speed geekers in Dubrovnik were sweaty and overstimulated at the end, but gratified to learn that there are a great many OER projects under way throughout the world; they just aren’t very well known or coordinated with one another. Two of the participants — J. 
Philipp Schmidt of the University of the Western Cape and Mark Surman of the Shuttleworth Foundation, both of South Africa — conceded that “there is still a great deal of fuzziness about what this movement includes,” and that “we don’t yet have a good ‘map’ of open education.” But the significance of grassroots initiatives is unmistakable. “There is a movement afoot here,” they concluded, “and it is movement with an aim no less than making learning accessible and adaptable for all.”~{ J. Philipp Schmidt and Mark Surman, “Open Sourcing Education: Learning and Wisdom from the iSummit 2007,” September 2, 2007, at http://icommons .org/download_banco/open-sourcing-education-learning-and-wisdom-from -isummit-2007. }~ “Education,” another participant predicted, “will drive the future of the Commons movement.” +The speed geekers in Dubrovnik were sweaty and overstimulated at the end, but gratified to learn that there are a great many OER projects under way throughout the world; they just aren’t very well known or coordinated with one another. Two of the participants — J. Philipp Schmidt of the University of the Western Cape and Mark Surman of the Shuttleworth Foundation, both of South Africa — conceded that “there is still a great deal of fuzziness about what this movement includes,” and that “we don’t yet have a good ‘map’ of open education.” But the significance of grassroots initiatives is unmistakable. “There is a movement afoot here,” they concluded, “and it is movement with an aim no less than making learning accessible and adaptable for all.”~{ J. Philipp Schmidt and Mark Surman, “Open Sourcing Education: Learning and Wisdom from the iSummit 2007,” September 2, 2007, at http://icommons.org/download_banco/open-sourcing-education-learning-and-wisdom-from-isummit-2007. }~ “Education,” another participant predicted, “will drive the future of the Commons movement.” ={Schmidt, J. 
Philipp;Surman, Mark} In a sign that the OER movement is getting serious as a movement, thirty of its leaders met in Cape Town, South Africa, and in January 2008 issued the Cape Town Open Education Declaration.~{ http://www.capetowndeclaration.org. Schmidt and Surman, “Open Sourcing Education.” }~ The declaration is a call to make learning materials more freely available online, and to improve education and learning by making them more collaborative, flexible, and locally relevant. The declaration outlines the challenge: “Many educators remain unaware of the growing pool of open educational resources. Many governments and educational institutions are either unaware or unconvinced of the benefits of open education. Differences among licensing schemes for open resources create confusion and incompatibility. And, of course, the majority of the world does not have access to the computers and networks that are integral to most current open education efforts.” @@ -2829,15 +2829,15 @@ I call the new sorts of citizen behaviors “history-making” because ordinary These behaviors exist in some measure in offline realms, of course, but they are a growing norm in the digital republic. A few examples will suffice to make the point. The Web helped create and propel a handful of cause-oriented candidacies — Howard Dean, Ron Paul, Ned Lamont~[* Lamont was an insurgent candidate for U.S. Senate from Connecticut challenging Senator Joseph Lieberman in a campaign that helped culturally validate opposition to the U.S. war in Iraq.]~ — who rapidly raised enormous sums of money, galvanized large numbers of passionate supporters, and altered mainstream political discourse. Although none prevailed in their races, Barack Obama made a quantum leap in online organizing in 2008, raising $50 million in a single month from supporters via the Internet. 
Obama’s candidacy was buoyed by the rise of the “netroots” — Web activists with a progressive political agenda— whose size and credibility enable them to sway votes in Congress, raise significant amounts of campaign funds, and influence local activism. The stories are now legion about blogs affecting political life — from the resignation of Senate majority leader Trent Lott after he praised the racist past of Senator Strom Thurmond at his hundredth birthday party, to the electoral defeat of Senate candidate George Allen after his uttering of an ethnic slur, /{macaca}/, was posted on YouTube. ={Dean, Howard;Lamont, Ned;Obama, Barack;Paul, Ron;Internet:political campaigns on;Allen, George;Lott, Trent;YouTube} -Citizens are now able to initiate their own policy initiatives without first persuading the mainstream media or political parties to validate them as worthy. For example, a handful of citizens troubled by evidence of “hackable” electronic voting machines exposed the defects of the Diebold machines and the company’s efforts to thwart public scrutiny and reforms.~{ See, e.g.,Yochai Benkler, /{The Wealth of Networks}/, pp. 225–32. }~ (The effort has led to a nationwide citizen effort, www.blackboxvoting.org, to expose security problems with voting machines and vote counting.) An ad hoc group of activists, lawyers, academics, and journalists spontaneously formed around a public wiki dealing with the lethal side effects of a bestselling antipsychotic drug Zyprexa, and the manufacturer’s allegedly illegal conduct in suppressing evidence of the drug’s risks. (Prosecutors later sought a $1 billion fine against Eli Lilly.)~{ Jonah Bossewitch, “The Zyprexa Kills Campaign: Peer Production and the Frontiers of Radical Pedagogy,” /{Re-public}/, at http://www.re-public.gr/en/ ?p=144. }~ +Citizens are now able to initiate their own policy initiatives without first persuading the mainstream media or political parties to validate them as worthy. 
For example, a handful of citizens troubled by evidence of “hackable” electronic voting machines exposed the defects of the Diebold machines and the company’s efforts to thwart public scrutiny and reforms.~{ See, e.g.,Yochai Benkler, /{The Wealth of Networks}/, pp. 225–32. }~ (The effort has led to a nationwide citizen effort, www.blackboxvoting.org, to expose security problems with voting machines and vote counting.) An ad hoc group of activists, lawyers, academics, and journalists spontaneously formed around a public wiki dealing with the lethal side effects of a bestselling antipsychotic drug Zyprexa, and the manufacturer’s allegedly illegal conduct in suppressing evidence of the drug’s risks. (Prosecutors later sought a $1 billion fine against Eli Lilly.)~{ Jonah Bossewitch, “The Zyprexa Kills Campaign: Peer Production and the Frontiers of Radical Pedagogy,” /{Re-public}/, at http://www.re-public.gr/en/?p=144. }~ The Web is giving individuals extra-institutional public platforms for articulating their own facts and interpretations of culture. It is enabling them to go far beyond voting and citizen vigilance, to mount citizen-led interventions in politics and governance. History-making citizens can compete with the mass media as an arbiter of cultural and political reality. They can expose the factual errors and lack of independence of /{New York Times}/ reporters; reveal the editorial biases of the “MSM” — mainstream media — by offering their own videotape snippets on YouTube; they can even be pacesetters for the MSM, as the blog Firedoglake did in its relentless reporting of the “Scooter” Libby trial (Libby, one of Vice President Cheney’s top aides, was convicted of obstruction of justice and perjury in connection with press leaks about CIA agent Valerie Plame.) Citizen-journalists, amateur videographers, genuine experts who have created their own Web platforms, parodists, dirty tricksters, and countless others are challenging elite control of the news agenda. 
It is no wonder that commercial journalism is suffering an identity crisis. Institutional authority is being trumped by the “social warranting” of online communities, many of which function as a kind of participatory meritocracy. ={Libby, “Scooter”;YouTube} -History-making citizenship is not without its deficiencies. Rumors, misinformation, and polarized debate are common in this more open, unmediated environment. Its crowning virtue is its potential ability to mobilize the energies and creativity of huge numbers of people. GNU/Linux improbably drew upon the talents of tens of thousands of programmers; certainly our contemporary world with its countless problems could use some of this elixir— platforms that can elicit distributed creativity, specialized talent, passionate commitment, and social legitimacy. In 2005 Joi Ito, then chairman of the board of the Creative Commons, wrote: “Traditional forms of representative democracy can barely manage the scale, complexity and speed of the issues in the world today. Representatives of sovereign nations negotiating with each other in global dialog are limited in their ability to solve global issues. The monolithic media and its increasingly simplistic representation of the world cannot provide the competition of ideas necessary to reach informed, viable consensus.”~{ Joichi Ito, “Emergent Democracy,” chapter 1 in John Lebkowsky and Mitch Ratcliffe, eds., /{Extreme Democracy}/ (Durham, NC: Lulu.com, 2005), at http:// extremedemocracy.com/chapters/Chapter%20One-Ito.pdf. }~ Ito concluded that a new, not-yetunderstood model of “emergent democracy” is likely to materialize as the digital revolution proceeds. A civic order consisting of “intentional blog communities, ad hoc advocacy coalitions and activist networks” could begin to tackle many urgent problems. +History-making citizenship is not without its deficiencies. Rumors, misinformation, and polarized debate are common in this more open, unmediated environment. 
Its crowning virtue is its potential ability to mobilize the energies and creativity of huge numbers of people. GNU/Linux improbably drew upon the talents of tens of thousands of programmers; certainly our contemporary world with its countless problems could use some of this elixir— platforms that can elicit distributed creativity, specialized talent, passionate commitment, and social legitimacy. In 2005 Joi Ito, then chairman of the board of the Creative Commons, wrote: “Traditional forms of representative democracy can barely manage the scale, complexity and speed of the issues in the world today. Representatives of sovereign nations negotiating with each other in global dialog are limited in their ability to solve global issues. The monolithic media and its increasingly simplistic representation of the world cannot provide the competition of ideas necessary to reach informed, viable consensus.”~{ Joichi Ito, “Emergent Democracy,” chapter 1 in John Lebkowsky and Mitch Ratcliffe, eds., /{Extreme Democracy}/ (Durham, NC: Lulu.com, 2005), at http://extremedemocracy.com/chapters/Chapter%20One-Ito.pdf. }~ Ito concluded that a new, not-yetunderstood model of “emergent democracy” is likely to materialize as the digital revolution proceeds. A civic order consisting of “intentional blog communities, ad hoc advocacy coalitions and activist networks” could begin to tackle many urgent problems. ={Ito, Joichi;NU/Linux;democracy:emergent+1|traditional forms of+5} -Clearly, the first imperative in developing a new framework to host representative democracy is to ensure that the electronic commons be allowed to exist in the first place. Without net neutrality, citizens could very well be stifled in their ability to participate on their own terms, in their own voices. 
If proprietary policies or technologies are allowed to override citizen interests (Verizon Wireless in 2007 prevented the transmission of abortion rights messages on its text-messaging system, for example~{ Adam Liptak, “Verizon Reverses Itself on Abortion Messages,” /{New York Times}/, September 27, 2007, at http://www.nytimes.com/2007/09/27/busi ness/27cnd-verizon.html. }~), then any hope for historymaking citizenship will be stillborn. +Clearly, the first imperative in developing a new framework to host representative democracy is to ensure that the electronic commons be allowed to exist in the first place. Without net neutrality, citizens could very well be stifled in their ability to participate on their own terms, in their own voices. If proprietary policies or technologies are allowed to override citizen interests (Verizon Wireless in 2007 prevented the transmission of abortion rights messages on its text-messaging system, for example~{ Adam Liptak, “Verizon Reverses Itself on Abortion Messages,” /{New York Times}/, September 27, 2007, at http://www.nytimes.com/2007/09/27/business/27cnd-verizon.html. }~), then any hope for historymaking citizenship will be stillborn. Beyond such near-term concerns, however, the emerging digital republic is embroiled in a much larger structural tension with –terrestrial “real world” governments. The commoner is likely to regard the rules forged in online commons as more legitimate and appropriate than those mandated by government. Again, David R. Johnson: ={Johnson, David R.} @@ -2862,7 +2862,7 @@ If Lessig is going to succeed in using the tools of the digital republic to refo It is hard to get a fix on this long-term transformation because the struggles to actualize an emergent democracy, as envisioned by Ito, are strangely apolitical and intensely political at the same time. 
They are apolitical in the sense that commoners are chiefly focused on the pragmatic technical challenges of their individual projects; they are not usually involved in official policymaking in legislatures or before courts and government agencies. Yet free software and free culture projects are highly political in the sense that commons projects, taken together over time, represent a profound challenge to the conventional market order and political culture. For example, Wikitravel, Jamendo, and open-access journals arguably provide better value than the commercial alternatives. The success of free software punctures the foundational assumptions of copyright law, making it easier to challenge new expansions of copyright law. Participatory commons are diverting viewer “eyeballs” away from commercial media and its genres of culture, spurring the growth of new hybrid forms of user-generated content. These kinds of effects, which advance project by project, month by month, are likely to have a longterm transformational impact. A new social ethic is taking root. ={Ito, Joichi;free software:FOSS/FLOSS+2;FOSS/FLOSS+2;copyright law:assumptions of;democracy:emergent} -Free culture, though culturally progressive, is fairly nonjudgmental about ideological politics. When American conservatives decided they wanted to start Conservapedia because they found Wikipedia too liberal, Wikipedia founder Jimmy Wales was happy to bless it: “Free culture knows no bounds . . . We welcome the reuse of our work to build variants. That’s directly in line with our mission.”~{ Robert Mackey, “Conservapedia: The Word Says it All,” /{New York Times}/, March 8, 2007, at http://thelede.blogs.nytimes.com/2007/03/08/conserva pedia-the-word-says-it-all/?scp=1&sq=wales+conservapedia. }~ Anthropology professor E. Gabriella Coleman has found a similar ecumenicism in the free software movement, which is agnostic about conventional politics but adamant about its own polity of freedom.~{ E. 
Gabriella Coleman, “The Political Agnosticism of Free and Open Source Software and the Inadvertent Politics of Contrast,” /{Anthropology Quarterly}/ 77, no. 3 (Summer 2004), pp. 507–19. See also her Ph.D. dissertation, “The Social Construction of Freedom in Free and Open Source Software: Hackers, Ethics and the Liberal Tradition,” abstract at http://healthhacker.org/biella/cole man-abstract.pdf. }~ Thus, the FOSS movement has no position with respect to social justice or globalization issues, but it does demand a strict commitment to the “four freedoms” of software development. Johan Söderberg makes much the same case in his book /{Hacking Capitalism}/.~{ Johan Söderberg, /{Hacking Capitalism: The Free and Open Source Software Movement}/ (New York: Routledge, 2007). }~ +Free culture, though culturally progressive, is fairly nonjudgmental about ideological politics. When American conservatives decided they wanted to start Conservapedia because they found Wikipedia too liberal, Wikipedia founder Jimmy Wales was happy to bless it: “Free culture knows no bounds . . . We welcome the reuse of our work to build variants. That’s directly in line with our mission.”~{ Robert Mackey, “Conservapedia: The Word Says it All,” /{New York Times}/, March 8, 2007, at http://thelede.blogs.nytimes.com/2007/03/08/conservapedia-the-word-says-it-all/?scp=1&sq=wales+conservapedia. }~ Anthropology professor E. Gabriella Coleman has found a similar ecumenicism in the free software movement, which is agnostic about conventional politics but adamant about its own polity of freedom.~{ E. Gabriella Coleman, “The Political Agnosticism of Free and Open Source Software and the Inadvertent Politics of Contrast,” /{Anthropology Quarterly}/ 77, no. 3 (Summer 2004), pp. 507–19. See also her Ph.D. dissertation, “The Social Construction of Freedom in Free and Open Source Software: Hackers, Ethics and the Liberal Tradition,” abstract at http://healthhacker.org/biella/coleman-abstract.pdf. 
}~ Thus, the FOSS movement has no position with respect to social justice or globalization issues, but it does demand a strict commitment to the “four freedoms” of software development. Johan Söderberg makes much the same case in his book /{Hacking Capitalism}/.~{ Johan Söderberg, /{Hacking Capitalism: The Free and Open Source Software Movement}/ (New York: Routledge, 2007). }~ ={Coleman, E. Gabriella;Wales, Jimmy;Söderberg, Johan} As projects like GNU/Linux, Wikipedia, open courseware, open-access journals, open databases, municipal Wi-Fi, collections of CC-licensed content, and other commons begin to cross-link and coalesce, the commons paradigm is migrating from the margins of culture to the center. The viral spiral, after years of building its infrastructure and social networks, may be approaching a Cambrian explosion, an evolutionary leap. @@ -2892,7 +2892,7 @@ The GPL and the CC licenses are ingenious hacks because they navigate this indet The beauty of this “ideological straddle” is that it enables a diverse array of players into the same tent without inciting sectarian acrimony. (There is some, of course, but mostly at the margins.) Ecumenical tolerance is the norm because orthodoxies cannot take root at the periphery where innovation is constantly being incubated. In any case, there is a widespread realization in the networked world that shared goals are likely to require variable implementations, depending on specific needs and contexts. -It may appear that the free software hacker, blogger, tech entrepreneur, celebrity musician, college professor, and biological researcher have nothing in common. In truth, each is participating in social practices that are incrementally and collectively bringing into being a new sort of democratic polity. French sociologist Bruno Latour calls it the “pixellation of politics,”~{ Bruno Latour, “We Are All Reactionaries Today,” Re-public, at http://www .republic.gr/en/?p=129. 
}~ which conjures up a pointillist painting slowly materializing. The new polity is more open, participatory, dynamically responsive, and morally respected by “the governed” than the nominal democracies of nation-states. The bureaucratic state tends to be too large and remote to be responsive to local circumstances and complex issues; it is ridiculed and endured. But who dares to aspire to transcend it? +It may appear that the free software hacker, blogger, tech entrepreneur, celebrity musician, college professor, and biological researcher have nothing in common. In truth, each is participating in social practices that are incrementally and collectively bringing into being a new sort of democratic polity. French sociologist Bruno Latour calls it the “pixellation of politics,”~{ Bruno Latour, “We Are All Reactionaries Today,” Re-public, at http://www.re-public.gr/en/?p=129. }~ which conjures up a pointillist painting slowly materializing. The new polity is more open, participatory, dynamically responsive, and morally respected by “the governed” than the nominal democracies of nation-states. The bureaucratic state tends to be too large and remote to be responsive to local circumstances and complex issues; it is ridiculed and endured. But who dares to aspire to transcend it? 
It therefore has a foundational support and energy that can outperform “mainstream” institutions. diff --git a/data/v2/samples/viral_spiral.david_bollier.sst b/data/v2/samples/viral_spiral.david_bollier.sst index e5c9ace..8e0be7c 100644 --- a/data/v2/samples/viral_spiral.david_bollier.sst +++ b/data/v2/samples/viral_spiral.david_bollier.sst @@ -103,7 +103,7 @@ It is perilous to generalize about a movement that has so many disparate parts p Yet the people who are inventing new commons have some deeper aspirations and allegiances. They glimpse the liberating potential of the Internet, and they worry about the totalizing inclinations of large corporations and the state, especially their tendency to standardize and coerce behavior. They object as well to processes that are not transparent. They dislike the impediments to direct access and participation, the limitations of credentialed expertise and arbitrary curbs on people’s freedom. -One of the first major gatherings of international commoners occurred in June 2006, when several hundred people from fifty nations converged on Rio de Janeiro, Brazil, for the iCommons Summit. The people of this multinational, eclectic vanguard blend the sophistication of the establishment in matters of power and politics with the bravado and playfulness of Beat poets. There were indie musicians who can deconstruct the terms of a record company licensing agreement with Talmudic precision. There were Web designers who understand the political implications of arcane rules made by the World Wide Web Consortium, a technical standards body. The lawyers and law professors who discourse about Section 114 of the Copyright Act are likely to groove on the remix career of Danger Mouse and the appropriationist antics of Negativland, a sound-collage band. 
James Boyle and Jennifer Jenkins, two law scholars at Duke Law School, even published a superhero comic book, /{Down by Law!}/, which demystifies the vagaries of the “fair use doctrine” through a filmmaker character resembling video game heroine Lara Croft.~{Keith Aoki, James Boyle, Jennifer Jenkins, /{Down by Law!}/ at http://www .duke.edu/cspd/comics.}~ (Fair use is a provision of copyright law that makes it legal to excerpt portions of a copyrighted work for noncommercial, educational, and personal purposes.) +One of the first major gatherings of international commoners occurred in June 2006, when several hundred people from fifty nations converged on Rio de Janeiro, Brazil, for the iCommons Summit. The people of this multinational, eclectic vanguard blend the sophistication of the establishment in matters of power and politics with the bravado and playfulness of Beat poets. There were indie musicians who can deconstruct the terms of a record company licensing agreement with Talmudic precision. There were Web designers who understand the political implications of arcane rules made by the World Wide Web Consortium, a technical standards body. The lawyers and law professors who discourse about Section 114 of the Copyright Act are likely to groove on the remix career of Danger Mouse and the appropriationist antics of Negativland, a sound-collage band. James Boyle and Jennifer Jenkins, two law scholars at Duke Law School, even published a superhero comic book, /{Down by Law!}/, which demystifies the vagaries of the “fair use doctrine” through a filmmaker character resembling video game heroine Lara Croft.~{Keith Aoki, James Boyle, Jennifer Jenkins, /{Down by Law!}/ at http://www.duke.edu/cspd/comics. }~ (Fair use is a provision of copyright law that makes it legal to excerpt portions of a copyrighted work for noncommercial, educational, and personal purposes.) 
={commoners:gatherings of} 2~ The Rise of Socially Created Value @@ -117,10 +117,10 @@ This is why so many ordinary people — without necessarily having degrees, inst Hugh McGuire, a Montreal-based writer and Web designer, is one. In 2005, he started LibriVox, a digital library of free public-domain audio books that are read and recorded by volunteers. More than ten thousand people a day visit the Web site to download audio files of Twain, Kafka, Shakespeare, Dostoyevsky, and others, in nearly a dozen languages.~{ http://www.librivox.org. }~ The Faulkes Telescope Project in Australia lets high school students connect with other students, and with professional astronomers, to scan the skies with robotic, online telescopes.~{ http://faulkes-telescope.com. }~ In a similar type of learning commons, the Bugscope project in the United States enables students to operate a scanning electronic microscope in real time, using a simple Web browser on a classroom computer connected to the Internet.~{ http://bugscope.beckman.uiuc.edu. }~ ={Bugscope;LibriVox;McGuire, Hugh;Faulkes Telescope Project} -Thousands of individual authors, musicians, and filmmakers are using Web tools and Creative Commons licenses to transform markets for creative works — or, more accurately, to blend the market and commons into integrated hybrids. A nonprofit humanitarian group dedicated to doing reconstructive surgery for children in poor countries, Interplast, produced an Oscar-winning film, /{A Story of Healing}/, in 1997. Ten years later, it released the film under a Creative Commons license as a way to publicize Interplast’s work while retaining ownership of the film: a benefit for both film buffs and Interplast.~{ http://www.interplast.org and http://creativecommons.org/press-releases/ 2007/04/%E2%80%9Ca-story-of-healing%E2%80%9D-becomes-first-acad emy-award%C2%AE-winning-film-released-under-a-creative-commons-li cense. 
}~ +Thousands of individual authors, musicians, and filmmakers are using Web tools and Creative Commons licenses to transform markets for creative works — or, more accurately, to blend the market and commons into integrated hybrids. A nonprofit humanitarian group dedicated to doing reconstructive surgery for children in poor countries, Interplast, produced an Oscar-winning film, /{A Story of Healing}/, in 1997. Ten years later, it released the film under a Creative Commons license as a way to publicize Interplast’s work while retaining ownership of the film: a benefit for both film buffs and Interplast.~{ http://www.interplast.org and http://creativecommons.org/press-releases/2007/04/%E2%80%9Ca-story-of-healing%E2%80%9D-becomes-first-academy-award%C2%AE-winning-film-released-under-a-creative-commons-license. }~ ={Interplast} -Scoopt, a Glasgow, Scotland–based photography agency, acts as a broker to help bloggers and amateurs sell newsworthy photos and videos to the commercial media.~{ http://www.scoopt.com. 
}~ The Boston band Two Ton Shoe released its music on the Web for free to market its concerts. Out of the blue, a South Korean record label called one day to say it loved the band and could it come over to Seoul, all expenses paid, to perform four concerts? Each one sold out.~{ http://www.twotonshoe.com/news.html. }~ Boing Boing blogger and cyber-activist Cory Doctorow released his 2003 science-fiction novel, /{Down and Out in the Magic Kingdom}/, under a CC license, reaping a whirlwind of worldwide exposure.~{ See Doctorow’s preface to the second release of the book, February 12, 2004, Tor Books. See also his blog Craphound.com, September 9, 2006, at http://www.craphound.com/?=p=1681. }~ ={Doctorow, Cory;Scoopt} 2~ The Commoners Build a Digital Republic of Their Own @@ -134,7 +134,7 @@ The commoners differ from most of their corporate brethren in their enthusiasm f It is all very well to spout such lofty goals. But how to actualize them? That is the story that the following pages recount. It has been the work of a generation, some visionary leaders, and countless individuals to articulate a loosely shared vision, build the infrastructure, and develop the social practices and norms. This project has not been animated by a grand political ideology, but rather is the result of countless initiatives, grand and incremental, of an extended global family of hackers, lawyers, bloggers, artists, and other supporters of free culture. ={commons:political implications of+3} -And yet, despite its focus on culture and its aversion to conventional politics, the growth of this movement is starting to have political implications. In an influential 2003 essay, James F. Moore announced the arrival of “an emerging second superpower.”~{ James F. Moore, “The Second Superpower Rears its Beautiful Head,” March 31, 2003, available at http://cyber.law.harvard.edu/people/jmoore/second superpower.html. 
}~ It was not a nation, but the coalescence of people from around the world who were asserting common values, and forming new public identities, via online networks. The people of this emerging “superpower,” Moore said, are concerned with improving the environment, public health, human rights, and social development. He cited as early examples the international campaign to ban land mines and the Seattle protests against the World Trade Organization in 1999. The power and legitimacy of this “second superpower” do not derive from the constitutional framework of a nation-state, but from its ability to capture and project people’s everyday feelings, social values, and creativity onto the world stage. Never in history has the individual had such cheap, unfettered access to global audiences, big and small. +And yet, despite its focus on culture and its aversion to conventional politics, the growth of this movement is starting to have political implications. In an influential 2003 essay, James F. Moore announced the arrival of “an emerging second superpower.”~{ James F. Moore, “The Second Superpower Rears its Beautiful Head,” March 31, 2003, available at http://cyber.law.harvard.edu/people/jmoore/secondsuperpower.html. }~ It was not a nation, but the coalescence of people from around the world who were asserting common values, and forming new public identities, via online networks. The people of this emerging “superpower,” Moore said, are concerned with improving the environment, public health, human rights, and social development. He cited as early examples the international campaign to ban land mines and the Seattle protests against the World Trade Organization in 1999. The power and legitimacy of this “second superpower” do not derive from the constitutional framework of a nation-state, but from its ability to capture and project people’s everyday feelings, social values, and creativity onto the world stage. 
Never in history has the individual had such cheap, unfettered access to global audiences, big and small. ={Moore, James} The awakening superpower described in /{Viral Spiral}/ is not a conventional political or ideological movement that focuses on legislation and a clutch of “issues.” While commoners do not dismiss these activities as unimportant, most are focused on the freedom of their peer communities to create, communicate, and share. When defending these freedoms requires wading into conventional politics and law, they are prepared to go there. But otherwise, the commoners are more intent on building a kind of parallel social order, inscribed within the regnant political economy but animated by their own values. Even now, the political/cultural sensibilities of this order are only vaguely understood by governments, politicians, and corporate leaders. The idea of “freedom without anarchy, control without government, consensus without power” — as Lawrence Lessig put it in 1999~{ Lawrence Lessig, /{Code and Other Laws of Cyberspace}/ (New York: Basic Books, 1999), p. 4. }~ —is just too counterintuitive for the conventionally minded to take seriously. @@ -151,7 +151,7 @@ The various industries that rely on copyrights have welcomed this development be The Internet has profoundly disrupted this model of market production, however. The Internet is a distributed media system of low-cost capital (your personal computer) strung together with inexpensive transmission and software. Instead of being run by a centralized corporation that relies upon professionals and experts above all else, the Internet is a noncommercial infrastructure that empowers amateurs, citizens, and ordinary individuals in all their quirky, authentic variety. The mass media have long regarded people as a commodifiable audience to be sold to advertisers in tidy demographic units. 
={Internet:empowerment by+2} -Now, thanks to the Internet, “the people formerly known as the audience” (in Jay Rosen’s wonderful phrase) are morphing into a differentiated organism of flesh-and-blood, idiosyncratic individuals, as if awakening from a spell. Newly empowered to speak as they wish, in their own distinctive, personal voices to a global public of whoever cares to listen, people are creating their own transnational tribes. They are reclaiming culture from the tyranny of mass-media economics and national boundaries. In Lessig’s words, Internet users are overthrowing the “read only” culture that characterized the “weirdly totalitarian” communications of the twentieth century. In its place they are installing the “read/write” culture that invites everyone to be a creator, as well as a consumer and sharer, of culture.~{ Lawrence Lessig, “The Read-Write Society,” delivered at the Wizards of OS4 conference in Berlin, Germany, on September 5, 2006. Available at http:// www.wizards-of-os.org/programm/panels/authorship_amp_culture/keynote _the_read_write_society/the_read_write_society.html. }~ A new online citizenry is arising, one that regards its socially negotiated rules and norms as at least as legitimate as those established by conventional law. +Now, thanks to the Internet, “the people formerly known as the audience” (in Jay Rosen’s wonderful phrase) are morphing into a differentiated organism of flesh-and-blood, idiosyncratic individuals, as if awakening from a spell. Newly empowered to speak as they wish, in their own distinctive, personal voices to a global public of whoever cares to listen, people are creating their own transnational tribes. They are reclaiming culture from the tyranny of mass-media economics and national boundaries. In Lessig’s words, Internet users are overthrowing the “read only” culture that characterized the “weirdly totalitarian” communications of the twentieth century. 
In its place they are installing the “read/write” culture that invites everyone to be a creator, as well as a consumer and sharer, of culture.~{ Lawrence Lessig, “The Read-Write Society,” delivered at the Wizards of OS4 conference in Berlin, Germany, on September 5, 2006. Available at http://www.wizards-of-os.org/programm/panels/authorship_amp_culture/keynote_the_read_write_society/the_read_write_society.html. }~ A new online citizenry is arising, one that regards its socially negotiated rules and norms as at least as legitimate as those established by conventional law. ={Rosen, Jay} Two profoundly incommensurate media systems are locked in a struggle for survival or supremacy, depending upon your perspective or, perhaps, mutual accommodation. For the moment, we live in a confusing interregnum — a transition that pits the dwindling power and often desperate strategies of Centralized Media against the callow, experimental vigor of Internet-based media. This much is clear, however: a world organized around centralized control, strict intellectual property rights, and hierarchies of credentialed experts is under siege. A radically different order of society based on open access, decentralized creativity, collaborative intelligence, and cheap and easy sharing is ascendant. Or to put it more precisely, we are stumbling into a strange hybrid order that combines both worlds — mass media and online networks — on terms that have yet to be negotiated. @@ -197,12 +197,12 @@ In the video world, too, the remix impulse has found expression in its own form The key insight about many open-platform businesses is that they no longer look to copyright or patent law as tools to assert market control. Their goal is not to exclude others, but to amass large communities. 
Open businesses understand that exclusive property rights can stifle the value creation that comes with mass participation, and so they strive to find ways to “honor the commons” while making money in socially acceptable forms of advertising, subscriptions, or consulting services. The brave new economics of “peer production” is enabling forward-thinking businesses to use social collaboration among thousands, or even millions, of people to create social communities that are the foundation for significant profits. /{BusinessWeek}/ heralded this development in a major cover story in 2005, “The Power of Us,” and called sharing “the net’s next disruption.”~{ Robert D. Hof, “The Power of Us: Mass Collaboration on the Internet Is Shaking Up Business,” /{BusinessWeek}/, June 20, 2005, pp. 73–82. }~ -!{/{Science}/}! as a commons. The world of scientific research has long depended on open sharing and collaboration. But increasingly, copyrights, patents, and university rules are limiting the flow of scientific knowledge. The resulting gridlock of rights in knowledge is impeding new discoveries and innovation. Because of copyright restrictions and software incompatibilities, scientists studying genetics, proteins, and marine biology often cannot access databases containing vital research. Or they cannot easily share physical samples of lab samples. When the maker of Golden Rice, a vitamin-enhanced bioengineered rice, tried to distribute its seeds to millions of people in poor countries, it first had to get permissions from seventy patent holders and obtain six Material Transfer Agreements (which govern the sharing of biomedical research substances).~{ Interview with John Wilbanks, “Science Commons Makes Sharing Easier,” /{Open Access Now}/, December 20, 2004, available at http://www.biomedcen tral.com/openaccess/archive/?page=features&issue=23.}~ +!{/{Science}/}! as a commons. The world of scientific research has long depended on open sharing and collaboration. 
But increasingly, copyrights, patents, and university rules are limiting the flow of scientific knowledge. The resulting gridlock of rights in knowledge is impeding new discoveries and innovation. Because of copyright restrictions and software incompatibilities, scientists studying genetics, proteins, and marine biology often cannot access databases containing vital research. Or they cannot easily share physical samples of lab samples. When the maker of Golden Rice, a vitamin-enhanced bioengineered rice, tried to distribute its seeds to millions of people in poor countries, it first had to get permissions from seventy patent holders and obtain six Material Transfer Agreements (which govern the sharing of biomedical research substances).~{ Interview with John Wilbanks, “Science Commons Makes Sharing Easier,” /{Open Access Now}/, December 20, 2004, available at http://www.biomedcentral.com/openaccess/archive/?page=features&issue=23. }~ ={Wilbanks, John+1;Science Commons:CC Commons spinoff, and+1} The problem of acquiring, organizing, and sharing scientific knowledge is becoming more acute, paradoxically enough, as more scientific disciplines become dependent on computers and the networked sharing of data. To help deal with some of these issues, the Creative Commons in 2005 launched a new project known as the Science Commons to try to redesign the information infrastructure for scientific research. The basic idea is to “break down barriers to sharing that are hindering innovation in the sciences,” says John Wilbanks, executive director of Science Commons. Working with the National Academy of Sciences and other research bodies, Wilbanks is collaborating with astronomers, archaeologists, microbiologists, and medical researchers to develop better ways to make vast scientific literatures more computer-friendly, and databases technically compatible, so that they can be searched, organized, and used more effectively. -!{/{Open education and learning.}/}! 
A new class of knowledge commons is poised to join free and open-source software, the Creative Commons and Wikipedia as a coherent social movement. The new groundswell goes by the awkward name “Open Educational Resources,” or OER.~{ See, e.g., Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, “A Review of the Open Educational Resources (OER) Movement: Achievements, Challenges and New Opportunities,” February 2007, available at http://www .oerderves.org/?p=23.}~ One of the earlier pioneers of the movement was the Massachusetts Institute of Technology which has put virtually all of its course materials on the Web, for free, through its OpenCourseWare initiative. The practice has now spread to scores of colleges and universities around the world, and inspired a broader set of OER initiatives: digital repositories for articles, reports, and data; open-access scholarly journals that bypass expensive commercial publishers; and collaborative Web sites for developing teaching materials. There are wikis for students and scholars working together, sites to share multimedia presentations, and much more. +!{/{Open education and learning.}/}! A new class of knowledge commons is poised to join free and open-source software, the Creative Commons and Wikipedia as a coherent social movement. The new groundswell goes by the awkward name “Open Educational Resources,” or OER.~{ See, e.g., Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, “A Review of the Open Educational Resources (OER) Movement: Achievements, Challenges and New Opportunities,” February 2007, available at http://www.oerderves.org/?p=23. }~ One of the earlier pioneers of the movement was the Massachusetts Institute of Technology which has put virtually all of its course materials on the Web, for free, through its OpenCourseWare initiative. 
The practice has now spread to scores of colleges and universities around the world, and inspired a broader set of OER initiatives: digital repositories for articles, reports, and data; open-access scholarly journals that bypass expensive commercial publishers; and collaborative Web sites for developing teaching materials. There are wikis for students and scholars working together, sites to share multimedia presentations, and much more. ={education:OER movement+1;pen Educational Resources (OER) movement+1;Wikipedia:social movement, as+1;Creative Commons (CC):social movement, as+1} The OER movement has particular importance for people who want to learn but don’t have the money or resources — scholars in developing countries, students struggling to pay for their educations, people in remote or rural locations, people with specialized learning needs. OER is based on the proposition that it will not only be cheaper or perhaps free if teachers and students can share their materials through the Web, it will also enable more effective types of learning. So the OER movement is dedicated to making learning tools cheaper and more accessible. The revolutionary idea behind OER is to transform traditional education — teachers imparting information to passive students — into a more learnerdriven process facilitated by teachers. Self-directed, socially driven learning supplants formal, hierarchical modes of teaching. @@ -326,13 +326,13 @@ _1 Freedom 2: The freedom to redistribute copies so you can help your neighbor; _1 Freedom 3: The freedom to improve the program, and release your improvements to the public, so that the whole community benefits. (Access to the source code is a precondition for this.) ={authorship:community access} -Stallman has become an evangelist for the idea of freedom embodied in all the GNU programs. 
He refuses to use any software programs that are not “free,” and he has refused to allow his appearances to be Webcast if the software being used was not “free.” “If I am to be an honest advocate for free software,” said Stallman, “I can hardly go around giving speeches, then put pressure on people to use nonfree software. I’d be undermining my own cause. And if I don’t show that I take my principles seriously, I can’t expect anybody else to take them seriously.”~{ Stallman at MIT forum, “Copyright and Globalization in the Age of Computer Networks,” April 19, 2001, available at http://media-in-transition.mit .edu/forums/copyright/transcript.html. }~ +Stallman has become an evangelist for the idea of freedom embodied in all the GNU programs. He refuses to use any software programs that are not “free,” and he has refused to allow his appearances to be Webcast if the software being used was not “free.” “If I am to be an honest advocate for free software,” said Stallman, “I can hardly go around giving speeches, then put pressure on people to use nonfree software. I’d be undermining my own cause. And if I don’t show that I take my principles seriously, I can’t expect anybody else to take them seriously.”~{ Stallman at MIT forum, “Copyright and Globalization in the Age of Computer Networks,” April 19, 2001, available at http://media-in-transition.mit.edu/forums/copyright/transcript.html. }~ ={Stallman, Richard:free software, and+2} Stallman has no problems with people making money off software. He just wants to guarantee that a person can legally use, copy, modify, and distribute the source code. There is thus an important distinction between software that is commercial (possibly free) and software that is proprietary (never free). 
Stallman tries to explain the distinction in a catchphrase that has become something of a mantra in free software circles: /{“free as in ‘free speech,’ not as in ‘free beer.’”}/ The point is that code must be freely accessible, not that it should be free of charge. (This is why “freeware” is not the same as free software. Freeware may be free of charge, but it does not necessarily make its source code accessible.) ={freeware vs. free software;software:proprietary|source code for} -Eben Moglen, a professor of law at Columbia University and general counsel for the Free Software Foundation since 1994, calls the provisions of the GPL “elegant and simple. They respond to the proposition that when the marginal cost of goods is zero, any nonzero cost of barbed wire is too high. That’s a fact about the twentyfirst century, and everybody had better get used to it. Yet as you know, there are enormous cultural enterprises profoundly committed to the proposition that more and more barbed wire is necessary. And their basic strategy is to get that barbed wire paid for by the public everywhere.”~{ Eben Moglen, “Freeing the Mind: Free Software and the Death of Proprietary Culture,” June 29, 2003, available at http://emoglen.law/columbia.edu/publi cations/maine-speech.html. }~ +Eben Moglen, a professor of law at Columbia University and general counsel for the Free Software Foundation since 1994, calls the provisions of the GPL “elegant and simple. They respond to the proposition that when the marginal cost of goods is zero, any nonzero cost of barbed wire is too high. That’s a fact about the twentyfirst century, and everybody had better get used to it. Yet as you know, there are enormous cultural enterprises profoundly committed to the proposition that more and more barbed wire is necessary. 
And their basic strategy is to get that barbed wire paid for by the public everywhere.”~{ Eben Moglen, “Freeing the Mind: Free Software and the Death of Proprietary Culture,” June 29, 2003, available at http://emoglen.law.columbia.edu/publications/maine-speech.html. }~ ={Moglen, Eben;Free Software Foundation} The GPL truly was something new under the sun: a legally enforceable tool to vouchsafe a commons of software code. The license is based on copyright law yet it cleverly turns copyright law against itself, limiting its reach and carving out a legally protected zone to build and protect the public domain. In the larger scheme of things, the GPL was an outgrowth of the “gift economy” ethic that has governed academic life for centuries and computer science for decades. What made the GPL different from these (abridgeable) social norms was its legal enforceability. @@ -355,7 +355,7 @@ The Linux kernel, when combined with the GNU programs developed by Stallman and The real innovation of Linux, writes Eric S. Raymond, a leading analyst of the technology, was “not technical, but sociological”: ={Linux:sociological effect of+1} -_1 Linux was rather casually hacked on by huge numbers of volunteers coordinating only through the Internet. Quality was maintained not by rigid standards or autocracy but by the naively simple strategy of releasing every week and getting feedback from hundreds of users within days, creating a sort of rapid Darwinian selection on the mutations introduced by developers. To the amazement of almost everyone, this worked quite well.~{ Eric S. Raymond, “A Brief History of Hackerdom,” http://www.catb.org/ ~est/writings/cathedral-bazaar/hacker-history/ar01s06.html.}~ +_1 Linux was rather casually hacked on by huge numbers of volunteers coordinating only through the Internet. 
Quality was maintained not by rigid standards or autocracy but by the naively simple strategy of releasing every week and getting feedback from hundreds of users within days, creating a sort of rapid Darwinian selection on the mutations introduced by developers. To the amazement of almost everyone, this worked quite well.~{ Eric S. Raymond, “A Brief History of Hackerdom,” http://www.catb.org/~esr/writings/cathedral-bazaar/hacker-history/ar01s06.html. }~ The Free Software Foundation had a nominal project to develop a kernel, but it was not progressing very quickly. The Linux kernel, while primitive, “was running and ready for experimentation,” writes Steven Weber in his book /{The Success of Open Source}/: “Its crude functionality was interesting enough to make people believe that it could, with work, evolve into something important. That promise was critical and drove the broader development process from early on.”~{ Steven Weber, /{The Success of Open Source}/ (Cambridge, MA: Harvard University Press, 2004), p. 100. }~ ={Weber, Steven:The Success of Open Source;Free Software Foundation} @@ -395,7 +395,7 @@ The philosophical rift between free software and open-source software amounts to ={FOSS/FLOSS+3;free software:FOSS/FLOSS+3;Raymond, Eric S.+1;Linux:sociological effect of+1} -_1 The Linux world behaves in many respects like a free market or an ecology, a collection of selfish agents attempting to maximize utility which in the process produces a selfcorrecting spontaneous order more elaborate and efficient than any amount of central planning could have achieved. . . . 
The utility function Linux hackers are maximizing is not classically economic, but is the intangible of their own ego satisfaction and reputation among other hackers.~{ Eric Raymond, “The Cathedral and the Bazaar,” available at http://www .catb.org/~esr/writings/cathedral-bazaar/cathedral-bazaar/ar01s11.html.}~ +_1 The Linux world behaves in many respects like a free market or an ecology, a collection of selfish agents attempting to maximize utility which in the process produces a selfcorrecting spontaneous order more elaborate and efficient than any amount of central planning could have achieved. . . . The utility function Linux hackers are maximizing is not classically economic, but is the intangible of their own ego satisfaction and reputation among other hackers.~{ Eric Raymond, “The Cathedral and the Bazaar,” available at http://www.catb.org/~esr/writings/cathedral-bazaar/cathedral-bazaar/ar01s11.html. }~ It turns out that an accessible collaborative process, FOSS, can elicit passions and creativity that entrenched markets often cannot. In this respect, FOSS is more than a type of freely usable software; it reunites two vectors of human behavior that economists have long considered separate, and points to the need for new, more integrated theories of economic and social behavior. ={free software:economic effects of+1} @@ -406,7 +406,7 @@ FOSS represents a new breed of “social production,” one that draws upon soci Red Hat, a company founded in 1993 by Robert Young, was the first to recognize the potential of selling a custom version (or “distribution”) of GNU/Linux as a branded product, along with technical support. A few years later, IBM became one of the first large corporations to recognize the social realities of GNU/Linux and its larger strategic and competitive implications in the networked environment. In 1998 IBM presciently saw that the new software development ecosystem was becoming far too variegated and robust for any single company to dominate. 
It understood that its proprietary mainframe software could not dominate the burgeoning, diversified Internet-driven marketplace, and so the company adopted the open-source Apache Web server program in its new line of WebSphere business software. ={Red Hat;Young, Robert;GNU/Linux:IBM, and+1|Red Hat, and;IBM:GNU/Linux, and+1;Apache Web server;open source software:functions of+2} -It was a daring move that began to bring the corporate and open-source worlds closer together. Two years later, in 2000, IBM announced that it would spend $1 billion to help develop GNU/Linux for its customer base. IBM shrewdly realized that its customers wanted to slash costs, overcome system incompatibilities, and avoid expensive technology “lock-ins” to single vendors. GNU/Linux filled this need well. IBM also realized that GNU/Linux could help it compete against Microsoft. By assigning its property rights to the commons, IBM could eliminate expensive property rights litigation, entice other companies to help it improve the code (they could be confident that IBM could not take the code private), and unleash a worldwide torrent of creative energy focused on GNU/Linux. Way ahead of the curve, IBM decided to reposition itself for the emerging networked marketplace by making money through tech service and support, rather than through proprietary software alone.~{ Andrew Leonard, “How Big Blue Fell for Linux,” Salon.com, September 12, 2000, available at http://www.salon.com/tech/fsp/2000/09/12/chapter_7_ part_one.print.html. The competitive logic behind IBM’s moves are explored in Pamela Samuelson, “IBM’s Pragmatic Embrace of Open Source,” /{Communications of the ACM}/ 49, no. 21 (October 2006), and Robert P. Merges, “A New Dynamism in the Public Domain,” /{University of Chicago Law Review}/ 71, no. 183 (Winter 2004). }~ +It was a daring move that began to bring the corporate and open-source worlds closer together. 
Two years later, in 2000, IBM announced that it would spend $1 billion to help develop GNU/Linux for its customer base. IBM shrewdly realized that its customers wanted to slash costs, overcome system incompatibilities, and avoid expensive technology “lock-ins” to single vendors. GNU/Linux filled this need well. IBM also realized that GNU/Linux could help it compete against Microsoft. By assigning its property rights to the commons, IBM could eliminate expensive property rights litigation, entice other companies to help it improve the code (they could be confident that IBM could not take the code private), and unleash a worldwide torrent of creative energy focused on GNU/Linux. Way ahead of the curve, IBM decided to reposition itself for the emerging networked marketplace by making money through tech service and support, rather than through proprietary software alone.~{ Andrew Leonard, “How Big Blue Fell for Linux,” Salon.com, September 12, 2000, available at http://www.salon.com/tech/fsp/2000/09/12/chapter_7_part_one.print.html. The competitive logic behind IBM’s moves are explored in Pamela Samuelson, “IBM’s Pragmatic Embrace of Open Source,” /{Communications of the ACM}/ 49, no. 21 (October 2006), and Robert P. Merges, “A New Dynamism in the Public Domain,” /{University of Chicago Law Review}/ 71, no. 183 (Winter 2004). }~ ={Microsoft:competition against} It was not long before other large tech companies realized the benefits of going open source. Amazon and eBay both saw that they could not affordably expand their large computer infrastructures without converting to GNU/Linux. GNU/Linux is now used in everything from Motorola cell phones to NASA supercomputers to laptop computers. 
In 2005, /{BusinessWeek}/ magazine wrote, “Linux may bring about the greatest power shift in the computer industry since the birth of the PC, because it lets companies replace expensive proprietary systems with cheap commodity servers.”~{ Steve Hamm, “Linux Inc.,” /{BusinessWeek}/, January 31, 2005. }~ As many as one-third of the programmers working on open-source projects are corporate employees, according to a 2002 survey.~{ Cited by Elliot Maxwell in “Open Standards Open Source and Open Innovation,” note 80, Berlecon Research, /{Free/Libre Open Source Software: Survey and Study — Firms’ Open Source Activities: Motivations and Policy Implications}/, FLOSS Final Report, Part 2, at www.berlecon.de/studien/downloads/200207FLOSS _Activities.pdf. }~ @@ -418,7 +418,7 @@ With faster computing speeds and cost savings of 50 percent or more on hardware But how does open source work without a conventional market apparatus? The past few years have seen a proliferation of sociological and economic theories about how open-source communities create value. One formulation, by Rishab Ghosh, compares free software development to a “cooking pot,” in which you can give a little to the pot yet take a lot — with no one else being the poorer. “Value” is not measured economically at the point of transaction, as in a market, but in the nonmonetary /{flow}/ of value that a project elicits (via volunteers) and generates (through shared software).~{ Rishab Aiyer Ghosh, “Cooking Pot Markets and Balanced Value Flows,” in Rishab Aiyer Ghosh, ed., /{CODE: Collaborative Ownership and the Digital Economy}/ (Cambridge, MA: MIT Press, 2005), pp. 153–68. }~ Another important formulation, which we will revisit later, comes from Harvard law professor Yochai Benkler, who has written that the Internet makes it cheap and easy to access expertise anywhere on the network, rendering conventional forms of corporate organization costly and cumbersome for many functions. 
Communities based on social trust and reciprocity are capable of mobilizing creativity and commitment in ways that market incentives often cannot — and this can have profound economic implications.~{ See, e.g., Benkler, “Coase’s Penguin, or Linux and the Nature of the Firm,” /{Yale Law Journal}/ 112, no. 369 (2002); Benkler, “ ‘Sharing Nicely’: On Shareable Goods and the Emergence of Sharing as a Modality of Economic Production,” Yale Law Journal 114, no. 273 (2004).}~ Benkler’s analysis helps explain how a global corps of volunteers could create an operating system that, in many respects, outperforms software created by a well-paid army of Microsoft employees. ={Benkler, Yochai:open networks, on;FOSS/FLOSS;free software:FOSS/FLOSS;Ghosh, Rishab;open source software:economic implications of|uses of term+4} -A funny thing happened to free and open-source software as it matured. It became hip. It acquired a cultural cachet that extends well beyond the cloistered precincts of computing. “Open source” has become a universal signifier for any activity that is participatory, collaborative, democratic, and accountable. Innovators within filmmaking, politics, education, biological research, and drug development, among other fields, have embraced the term to describe their own attempts to transform hidebound, hierarchical systems into open, accessible, and distributed meritocracies. Open source has become so much of a cultural meme — a self-replicating symbol and idea — that when the Bikram yoga franchise sought to shut down unlicensed uses of its yoga techniques, dissident yoga teachers organized themselves into a nonprofit that they called Open Source Yoga Unity. 
To tweak the supremacy of Coca-Cola and Pepsi, culture jammers even developed nonproprietary recipes for a cola drink and beer called “open source cola” and “open source beer.”~{ Open Source Yoga Unity, http://www.yogaunity.org; open-source cola, http://alfredo.octavio.net/soft_drink_formula.pdf; open-source beer, Vores OI (Danish for “Our Beer”), http://en.wikipedia.org/wiki/Vores_%C3%981 . See also http://freebeer.org/blog and http://www.project21.ch/freebeer. }~ +A funny thing happened to free and open-source software as it matured. It became hip. It acquired a cultural cachet that extends well beyond the cloistered precincts of computing. “Open source” has become a universal signifier for any activity that is participatory, collaborative, democratic, and accountable. Innovators within filmmaking, politics, education, biological research, and drug development, among other fields, have embraced the term to describe their own attempts to transform hidebound, hierarchical systems into open, accessible, and distributed meritocracies. Open source has become so much of a cultural meme — a self-replicating symbol and idea — that when the Bikram yoga franchise sought to shut down unlicensed uses of its yoga techniques, dissident yoga teachers organized themselves into a nonprofit that they called Open Source Yoga Unity. To tweak the supremacy of Coca-Cola and Pepsi, culture jammers even developed nonproprietary recipes for a cola drink and beer called “open source cola” and “open source beer.”~{ Open Source Yoga Unity, http://www.yogaunity.org; open-source cola, http://alfredo.octavio.net/soft_drink_formula.pdf; open-source beer, Vores OI (Danish for “Our Beer”), http://en.wikipedia.org/wiki/Vores_%C3%981. See also http://freebeer.org/blog and http://www.project21.ch/freebeer. 
}~ ={free software:uses of term+5} Stallman’s radical acts of dissent in the 1980s, regarded with bemusement and incredulity at the time, have become, twenty-five years later, a widely embraced ideal. Small-/{d}/ democrats everywhere invoke open source to lambaste closed and corrupt political systems and to express their aspirations for political transcendence. People invoke open source to express a vision of life free from overcommercialization and corporate manipulation. The term enables one to champion bracing democratic ideals without seeming naïve or flaky because, after all, free software is solid stuff. Moreover, despite its image as the software of choice for granola-loving hippies, free and open-source software is entirely compatible with the commercial marketplace. How suspect can open source be when it has been embraced by the likes of IBM, Hewlett-Packard, and Sun Microsystems? @@ -441,7 +441,7 @@ Fortunately, a small but fierce and keenly intelligent corps of progressive copy For decades, the public domain was regarded as something of a wasteland, a place where old books, faded posters, loopy music from the early twentieth century, and boring government reports go to die. It was a dump on the outskirts of respectable culture. If anything in the public domain had any value, someone would sell it for money. Or so goes the customary conception of the public domain. -Jack Valenti, the longtime head of the Motion Picture Association of America, once put it this way: “A public domain work is an orphan. No one is responsible for its life. But everyone exploits its use, until that time certain when it becomes soiled and haggard, barren of its previous virtues. Who, then, will invest the funds to renovate and nourish its future life when no one owns it?”~{ Jack Valenti, “A Plea for Keeping Alive the U.S. 
Film Industry’s Competitive Energy, ” testimony on behalf of the Motion Picture Association of America to extend the term of copyright protection, Senate Judiciary Committee, September 20, 1995, at http://instructors.cwrl.utexas.edu/~martin/Valenti .pdf. }~ (Valenti was arguing that longer copyright terms would give film studios the incentive to digitize old celluloid films that would otherwise enter the public domain and physically disintegrate.) +Jack Valenti, the longtime head of the Motion Picture Association of America, once put it this way: “A public domain work is an orphan. No one is responsible for its life. But everyone exploits its use, until that time certain when it becomes soiled and haggard, barren of its previous virtues. Who, then, will invest the funds to renovate and nourish its future life when no one owns it?”~{ Jack Valenti, “A Plea for Keeping Alive the U.S. Film Industry’s Competitive Energy, ” testimony on behalf of the Motion Picture Association of America to extend the term of copyright protection, Senate Judiciary Committee, September 20, 1995, at http://instructors.cwrl.utexas.edu/~martin/Valenti.pdf. }~ (Valenti was arguing that longer copyright terms would give film studios the incentive to digitize old celluloid films that would otherwise enter the public domain and physically disintegrate.) ={Valenti, Jack} One of the great, unexplained mysteries of copyright law is how a raffish beggar grew up to be King Midas. How did a virtually ignored realm of culture — little studied and undertheorized— become a subject of intense scholarly interest and great practical importance to commoners and businesses alike? How did the actual value of the public domain become known? The idea that the public domain might be valuable in its own right — and therefore be worth protecting — was a fringe idea in the 1990s and before. So how did a transformation of legal and cultural meaning occur? 
@@ -505,7 +505,7 @@ Yet rather than negotiate a new copyright bargain to take account of the public Most content industries, then and now, do not see any “imbalance” in copyright law; they prefer to talk in different terms entirely. They liken copyrighted works to personal property or real estate, as in “and you wouldn’t steal a CD or use my house without permission, would you?” A copyrighted work is analogized to a finite physical object, But the essential point about works in the digital age is that they can’t be “used up” in the same way that physical objects can. They are “nondepletable” and “nonrival,” as economists put it. A digital work can be reproduced and shared for virtually nothing, without depriving another person of it. ={property rights:copyright law, and+1;copyright law:property rights, and} -Nonetheless, a new narrative was being launched — copyrighted works as property. The idea of copyright law reflecting a policy bargain between the public and authors/corporations was being supplanted by a new story that casts copyright as property that is nearly absolute in scope and virtually perpetual in term. In hindsight, for those scholars who cared enough to see, a disquieting number of federal court cases were strengthening the hand of copyright holders at the expense of the public. James Boyle, in a much-cited essay, called this the “second enclosure movement” — the first one, of course, being the English enclosure movement of common lands in medieval times and into the nineteenth century.~{ James Boyle, “The Second Enclosure Movement and the Construction of the Public Domain,” /{Law and Contemporary Problems}/ 66 (Winter–Spring 2003), pp. 33–74, at http://www.law.duke.edu/shell/cite.pl?66+Law+&+Contemp.+ Probs.+ 33+ (WinterSpring+2003). }~ +Nonetheless, a new narrative was being launched — copyrighted works as property. 
The idea of copyright law reflecting a policy bargain between the public and authors/corporations was being supplanted by a new story that casts copyright as property that is nearly absolute in scope and virtually perpetual in term. In hindsight, for those scholars who cared enough to see, a disquieting number of federal court cases were strengthening the hand of copyright holders at the expense of the public. James Boyle, in a much-cited essay, called this the “second enclosure movement” — the first one, of course, being the English enclosure movement of common lands in medieval times and into the nineteenth century.~{ James Boyle, “The Second Enclosure Movement and the Construction of the Public Domain,” /{Law and Contemporary Problems}/ 66 (Winter–Spring 2003), pp. 33–74, at http://www.law.duke.edu/shell/cite.pl?66+Law+&+Contemp.+Probs.+33+ (WinterSpring+2003). }~ ={Boyle, James:enclosure movement, on;commons:enclosure of+1;enclosure movement+1;copyright law:enclosure movement, and+1} Enclosure took many forms. Copyright scholar Peter Jaszi recalls, “Sometime in the mid-1980s, the professoriate started getting worried about software copyright.”~{ Interview with Peter Jaszi, October 17, 2007. }~ It feared that copyrights for software would squelch competition and prevent others from using existing code to innovate. This battle was lost, however. Several years later, the battle entered round two as copyright scholars and programmers sought to protect reverse-engineering as fair use. This time, they won.~{ /{Sega Enterprises v. Accolade}/, 977 F.2d 1510 (9th Cir. 1993). }~ @@ -553,7 +553,7 @@ Critics also argue that the DMCA gives large corporations a powerful legal tool In her excellent history of the political run-up to the DMCA, Litman notes, “There is no overarching vision of the public interest animating the Digital Millennium Copyright Act. None. 
Instead, what we have is what a variety of different private parties were able to extract from each other in the course of an incredibly complicated four-year multiparty negotiation.”~{ Litman, /{Digital Copyright}/, pp. 144–45. }~ The DMCA represents a new frontier of proprietarian control — the sanctioning of technological locks that can unilaterally override the copyright bargain. Companies asked themselves, Why rely on copyrights alone when technology can embed even stricter controls into the very design of products? ={Litman, Jessica} -The year 1998 was an especially bad year for the public domain. Besides enacting the trademark dilution bill and DMCA, the Walt Disney Company and other large media corporations succeeded in their six-year campaign to enact the Sonny Bono Copyright Term Extension Act.~{ See Wikipedia entry for the Copyright Term Extension Act, at http://en .wikipedia.org/wiki/Sonny_Bono_Copyright_Term_Extension_Act. See also /{Eldred v. Ashcroft}/, 537 U.S. 186 (2003), F. 3d 849 (2001). }~ The legislation, named after the late House legislator and former husband of the singer Cher, retroactively extended the terms of existing copyrights by twenty years. As we will see in chapter 3, this law became the improbable catalyst for a new commons movement. +The year 1998 was an especially bad year for the public domain. Besides enacting the trademark dilution bill and DMCA, the Walt Disney Company and other large media corporations succeeded in their six-year campaign to enact the Sonny Bono Copyright Term Extension Act.~{ See Wikipedia entry for the Copyright Term Extension Act, at http://en.wikipedia.org/wiki/Sonny_Bono_Copyright_Term_Extension_Act. See also /{Eldred v. Ashcroft}/, 537 U.S. 186 (2003), F. 3d 849 (2001). }~ The legislation, named after the late House legislator and former husband of the singer Cher, retroactively extended the terms of existing copyrights by twenty years. 
As we will see in chapter 3, this law became the improbable catalyst for a new commons movement. ={trademarks:dilution of;Walt Disney Company;Copyright Term Extension Act} 2~ Confronting the Proprietarian Juggernaut @@ -574,7 +574,7 @@ A number of activist voices were also coming forward at this time to challenge t The organization was oriented to hackers and cyberlibertarians, who increasingly realized that they needed an organized presence to defend citizen freedoms in cyberspace. (Barlow adapted the term /{cyberspace}/ from science-fiction writer William Gibson in 1990 and applied it to the then-unnamed cultural life on the Internet.) Initially, the EFF was concerned with hacker freedom, individual privacy, and Internet censorship. It later went through some growing pains as it moved offices, changed directors, and sought to develop a strategic focus for its advocacy and litigation. In more recent years, EFF, now based in San Francisco, has become the leading litigator of copyright, trademark, and Internet free expression issues. It also has more than ten thousand members and spirited outreach programs to the press and public. ={Gibson, William;cyberspace:use of term} -John Perry Barlow was an important visionary and populizer of the time. His March 1994 article “The Economy of Ideas” is one of the most prophetic yet accessible accounts of how the Internet was changing the economics of information. He astutely realized that information is not a “product” like most physical property, but rather a social experience or form of life unto itself. “Information is a verb, not a noun,” he wrote. “Freed of its containers, information obviously is not a thing. In fact, it is something that happens in the field of interaction between minds or objects or other pieces of information. . . . Sharks are said to die of suffocation if they stop swimming, and the same is nearly true of information.”~{22. 
John Perry Barlow, “The Economy of Ideas,” /{Wired}/, March 1994, at http:// www.wired.com/wired/archive/2.03/economy.ideas.html.}~ +John Perry Barlow was an important visionary and popularizer of the time. His March 1994 article “The Economy of Ideas” is one of the most prophetic yet accessible accounts of how the Internet was changing the economics of information. He astutely realized that information is not a “product” like most physical property, but rather a social experience or form of life unto itself. “Information is a verb, not a noun,” he wrote. “Freed of its containers, information obviously is not a thing. In fact, it is something that happens in the field of interaction between minds or objects or other pieces of information. . . . Sharks are said to die of suffocation if they stop swimming, and the same is nearly true of information.”~{ John Perry Barlow, “The Economy of Ideas,” /{Wired}/, March 1994, at http://www.wired.com/wired/archive/2.03/economy.ideas.html. }~ Instead of the sober polemics of law professors, Barlow — a retired Wyoming cattle rancher who improbably doubled as a tech intellectual and rock hipster — spiced his analysis of information with colorful metaphors and poetic aphorisms.
Comparing information to DNA helices, Barlow wrote, “Information replicates into the cracks of possibility, always seeking new opportunities for /{Lebensraum}/.” Digital information, he said, “is a continuing process more like the metamorphosing tales of prehistory than anything that will fit in shrink-wrap.” @@ -640,7 +640,7 @@ As Litman unpacked the realities of “authorship,” she showed how the idea of English professor Martha Woodmansee and law professor Peter Jaszi helped expose many of the half-truths about “authorship” and “originality.” Their 1994 anthology of essays, /{The Construction of Authorship}/, showed how social context is an indispensable element of “authorship,” one that copyright law essentially ignores.~{ Martha Woodmansee and Peter Jaszi, eds., /{The Construction of Authorship: Textual Appropriation in Law and Literature}/ (Durham, NC: Duke University Press, 1994). }~ Thus, even though indigenous cultures collectively create stories, music, and designs, and folk cultures generate works in a collaborative fashion, copyright law simply does not recognize such acts of collective authorship. And so they go unprotected. They are vulnerable to private appropriation and enclosure, much as Stallman’s hacker community at MIT saw its commons of code destroyed by enclosure. ={Jaszi, Peter;Woodmansee, Martha;commons:enclosure of;enclosure movement} -Before the Internet, the collaborative dimensions of creativity were hardly given much thought. An “author” was self-evidently an individual endowed with unusual creative skills. As the World Wide Web and digital technologies have proliferated, however, copyright’s traditional notions of “authorship” and “originality” have come to seem terribly crude and limited. The individual creator still matters and deserves protection, of course.
But when dozens of people contribute to a single entry of Wikipedia, or thousands contribute to an open-source software program, how then shall we determine who is the “author”?~{ Henry Miller writes: “We carry within us so many entities, so many voices, that rare indeed is the man who can say he speaks with his own voice. In the final analysis, is that iota of uniqueness which we boast of as ‘ours’ really ours? Whatever real or unique contribution we make stems from the same inscrutable source whence everything derives. We contribute nothing but our understanding, which is a way of saying — our acceptance.” Miller, /{The Books in My Life}/ (New York: New Directions), p. 198. }~ By the lights of copyright law, how shall the value of the public domain, reconstituted as a commons, be assessed?~{ Rufus Pollock, “The Value of the Public Domain,” report for Institute for Public Policy Research, London, July 2006, at http://www.rufuspollock.org/ economics/papers/value_of_public_domain.ippr.pdf. }~ +Before the Internet, the collaborative dimensions of creativity were hardly given much thought. An “author” was self-evidently an individual endowed with unusual creative skills. As the World Wide Web and digital technologies have proliferated, however, copyright’s traditional notions of “authorship” and “originality” have come to seem terribly crude and limited. The individual creator still matters and deserves protection, of course. But when dozens of people contribute to a single entry of Wikipedia, or thousands contribute to an open-source software program, how then shall we determine who is the “author”?~{ Henry Miller writes: “We carry within us so many entities, so many voices, that rare indeed is the man who can say he speaks with his own voice. In the final analysis, is that iota of uniqueness which we boast of as ‘ours’ really ours? Whatever real or unique contribution we make stems from the same inscrutable source whence everything derives. 
We contribute nothing but our understanding, which is a way of saying — our acceptance.” Miller, /{The Books in My Life}/ (New York: New Directions), p. 198. }~ By the lights of copyright law, how shall the value of the public domain, reconstituted as a commons, be assessed?~{ Rufus Pollock, “The Value of the Public Domain,” report for Institute for Public Policy Research, London, July 2006, at http://www.rufuspollock.org/economics/papers/value_of_public_domain.ippr.pdf. }~ ={World Wide Web:collective authorship, and;creativity:collaborative} The Bellagio Declaration, the outgrowth of a conference organized by Woodmansee and Jaszi in 1993, called attention to the sweeping deficiencies of copyright law as applied. One key point stated, “In general, systems built around the author paradigm tend to obscure or undervalue the importance of the ‘public domain,’ the intellectual and cultural commons from which future works will be constructed. Each intellectual property right, in effect, fences off some portion of the public domain, making it unavailable to future creators.”~{ See James Boyle, /{Shamans, Software, and Spleens: Law and the Construction of the Information Society}/ (Cambridge, MA: Harvard University Press, 1995), p. 192. }~ @@ -659,7 +659,7 @@ But as the millennium drew near, the tech-minded legal community — and law-min That task was made easier by the intensifying cultural squeeze. The proprietarian lockdown was starting to annoy and anger people in their everyday use of music, software, DVDs, and the Web. And the property claims were growing more extreme. The American Society of Composers, Authors and Publishers had demanded that Girl Scout camps pay a public performance license for singing around the campfire. Ralph Lauren challenged the U.S. Polo Association for ownership of the word /{polo}/. 
McDonald’s succeeded in controlling the Scottish prefix Mc as applied to restaurants and motels, such as “McVegan” and “McSleep.”~{ These examples can be found in Bollier, /{Brand Name Bullies}/. }~ ={Lauren, Ralph} -The mounting sense of frustration fueled a series of conferences between 1999 and 2001 that helped crystallize the disparate energies of legal scholarship into something resembling an intellectual movement. “A number of us [legal scholars] were still doing our own thing, but we were beginning to get a sense of something,” recalls Yochai Benkler, “It was no longer Becky Eisenberg working on DNA sequences and Pamela Samuelson on computer programs and Jamie Boyle on ‘environmentalism for the ’Net’ and me working on spectrum on First Amendment issues,” said Benkler. “There was a sense of movement.”~{ Interview with Yochai Benkler, February 7, 2006.}~ (“Environmentalism for the ’Net” was an influential piece that Boyle wrote in 1998, calling for the equivalent of an environmental movement to protect the openness and freedom of the Internet.)~{ James Boyle, “A Politics of Intellectual Property: Environmentalism for the Net,” /{Duke Law Journal}/ 47, no. 1 (October 1997), pp. 87–116, at http://www .law.duke.edu/boylesite/Intprop.htm. }~ +The mounting sense of frustration fueled a series of conferences between 1999 and 2001 that helped crystallize the disparate energies of legal scholarship into something resembling an intellectual movement. “A number of us [legal scholars] were still doing our own thing, but we were beginning to get a sense of something,” recalls Yochai Benkler, “It was no longer Becky Eisenberg working on DNA sequences and Pamela Samuelson on computer programs and Jamie Boyle on ‘environmentalism for the ’Net’ and me working on spectrum on First Amendment issues,” said Benkler. 
“There was a sense of movement.”~{ Interview with Yochai Benkler, February 7, 2006.}~ (“Environmentalism for the ’Net” was an influential piece that Boyle wrote in 1998, calling for the equivalent of an environmental movement to protect the openness and freedom of the Internet.)~{ James Boyle, “A Politics of Intellectual Property: Environmentalism for the Net,” /{Duke Law Journal}/ 47, no. 1 (October 1997), pp. 87–116, at http://www.law.duke.edu/boylesite/Intprop.htm. }~ ={Boyle, James+1;Benkler, Yochai+1;Eisenberg, Rebecca;Samuelson, Pamela} “The place where things started to get even crisper,” said Benkler, “was a conference at Yale that Jamie Boyle organized in April 1999, which was already planned as a movement-building event.” That conference, Private Censorship/Perfect Choice, looked at the threats to free speech on the Web and how the public might resist. It took inspiration from John Perry Barlow’s 1996 manifesto “A Declaration of the Independence of Cyberspace.” It is worth quoting at length from Barlow’s lyrical cri de coeur — first published in /{Wired}/ and widely cited — because it expresses the growing sense of thwarted idealism among Internet users, and a yearning for greater self-determination and self-governance among commoners. Barlow wrote: @@ -698,7 +698,7 @@ In the course of his frequent travels, he had a particularly significant rendezv Eldred was a book enthusiast and computer programmer who had reached the end of his rope. Three years earlier, in 1995, he had launched a simple but brilliant project: a free online archive of classic American literature. Using his PC and a server in his home in New Hampshire, Eldred posted the books of Nathaniel Hawthorne, Henry James, Wallace Stevens, and dozens of other great authors whose works were in the public domain. Eldred figured it would be a great service to humanity to post the texts on the World Wide Web, which was just beginning to go mainstream. 
-Eldred had previously worked for Apollo Computer and Hewlett-Packard and was experienced in many aspects of computers and software. In the late 1980s, in fact, he had developed a system that enabled users to post electronic text files and then browse and print them on demand. When the World Wide Web arrived, Eldred was understandably excited. “It seemed to me that there was a possibility of having a system for electronic books that was similar to what I had done before. I was interested in experimenting with this to see if it was possible.”~{ Interview with Eric Eldred, August 1, 2006; Daren Fonda, “Copyright Crusader,” /{Boston Globe Magazine}/, August 29, 1999, available at http://www .boston.com/globe/magazine/8-29/featurestory1.shtml; and Eric Eldred, “Battle of the Books: The Ebook vs. the Antibook,” November 15, 1998, at http://www.eldritchpress.org/battle.html. }~ +Eldred had previously worked for Apollo Computer and Hewlett-Packard and was experienced in many aspects of computers and software. In the late 1980s, in fact, he had developed a system that enabled users to post electronic text files and then browse and print them on demand. When the World Wide Web arrived, Eldred was understandably excited. “It seemed to me that there was a possibility of having a system for electronic books that was similar to what I had done before. I was interested in experimenting with this to see if it was possible.”~{ Interview with Eric Eldred, August 1, 2006; Daren Fonda, “Copyright Crusader,” /{Boston Globe Magazine}/, August 29, 1999, available at http://www.boston.com/globe/magazine/8-29/featurestory1.shtml; and Eric Eldred, “Battle of the Books: The Ebook vs. the Antibook,” November 15, 1998, at http://www.eldritchpress.org/battle.html. 
}~ So Eldred set out to build his own archive of public-domain books: “I got books from the library or wherever, and I learned how to do copyright research and how to scan books, do OCR [opticalcharacter recognition] and mark them up as HTML [the programming language used on the Web],” he said. “I just wanted to make books more accessible to readers.”~{ Interview with Eric Eldred, August 1, 2006. }~ @@ -730,7 +730,7 @@ At a more basic level, the copyright term extension showed contempt for the very The copyright term extension act privatized so many of the public domain books on the Eldritch Press Web site, and so offended Eldred’s sense of justice, that in November 1998 he decided to close his site in protest. The new law meant that he would not be able to add any works published since 1923 to his Web site until 2019. “I can no longer accomplish what I set out to do,” said Eldred.~{ Ibid. }~ ={Eldred, Eric:public domain, and|Lessig, and+3;Lessig, Lawrence+3:Eldred, and+3} -As luck had it, Larry Lessig was looking for an Everyman of the Internet. Lessig, then a thirty-seven-year-old professor at Harvard Law School, was looking for a suitable plaintiff for his envisioned constitutional test case. He had initially approached Michael S. Hart, the founder of Project Gutenberg, the first producer of free electronic books. At the time, the project had nearly six thousand public-domain books available online. (It now has twenty thousand books; about 3 million books are downloaded every month.) Hart was receptive to the case but had his own ideas about how the case should be argued. He wanted the legal complaint to include a stirring populist manifesto railing against rapacious copyright holders. Lessig demurred and went in search of another plaintiff.~{ Richard Poynder interview with Lawrence Lessig, “The Basement Interviews: Free Culture,” April 7, 2006, p. 26, available at http://poynder.blogspot.com/ 2006/03/basement-interviews.html. 
See also Steven Levy, “Lawrence Lessig’s Supreme Showdown,” /{Wired}/, October 2002, pp. 140–45, 154–56, available at http://www.wired.com/wired/archive/10.10/lessig.html. Project Gutenberg is at http://wwwgutenberg.org. }~ +As luck had it, Larry Lessig was looking for an Everyman of the Internet. Lessig, then a thirty-seven-year-old professor at Harvard Law School, was looking for a suitable plaintiff for his envisioned constitutional test case. He had initially approached Michael S. Hart, the founder of Project Gutenberg, the first producer of free electronic books. At the time, the project had nearly six thousand public-domain books available online. (It now has twenty thousand books; about 3 million books are downloaded every month.) Hart was receptive to the case but had his own ideas about how the case should be argued. He wanted the legal complaint to include a stirring populist manifesto railing against rapacious copyright holders. Lessig demurred and went in search of another plaintiff.~{ Richard Poynder interview with Lawrence Lessig, “The Basement Interviews: Free Culture,” April 7, 2006, p. 26, available at http://poynder.blogspot.com/2006/03/basement-interviews.html. See also Steven Levy, “Lawrence Lessig’s Supreme Showdown,” /{Wired}/, October 2002, pp. 140–45, 154–56, available at http://www.wired.com/wired/archive/10.10/lessig.html. Project Gutenberg is at http://wwwgutenberg.org. }~ ={Hart, Michael S.;Project Gutenberg} After reading about Eldred’s protests in the /{Boston Globe}/, and meeting with him over coffee, Lessig asked Eldred if he would be willing to be the plaintiff in his envisioned case. Eldred readily agreed. As a conscientious objector and draft resister during the Vietnam War, he was ready to go to great lengths to fight the Sonny Bono Act. “Initially, I volunteered to violate the law if necessary and get arrested and go to jail,” Eldred said. 
“But Larry told me that was not necessary.” A good thing, because under the No Electronic Theft Act, passed in 1997, Eldred could be charged with a felony. “I could face jail, fines, seizure of my computer, termination of my Internet service without notice — and so all the e-books on the Web site could be instantly lost,” he said. @@ -771,7 +771,7 @@ For Lessig, the LambdaMOO “rape” had an obvious resonance with Catherine Mac To explore the issues further, Lessig developed one of the first courses on the law of cyberspace. He taught it in the spring semester of 1995 at Yale Law School, where he was a visiting professor, and later at the University of Chicago and Harvard law schools. During the Yale class, an exchange with a student, Andrew Shapiro, jarred his thinking in a new direction: “I was constantly thinking about the way that changing suppositions of constitutional eras had to be accounted for in the interpretation of the Constitution across time. Andrew made this point about how there’s an equivalent in the technical infrastructure [of the Internet] that you have to think about. And then I began to think about how there were norms and law and infrastructure — and then I eventually added markets into this — which combine to frame what policymaking is in any particular context.”~{ Ibid. }~ ={Shapiro, Andrew} -This line of analysis became a central theme of Lessig’s startling first book, /{Code and Other Laws of Cyberspace}/, published in 1999.~{ Lessig, /{Code and Other Laws of Cyberspace}/ (New York: Basic Books, 1999). }~ /{Code}/ took on widespread assumptions that the Internet would usher in a new libertarian, free-market utopia. 
Cyberlibertarian futurists such as Alvin Toffler, Esther Dyson, George Gilder, and John Gilmore had routinely invoked cyberspace as a revolutionary force that would render government, regulation, and social welfare programs obsolete and unleash the transformative power of free markets.~{ Esther Dyson, George Gilder, George Keyworth, and Alvin Toffler, “Cyberspace and the American Dream: A Magna Carta for the Knowledge Age,” Progress and Freedom Foundation, August 1994, available at http://www.pff .org/issues-pubs/futureinsights/fil.2magnacarta.html. }~ In the libertarian scenario, individual freedom can flourish only if government gets the hell out of the way and lets individuals create, consume, and interact as they see fit, without any paternalistic or tyrannical constraints. Prosperity can prevail and scarcity disappear only if meddling bureaucrats and politicians leave the citizens of the Internet to their own devices. As Louis Rossetto, the founder and publisher of /{Wired}/, bluntly put it: “The idea that we need to worry about anyone being ‘left out’ is entirely atavistic to me, a product of that old economics of scarcity and the 19th century social thinking that grew out of it.”~{ David Hudson, interview with Louis Rossetto, “What Kind of Libertarian,” /{Rewired}/ (Macmillan, 1997), p. 255. }~ +This line of analysis became a central theme of Lessig’s startling first book, /{Code and Other Laws of Cyberspace}/, published in 1999.~{ Lessig, /{Code and Other Laws of Cyberspace}/ (New York: Basic Books, 1999). }~ /{Code}/ took on widespread assumptions that the Internet would usher in a new libertarian, free-market utopia. 
Cyberlibertarian futurists such as Alvin Toffler, Esther Dyson, George Gilder, and John Gilmore had routinely invoked cyberspace as a revolutionary force that would render government, regulation, and social welfare programs obsolete and unleash the transformative power of free markets.~{ Esther Dyson, George Gilder, George Keyworth, and Alvin Toffler, “Cyberspace and the American Dream: A Magna Carta for the Knowledge Age,” Progress and Freedom Foundation, August 1994, available at http://www.pff.org/issues-pubs/futureinsights/fil.2magnacarta.html. }~ In the libertarian scenario, individual freedom can flourish only if government gets the hell out of the way and lets individuals create, consume, and interact as they see fit, without any paternalistic or tyrannical constraints. Prosperity can prevail and scarcity disappear only if meddling bureaucrats and politicians leave the citizens of the Internet to their own devices. As Louis Rossetto, the founder and publisher of /{Wired}/, bluntly put it: “The idea that we need to worry about anyone being ‘left out’ is entirely atavistic to me, a product of that old economics of scarcity and the 19th century social thinking that grew out of it.”~{ David Hudson, interview with Louis Rossetto, “What Kind of Libertarian,” /{Rewired}/ (Macmillan, 1997), p. 255. }~ ={code:law, as+4;law:code as+4;Lessig, Lawrence:Code and Other Laws of Cyberspace+4;Dyson, Esther;Gilder, George;Gilmore, John;Rossetto, Louis;Toffler, Alvin;Internet:architecture of+2|freedom of+1;cyberspace: economic effects of} Lessig was more wary. In /{Code}/, he constructed a sweeping theoretical framework to show how freedom on the Internet must be actively, deliberately constructed; it won’t simply happen on its own. 
Inspired by conversations with computer programmer Mitch Kapor, who declared that “architecture is politics” in 1991, Lessig’s book showed how software code was supplanting the regulatory powers previously enjoyed by sovereign nation-states and governments. The design of the Internet and software applications was becoming more influential than conventional sources of policymaking — Congress, the courts, federal agencies. /{Code is law}/, as Lessig famously put it. @@ -799,7 +799,7 @@ Back at the Berkman Center, however, there were plenty of opportunities to influ While nourished by the work of his academic colleagues, Lessig was determined to come up with ingenious ways to /{do something}/ about the distressing drift of copyright law. It was important to take the offensive. Notwithstanding the pessimism of /{Code}/, Lessig’s decidedly optimistic answer was to gin up a constitutional challenge to copyright law. Many legal experts and even sympathetic colleagues were skeptical. Peter Jaszi, a leading intellectual law professor at American University, told a reporter at the time, “It’s not so much that we thought it was a terrible idea but that it was just unprecedented. Congress has been extending copyright for 180 years, and this is the first time someone said it violated the Constitution.”~{ David Streitfeld, “The Cultural Anarchist vs. the Hollywood Police State,” /{Los Angeles Times Magazine}/, September 22, 2002, p. 32. }~ Others worried that an adverse ruling could set back the larger cause of copyright reform. ={Jaszi, Peter;Lessig, Lawrence:Code and Other Laws of Cyberspace;law:social change, and+3;copyright law:expansion of} -In the spirit of the commons, Lessig and his Berkman Center colleagues decided that the very process for mounting the /{Eldred}/ lawsuit would be different: “Rather than the secret battles of lawyers going to war, we will argue this case in the open. This is a case about the commons; we will litigate it in the commons. 
Our arguments and strategy will be developed online, in a space called ‘openlaw.org.’ Key briefs will be drafted online, with participants given the opportunity to criticize the briefs and suggest other arguments. . . . Building on the model of open source software, we are working from the hypothesis that an open development process best harnesses the distributed resources of the Internet community. By using the Internet, we hope to enable the public interest to speak as loudly as the interests of corporations.”~{ Lawrence Lessig, “Commons Law,” June 24, 1999, posted on www.intellectu alcapital.com/issues/issue251/item5505.asp, and Open Law archive at http:// cyber.law.harvard.edu/openlaw. }~ +In the spirit of the commons, Lessig and his Berkman Center colleagues decided that the very process for mounting the /{Eldred}/ lawsuit would be different: “Rather than the secret battles of lawyers going to war, we will argue this case in the open. This is a case about the commons; we will litigate it in the commons. Our arguments and strategy will be developed online, in a space called ‘openlaw.org.’ Key briefs will be drafted online, with participants given the opportunity to criticize the briefs and suggest other arguments. . . . Building on the model of open source software, we are working from the hypothesis that an open development process best harnesses the distributed resources of the Internet community. By using the Internet, we hope to enable the public interest to speak as loudly as the interests of corporations.”~{ Lawrence Lessig, “Commons Law,” June 24, 1999, posted on www.intellectualcapital.com/issues/issue251/item5505.asp, and Open Law archive at http://cyber.law.harvard.edu/openlaw. }~ ={Eldred v. Reno/Eldred v. Ashcroft+28;Lessig, Lawrence:Eldred v.
Reno, and+28|law in contemporary context, and+1} Emulating the open-source development model was a nice touch, and perhaps useful; dozens of people around the world registered at the Openlaw site and posted suggestions. Some of the examples and legal critiques were used in developing the case, and the model was later used by lawyers in the so-called DeCSS case, in which a hacker broke the encryption of a DVD. But it turns out that open, distributed creativity has its limits in the baroque dance of litigation; it can’t work when secrecy and confidentiality are important, for example. @@ -816,18 +816,18 @@ Normally, this would have been the end of the road for a case. Very few appeals At this point, Lessig realized he needed the advice and support of some experienced Supreme Court litigators. He enlisted help from additional lawyers at Jones, Day; Alan Morrison of Public Citizen Litigation Group; Kathleen Sullivan, the dean of Stanford Law School; and Charles Fried, a former solicitor general under President Reagan. Professor Peter Jaszi and the students of his law clinic drafted an amicus brief. ={Morrison, Alan;Fried, Charles;Jaszi, Peter;Sullivan, Kathleen} -A key concern was how to frame the arguments. Attorney Don Ayer of Jones, Day repeatedly urged Lessig to stress the dramatic harm that the Bono Act was inflicting on free speech and free culture. But as Lessig later confessed, “I hate this view of the law. . . . I was not persuaded that we had to sell our case like soap.”~{ Lessig, “How I Lost the Big One,” /{Legal Affairs}/, March/April 2004, available at http://www.legalaffairs.org/issues/March-April-2004/story_lessig_marap r04.msp. }~ Lessig was convinced that the only way /{Eldred}/ could prevail at the Supreme Court would be to win over the conservative justices with a matter of principle. To Lessig, the harm was obvious; what needed emphasis was how the Sonny Bono Act violated “originalist” principles of jurisprudence. 
(Originalist judges claim to interpret the Constitution based on its “original” meanings in 1791, which includes a belief that Congress has strictly enumerated powers, not broad legislative discretion.) +A key concern was how to frame the arguments. Attorney Don Ayer of Jones, Day repeatedly urged Lessig to stress the dramatic harm that the Bono Act was inflicting on free speech and free culture. But as Lessig later confessed, “I hate this view of the law. . . . I was not persuaded that we had to sell our case like soap.”~{ Lessig, “How I Lost the Big One,” /{Legal Affairs}/, March/April 2004, available at http://www.legalaffairs.org/issues/March-April-2004/story_lessig_marapr04.msp. }~ Lessig was convinced that the only way /{Eldred}/ could prevail at the Supreme Court would be to win over the conservative justices with a matter of principle. To Lessig, the harm was obvious; what needed emphasis was how the Sonny Bono Act violated “originalist” principles of jurisprudence. (Originalist judges claim to interpret the Constitution based on its “original” meanings in 1791, which includes a belief that Congress has strictly enumerated powers, not broad legislative discretion.) ={Ayer, Don;law:originalist principles of+2} “We tried to make an argument that if you were an originalist— in the way these conservative judges said they were in many other cases — then you should look to the original values in the Copyright Clause,” said Lessig. “And we argued that if you did that then you had to conclude that Congress had wildly overstepped its constitutional authority, and so the law should be struck down.”~{ Lessig interview with Richard Poynder, April 7, 2006, p. 25. }~ Flaunting the harm caused by the copyright term extension struck Lessig as showy and gratuitous; he considered the harm more or less self-evident. 
In the aftermath of a public debate that Lessig once had with Jack Valenti, a questioner on Slashdot, a hacker Web site, suggested that Lessig would be more persuasive if he asserted “a clear conception of direct harm . . . than the secondary harm of the copyright holders getting a really sweet deal.” Lessig conceded that such a focus “has been a weakness of mine for a long time. In my way of looking at the world, the point is a matter of principle, not pragmatics. . . . There are many others who are better at this pragmatism stuff. To me, it just feels insulting.”~{ “Lawrence Lessig Answers Your Questions,” Slashdot.org, December 21, 2001, Question 1, “The question of harm,” posted by “caduguid,” with Lessig response, available at http://interviews.slashdot.org/article.pl?sid=01/12/21/155221. }~ ={copyright law:expansion of;Copyright Clause, U.S. Constitution;Valenti, Jack} -And so, despite warnings to the contrary, Lessig’s legal strategy relied on a call to uphold originalist principles. Having clerked for Justice Scalia and Judge Posner, Lessig felt that he understood the mind-set and sympathies of the conservative jurists. “If we get to the Supreme Court,” Lessig told Slashdot readers in December 2001, “I am certain that we will win. This is not a left/right issue. The conservatives on the Court will look at the framers’ Constitution— which requires that copyrights be granted for ‘limited times’ — and see that the current practice of Congress . . . makes a mockery of the framers’ plan. And the liberals will look at the effect of these never-ending copyrights on free speech, and conclude that Congress is not justified in this regulation of speech. The Supreme Court doesn’t give a hoot about Hollywood; they will follow the law.”~{ Lessig response to question 11, Slashdot.org, “Will the extension of copyright continue?” posed by “Artifice_Eternity,” available at http://interviews.slash dot.org/article.pl?sid=01/12/21/155221. 
}~ +And so, despite warnings to the contrary, Lessig’s legal strategy relied on a call to uphold originalist principles. Having clerked for Justice Scalia and Judge Posner, Lessig felt that he understood the mind-set and sympathies of the conservative jurists. “If we get to the Supreme Court,” Lessig told Slashdot readers in December 2001, “I am certain that we will win. This is not a left/right issue. The conservatives on the Court will look at the framers’ Constitution— which requires that copyrights be granted for ‘limited times’ — and see that the current practice of Congress . . . makes a mockery of the framers’ plan. And the liberals will look at the effect of these never-ending copyrights on free speech, and conclude that Congress is not justified in this regulation of speech. The Supreme Court doesn’t give a hoot about Hollywood; they will follow the law.”~{ Lessig response to question 11, Slashdot.org, “Will the extension of copyright continue?” posed by “Artifice_Eternity,” available at http://interviews.slashdot.org/article.pl?sid=01/12/21/155221. }~ ={Posner, Richard;Scalia, Antonin;Copyright Clause, U.S. Constitution;copyright law:expansion of+5;Copyright Term Extension Act+5} Lessig took pride in the fact that thirty-eight amicus briefs were filed on behalf of /{Eldred}/. They included a wide range of authors, computer and consumer electronics companies, and organizations devoted to arts, culture, education, and journalism. Besides the usual suspects like the Free Software Foundation, Electronic Frontier Foundation, and Public Knowledge, supporting briefs were filed by fifteen economists including Kenneth Arrow and Milton Friedman, Phyllis Schlafly of the Eagle Forum, and the Intel Corporation. -At oral arguments, Lessig immediately confronted a skeptical bench. Justice Sandra Day O’Connor worried about overturning years of previous copyright term extensions. Justice William Rehnquist proposed. 
“You want the right to copy verbatim other people’s books, don’t you?” And when Justice Anthony Kennedy invited Lessig to expound upon the great harm that the law was inflicting on free speech and culture, Lessig declined the opportunity. He instead restated his core constitutional argument, that copyright terms cannot be perpetual. “This was a correct answer, but it wasn’t the right answer,” Lessig later confessed in a candid postmortem of the case. “The right answer was to say that there was an obvious and profound harm. Any number of briefs had been written about it. Kennedy wanted to hear it. And here was where Don Ayer’s advice should have mattered. This was a softball; my answer was a swing and a miss.”~{ See http://www.supremecourtus.gov/oral_arguments/argument_transcripts/ 01-618.pdf. See also Lessig, “How I Lost the Big One,” and Linda Greenhouse, “Justices Hear Arguments in Challenge to Copyrights,” /{New York Times}/, October 10, 2002. A number of Supreme Court opinions in the /{Eldred}/ case can be found at the Openlaw archive at http://cyber.law.harvard.edu/openlaw/ eldredvreno. The /{Loyola Los Angeles Law Review}/ held a symposium on /{Eldred v. Ashcroft}/, available at http://llr.lls.edu/volumes/v36-issue1. }~ No justices spoke in defense of the Sonny Bono Act. +At oral arguments, Lessig immediately confronted a skeptical bench. Justice Sandra Day O’Connor worried about overturning years of previous copyright term extensions. Justice William Rehnquist proposed. “You want the right to copy verbatim other people’s books, don’t you?” And when Justice Anthony Kennedy invited Lessig to expound upon the great harm that the law was inflicting on free speech and culture, Lessig declined the opportunity. He instead restated his core constitutional argument, that copyright terms cannot be perpetual. “This was a correct answer, but it wasn’t the right answer,” Lessig later confessed in a candid postmortem of the case. 
“The right answer was to say that there was an obvious and profound harm. Any number of briefs had been written about it. Kennedy wanted to hear it. And here was where Don Ayer’s advice should have mattered. This was a softball; my answer was a swing and a miss.”~{ See http://www.supremecourtus.gov/oral_arguments/argument_transcripts/01-618.pdf. See also Lessig, “How I Lost the Big One,” and Linda Greenhouse, “Justices Hear Arguments in Challenge to Copyrights,” /{New York Times}/, October 10, 2002. A number of Supreme Court opinions in the /{Eldred}/ case can be found at the Openlaw archive at http://cyber.law.harvard.edu/openlaw/eldredvreno. The /{Loyola Los Angeles Law Review}/ held a symposium on /{Eldred v. Ashcroft}/, available at http://llr.lls.edu/volumes/v36-issue1. }~ No justices spoke in defense of the Sonny Bono Act. ={Ayer, Don;Kennedy, Anthony;O’Connor, Sandra Day;Rehnquist, William} Yet they had clear reservations about the Supreme Court’s authority to dictate the length of copyright terms. @@ -841,7 +841,7 @@ Justices Stephen Breyer and John Paul Stevens accepted Lessig’s arguments, and In assessing the broad impact of the /{Eldred}/ ruling, copyright scholar Siva Vaidhyanathan cited law professor Shubha Ghosh’s observation that the /{Eldred}/ ruling had effectively “deconstitutionalized” copyright law. /{Eldred}/ pushed copyright law ={Ghosh, Shubha;Vaidhyanathan, Siva+1} -_1 farther into the realm of policy and power battles and away from principles that have anchored the system for two centuries. That means public interest advocates and activists must take their battles to the public sphere and the halls of Congress. We can’t appeal to the Founders’ wishes or republican ideals. We will have to make pragmatic arguments in clear language about the effects of excessive copyright on research, teaching, art and journalism. 
And we will have to make naked mass power arguments with echoes of “we want our MP3” and “it takes an industry of billions to hold us back.”~{ Siva Vaidhyanathan, “After the Copyright Smackdown: What Next?” /{Salon}/, January 17, 2003, at http://www.salon.com/tech/feature/2003/01/17/copy right.print.html. }~ +_1 farther into the realm of policy and power battles and away from principles that have anchored the system for two centuries. That means public interest advocates and activists must take their battles to the public sphere and the halls of Congress. We can’t appeal to the Founders’ wishes or republican ideals. We will have to make pragmatic arguments in clear language about the effects of excessive copyright on research, teaching, art and journalism. And we will have to make naked mass power arguments with echoes of “we want our MP3” and “it takes an industry of billions to hold us back.”~{ Siva Vaidhyanathan, “After the Copyright Smackdown: What Next?” /{Salon}/, January 17, 2003, at http://www.salon.com/tech/feature/2003/01/17/copyright.print.html. }~ ={copyright law:balance of public and private rights} 2~ A Movement Is Born @@ -854,7 +854,7 @@ After four years of relentless work, Lessig was frustrated and dejected. “I ha Yet Lessig had certainly been correct that /{Eldred}/ would not succeed unless it convinced the Court’s conservative majority. The fact that the originalist gambit failed was perhaps the strongest message of all: /{nothing}/ would convince this Court to rein in the excesses of copyright law. -Even before the Supreme Court had delivered its ruling, Lessig admitted his misgivings about the power of law to solve copyright’s failings: “The more I’m in this battle, the less I believe that constitutional law on its own can solve the problem. 
If Americans can’t see the value of freedom without the help of lawyers, then we don’t deserve freedom.”~{ Lessig response to Question 11, “Cyberspace Amendment,” posed by “kzinti,” in Slashdot, available at http://interviews.slashdot.org/article.pl?sid=01/12/ 21/155221. }~ Yet mobilizing freedom-loving Americans to seek redress from Congress was also likely to be doomed. Hollywood film studios and record companies had showered some $16.6 million and $1.8 million, respectively, on federal candidates and parties in 1998. Legislators know who butters their bread, and the public was not an organized influence on this issue. No wonder a progressive copyright reform agenda was going nowhere. +Even before the Supreme Court had delivered its ruling, Lessig admitted his misgivings about the power of law to solve copyright’s failings: “The more I’m in this battle, the less I believe that constitutional law on its own can solve the problem. If Americans can’t see the value of freedom without the help of lawyers, then we don’t deserve freedom.”~{ Lessig response to Question 11, “Cyberspace Amendment,” posed by “kzinti,” in Slashdot, available at http://interviews.slashdot.org/article.pl?sid=01/12/21/155221. }~ Yet mobilizing freedom-loving Americans to seek redress from Congress was also likely to be doomed. Hollywood film studios and record companies had showered some $16.6 million and $1.8 million, respectively, on federal candidates and parties in 1998. Legislators know who butters their bread, and the public was not an organized influence on this issue. No wonder a progressive copyright reform agenda was going nowhere. ={Copyright Term Extension Act+1;Eldred v. Reno/Eldred v. Ashcroft:Supreme Court, and;law:limited power of;copyright law:expansion of+1} Four years after the /{Eldred}/ ruling, Lessig had some second thoughts about the “Mickey Mouse” messaging strategy. 
Opponents of the copyright term extension, including Lessig, had often flaunted Mickey motifs in their dealings with the press and railed at the “Mickey Mouse Protection Act.” Yet in 2006, Lessig lamented to one interviewer that “the case got framed as one about Mickey Mouse. Whereas the reality is, who gives a damn about Mickey Mouse? The really destructive feature of the Sonny Bono law is the way it locks up culture that has no continuing commercial value at all. It orphaned culture. So by focusing on Mickey Mouse, the Court thought this was an issue of whether you believed in property or not. If, however, we had focused people on all the culture that is being lost because it is locked up by copyright, we might have succeeded.”~{ Interview with Poynder, April 7, 2006, pp. 26–27. }~ @@ -862,7 +862,7 @@ Four years after the /{Eldred}/ ruling, Lessig had some second thoughts about th The lasting impact of the /{Eldred}/ case, ironically, may have less to do with the law than with the cultural movement it engendered. The lawsuit provided a powerful platform for educating the American people about copyright law. A subject long regarded as arcane and complicated was now the subject of prominent articles in the /{New York Times}/, /{Salon}/, computer magazines, wire services, and countless other publications and Web sites. A cover story for the /{Los Angeles Times}/'s Sunday magazine explained how the case could “change the way Hollywood makes money — and the way we experience art.” /{Wired}/ magazine headlined its profile of Lessig “The Great Liberator.” Lessig himself barnstormed the country giving dozens of presentations to librarians, technologists, computer programmers, filmmakers, college students, and many others. Even Lessig’s adversary at the district court level, Arthur R. 
Miller, a Harvard Law School professor, agreed, “The case has sparked a public discussion that wasn’t happening before.” ={Miller, Arthur R.} -Lessig’s orations often provoked the fervor of a revival meeting — and led to more than a few conversions. This may appear surprising because Lessig, with his receding hairline and wireframe glasses, strikes an unprepossessing pose. In the professorial tradition, he can sometimes be didactic and patronizing. But on the stage, Lessig is stylish, poised, and mesmerizing. His carefully crafted talks are intellectual but entertaining, sophisticated but plainspoken— and always simmering with moral passion. He typically uses a customized version of Keynote, a Macintosh-based program similar to PowerPoint, to punctuate his dramatic delivery with witty visuals and quick flashes of words. (Experts in professional presentations have dubbed this style the “Lessig Method,” and likened it to the Takahashi Method in Japan because slides often use a single word, short quote, or photo.)~{ Garr Reynolds’s blog on professional presentation design, “The ‘Lessig Method’ of Presentation,” October 5, 2005, available at http://presentationzen .blogs.com/presentationzen/2005/10/the_lessig_meth.html. }~ +Lessig’s orations often provoked the fervor of a revival meeting — and led to more than a few conversions. This may appear surprising because Lessig, with his receding hairline and wireframe glasses, strikes an unprepossessing pose. In the professorial tradition, he can sometimes be didactic and patronizing. But on the stage, Lessig is stylish, poised, and mesmerizing. His carefully crafted talks are intellectual but entertaining, sophisticated but plainspoken— and always simmering with moral passion. He typically uses a customized version of Keynote, a Macintosh-based program similar to PowerPoint, to punctuate his dramatic delivery with witty visuals and quick flashes of words. 
(Experts in professional presentations have dubbed this style the “Lessig Method,” and likened it to the Takahashi Method in Japan because slides often use a single word, short quote, or photo.)~{ Garr Reynolds’s blog on professional presentation design, “The ‘Lessig Method’ of Presentation,” October 5, 2005, available at http://presentationzen.blogs.com/presentationzen/2005/10/the_lessig_meth.html. }~ More than a sidebar, Lessig’s public speaking has been an important aspect of his leadership in building a commons movement. His talks have helped some fairly sequestered constituencies in technical fields — computer programming, library science, Internet policy, copyright law — understand the larger political and cultural significance of their work. The results have sometimes been galvanizing. As one veteran hacker told me in 2006, “There’s a whole connoisseurship of Lessig talks. He’s a little past his peak right now — but there was a period where, like when he gave the lecture at OSCON [a conference of open-source programmers], when he was done, they wanted to start a riot. People were literally milling around, looking for things to smash. He was saying to these people who worked on open source, ‘There’s a larger world context to your work. The government is doing things — and you can stop them!’ ”~{ Interview with Aaron Swartz, October 10, 2006. }~ ={Lessig, Lawrence:public speaker, as} @@ -916,7 +916,7 @@ Lessig told me that when he recognized Eldred’s Web site as a new type of soci It helps to remember that in 1998 and the following years, the legality of sharing online works and downloading them was highly ambiguous. Prevailing legal discourse set forth a rather stark, dualistic world: either a work is copyrighted with “all rights reserved,” or a work is in the public domain, available to anyone without restriction. The mental categories of the time offered no room for a “constituency of the reasonable,” in Lessig’s words. 
={copyright law:public domain vs.;public domain:copyright law, and} -Copyright law made nominal provisions for a middle ground in the form of the fair use doctrine and the public domain. But Lessig realized that fair use was “just a terrible structure on which to build freedom. There are basically no bright lines; everything is a constant debate. Of course, we don’t want to erase or compromise or weaken [these doctrines] in any sense. But it’s very important to build an infrastructure that doesn’t depend upon four years of litigation.” Or as Lessig was wont to put it in his impassioned performances on the stump: “Fuck fair use.”~{ Robert S. Boynton, “Righting Copyright: Fair Use and Digital Environmentalism,” /{Bookforum}/, February/March 2005, available at http://www.robert boynton.com/articleDisplay.php?article_id=1. }~ +Copyright law made nominal provisions for a middle ground in the form of the fair use doctrine and the public domain. But Lessig realized that fair use was “just a terrible structure on which to build freedom. There are basically no bright lines; everything is a constant debate. Of course, we don’t want to erase or compromise or weaken [these doctrines] in any sense. But it’s very important to build an infrastructure that doesn’t depend upon four years of litigation.” Or as Lessig was wont to put it in his impassioned performances on the stump: “Fuck fair use.”~{ Robert S. Boynton, “Righting Copyright: Fair Use and Digital Environmentalism,” /{Bookforum}/, February/March 2005, available at http://www.robertboynton.com/articleDisplay.php?article_id=1. }~ ={copyright law:fair use doctrine, and+2;fair use doctrine:copyright law, and+2;Lessig, Lawrence:fair use, on+2} This was a theatrical flourish, of course. Back in Palo Alto, Lessig in 2001 had launched the Center for Internet & Society at Stanford Law School, which actively takes on lawsuits seeking to vindicate the public’s fair use rights, among other things. 
One notable case was against Stephen Joyce, the grandson of novelist James Joyce. As executor of the Joyce literary estate, Stephen Joyce steadfastly prevented dozens of scholars from quoting from the great writer’s archive of unpublished letters.~{ See, e.g., D. T. Max, “The Injustice Collector,” /{New Yorker}/, June 19, 2006, pp. 34ff. }~ (After losing a key court ruling in February 2007, the Joyce estate settled the case on terms favorable to a scholar who had been denied access to the Joyce papers.) @@ -991,7 +991,7 @@ What ensued was a lengthy and irregular series of e-mail conversations and socia A digital archive for donated and public-domain works had great appeal. Just as land trusts acted as trustees of donated plots of land, so the Copyright’s Commons (as Lessig proposed that it be named) would be a “conservancy” for film, books, music, and other works that were either in the public domain or donated. Six weeks after Abelson’s original suggestion, Lessig produced a “Proposal for an Intellectual Property Conservancy” for discussion purposes.~{ Lawrence Lessig, “Proposal for the Intellectual Property Conservancy,” e-mail to ipcommons group, November 12, 2000. }~ He now called the concept “an IP commons” — “the establishment of an intellectual property conservancy to facilitate the collection and distribution under a GPL-like license of all forms of intellectual property.” As elaborated by two Harvard Law School students, Chris Babbitt and Claire Prestel, “The conservancy will attempt to bridge the gap between authors, corporate copyright holders and public domain advocates by providing a repository of donated works which we believe will create a more perfect ‘market’ for intellectual property.”~{ Chris Babbitt and Claire Prestel, “Memorandum to Michael Carroll, Wilmer Cutler Pickering, ‘IP Conservancy,’ ” October 24, 2000. 
}~ ={Abelson, Hal:copyright conservancy idea, and+2;Babbitt, Chris;Prestel, Claire;Copyright’s Commons+27;Creative Commons (CC):Copyright’s Commons, as+27;IP Commons+27;Lessig, Lawrence:Copyright’s Commons, and+27} -Friendly critiques started arriving immediately. Stallman considered the proposal a “good idea overall,” but as usual he objected to the words, such as “intellectual property” and “copyright protection,” which he considered “propaganda for the other side.”~{ E-mail from Richard Stallman to Lessig, September 11, 2000. See also http:// www.gnu.org/philosophy/words-to-avoid.html. Stallman suggested calling the project the “Copyright and Patent Conservancy.” }~ Abelson, a friend and colleague of Stallman’s at MIT, was not finicky about word choices, but he did believe that software donations should be directed to the Free Software Foundation, not to the envisioned project. FSF already existed, for one thing, but in addition, said Abelson, “It may be detrimental to have people initially associate this [new project] too closely with the FSF. . . . We need to craft a public position that will unify people. An FSF-style ‘let’s undo the effects of all those evil people licensing software’ is not what we want here.”~{ E-mail from Hal Abelson to Lessig, September 12, 2000. }~ Some people suggested attracting people to the conservancy by having “jewels” such as material from the estates of deceased artists. Another suggested hosting special licenses, such as the Open Audio License, a license issued by the Electronic Frontier Foundation in 2001 that lets musicians authorize the copying and reuse of their songs so long as credit is given and derivative songs can be shared. +Friendly critiques started arriving immediately. 
Stallman considered the proposal a “good idea overall,” but as usual he objected to the words, such as “intellectual property” and “copyright protection,” which he considered “propaganda for the other side.”~{ E-mail from Richard Stallman to Lessig, September 11, 2000. See also http://www.gnu.org/philosophy/words-to-avoid.html. Stallman suggested calling the project the “Copyright and Patent Conservancy.” }~ Abelson, a friend and colleague of Stallman’s at MIT, was not finicky about word choices, but he did believe that software donations should be directed to the Free Software Foundation, not to the envisioned project. FSF already existed, for one thing, but in addition, said Abelson, “It may be detrimental to have people initially associate this [new project] too closely with the FSF. . . . We need to craft a public position that will unify people. An FSF-style ‘let’s undo the effects of all those evil people licensing software’ is not what we want here.”~{ E-mail from Hal Abelson to Lessig, September 12, 2000. }~ Some people suggested attracting people to the conservancy by having “jewels” such as material from the estates of deceased artists. Another suggested hosting special licenses, such as the Open Audio License, a license issued by the Electronic Frontier Foundation in 2001 that lets musicians authorize the copying and reuse of their songs so long as credit is given and derivative songs can be shared. ={Stallman, Richard:Copyright’s Commons, and;Abelson, Hal:Free Software Foundation, and+1;Electronic Frontier Foundation (EFF);Free Software Foundation} The most difficult issue, said Abelson, was the economics of the project. The care and maintenance of donations, such as the master version of films, could be potentially huge expenses. Digitizing donated works could also be expensive. Finally, there were questions about the economic incentives to potential donors. Would people really wish to donate works that have significant cash value? 
@@ -1042,7 +1042,7 @@ Viewpoints quickly diverged on how a commons ought to be structured and what met For the next nine months, the group intensified its debate about how to build the envisioned conservancy. After law student Dotan Oliar sketched out possible “business models,” Saltzman persuaded a friend at McKinsey & Company, the consulting firm, to provide a pro bono assessment.~{ Dotan Oliar, “Memo on Creative Commons — Towards Formulating a Business Plan,” March 19, 2001. }~ “The McKinsey folks were very skeptical and, I think, had a hard time fitting this into their [business] framework,” recalled one student at the meeting, Chris Babbitt. After the meeting, he was convinced that Creative Commons could not possibly host a content commons: “It would just be huge amounts of material, huge costs, and we didn’t have the money for that.” ~{ Interview with Chris Babbitt, September 14, 2006. }~ ={Babbitt, Chris+1;McKinsey & Company;Oliar, Dotan} -Feeling the need to force some concrete decisions, Saltzman and Lessig convened twenty-eight people for an all-day meeting in Hauser Hall at Harvard Law School, on May 11, 2001, to hash out plans. “What we’re trying to do here is /{brand the public domain}/,” Lessig said. A briefing book prepared by Chris Babbitt posed a pivotal question to the group: Should Creative Commons be structured as a centralized Web site or as an distributed, open-source licensing protocol that would allow content to be spread across cyberspace? The centralized model could be “an eBay for opensource IP” or a more niche-based commons for out-of-print books, film, or poetry. A mock Web site was actually prepared to illustrate the scenario. The home page read: “The member sites listed on the CommonExchange have been certified by Creative Commons to offer high-quality, non-infringing content on an unrestricted basis. Please feel free to use and pass these works along to others. 
We invite you to donate works of your own to help maintain the digital Commons.”~{ The mock-up can be found at http://cyber.law.harvard.edu/creativecom mons/site.htm. }~ +Feeling the need to force some concrete decisions, Saltzman and Lessig convened twenty-eight people for an all-day meeting in Hauser Hall at Harvard Law School, on May 11, 2001, to hash out plans. “What we’re trying to do here is /{brand the public domain}/,” Lessig said. A briefing book prepared by Chris Babbitt posed a pivotal question to the group: Should Creative Commons be structured as a centralized Web site or as a distributed, open-source licensing protocol that would allow content to be spread across cyberspace? The centralized model could be “an eBay for opensource IP” or a more niche-based commons for out-of-print books, film, or poetry. A mock Web site was actually prepared to illustrate the scenario. The home page read: “The member sites listed on the CommonExchange have been certified by Creative Commons to offer high-quality, non-infringing content on an unrestricted basis. Please feel free to use and pass these works along to others. We invite you to donate works of your own to help maintain the digital Commons.”~{ The mock-up can be found at http://cyber.law.harvard.edu/creativecommons/site.htm. }~ ={public domain:branding of} The distributed commons model would resemble the Chicago Mercantile Exchange or the New York Stock Exchange — “a trusted matchmaker to facilitate the transaction of securing rights,” according to the briefing book. “Just as corporations or commodities producers must meet certain criteria before they are listed on the Exchange, we could condition ‘listing’ in the Commons on similar criteria, albeit reflecting open source rather than financial values.”~{ “Briefing Book for Creative Commons Inaugural Meeting,” May 7,2001, p.10. }~ The virtue of the distributed model was that it would shift costs, quality control, and digitization to users.
Creative Commons would serve mostly as a credentialing service and facilitator. On the other hand, giving up control would be fraught with peril — and what if Creative Commons’ intentions were ignored? @@ -1074,7 +1074,7 @@ A classical composer said he “loved the idea of a Nigerian high school chamber In short, there was no stampede for starting a public-domain conservancy or a set of licenses. Some worried that the CC licenses would be a “case of innovation where’s there’s no current demand.” Another person pointed out, more hopefully, that it could be a case of “changing the market demand with a new model.”~{ Oren Bracha and Dotan Oliar, “Memo: May 7th Consensus Regarding the Creative Commons Project,” August 20, 2001, p. 3, note 9. }~ -The Lessig caucus was clearly struggling with how best to engage with the networked environment. Napster had demonstrated that, in the dawning Internet age, creativity would increasingly be born, distributed, and viewed on the Web; print and mass media would be secondary venues. For a society still deeply rooted in print and mass media, this was a difficult concept to grasp. But Michael Carroll, the Washington lawyer who had earlier vetted the conservancy’s liability issues, shrewdly saw network dynamics as a potentially powerful tool for building new types of digital commons. In 2001, he had noticed how a bit of Internet folk art had become an overnight sensation. Mike Collins, an amateur cartoonist from Elmira, New York, had posted the cartoon below on Taterbrains, a Web site.~{ http://politicalhumor.about.com/od/funnypictures/ig/100-Funny-Pictures/ Confusing-Florida-Ballot.htm. }~ The image suddenly rocketed throughout the cyberlandscape. Everyone was copying it and sharing it with friends. +The Lessig caucus was clearly struggling with how best to engage with the networked environment. 
Napster had demonstrated that, in the dawning Internet age, creativity would increasingly be born, distributed, and viewed on the Web; print and mass media would be secondary venues. For a society still deeply rooted in print and mass media, this was a difficult concept to grasp. But Michael Carroll, the Washington lawyer who had earlier vetted the conservancy’s liability issues, shrewdly saw network dynamics as a potentially powerful tool for building new types of digital commons. In 2001, he had noticed how a bit of Internet folk art had become an overnight sensation. Mike Collins, an amateur cartoonist from Elmira, New York, had posted the cartoon below on Taterbrains, a Web site.~{ http://politicalhumor.about.com/od/funnypictures/ig/100-Funny-Pictures/Confusing-Florida-Ballot.htm. }~ The image suddenly rocketed throughout the cyberlandscape. Everyone was copying it and sharing it with friends. ={Carroll, Michael W.+4;Collins, Mike+4;Napster} { vs_db_1.png }http://viralspiral.cc/ @@ -1119,7 +1119,7 @@ As the lawyers brooded and debated the licensing terms, another complicated deba At this time, in 2001, the founder of the World Wide Web, Tim Berners-Lee, and others at the World Wide Web Consortium, based at MIT, were trying to conceptualize the protocols for a new “logical layer” of code on top of the World Wide Web. They called it the Semantic Web. The idea is to enable people to identify and retrieve information that is strewn across the Internet but not readily located through conventional computer searches. Through a software format known as RDF/XML,~[* RDF, or Resource Description Framework, is a way to make a statement about content in a digital artifact. 
XML, or Extensible Markup Language, is a way to write a specialized document format to send across the Web, in which certain content can be marked up, or emphasized, so that other computers can “read” it.]~ digital content could be tagged with machine-readable statements that would in effect say, “This database contains information about x and y.” Through Semantic Web protocols and metatags on content, it would be possible to conduct searches across many types of digital content — Web pages, databases, software programs, even digital sensors — that could yield highly specific and useful results. ={Berners-Lee, Tim;Semantic Web+6;World Wide Web:Semantic Web+6|protocols for+6;RDF/XML} -Unfortunately, progress in developing the Semantic Web has been bogged down in years of technical disagreement and indifference among the larger Web community. Some critics argue that the project has stalled because it was being driven by a small corps of elite software theorists focused on databases, and not by a wider pool of decentralized Web practitioners. In any case, the Creative Commons became one of the first test cases of trying to implement RDF/XML for the Semantic Web.~{ For background, see “The Semantic Web: An Introduction,” at http://in fomesh.net/2001/swintro; Aaron Swartz and James Hendler, “The Semantic Web: A Network of Content for the Digital City,” at http://blogspace.com/ rdf/SwartzHendler; and John Markoff, “Entrepreneurs See a Web Guided by Common Sense,” /{New York Times}/, November 12, 2006. }~ The project was led initially by Lisa Rein, a thirty-three-year-old data modeler who met Lessig at an O’Reilly open-source software conference. Lessig hired her as CC’s first technical director in late 2001 to embed the CC legal licenses in machine-readable formats. +Unfortunately, progress in developing the Semantic Web has been bogged down in years of technical disagreement and indifference among the larger Web community. 
Some critics argue that the project has stalled because it was being driven by a small corps of elite software theorists focused on databases, and not by a wider pool of decentralized Web practitioners. In any case, the Creative Commons became one of the first test cases of trying to implement RDF/XML for the Semantic Web.~{ For background, see “The Semantic Web: An Introduction,” at http://infomesh.net/2001/swintro; Aaron Swartz and James Hendler, “The Semantic Web: A Network of Content for the Digital City,” at http://blogspace.com/rdf/SwartzHendler; and John Markoff, “Entrepreneurs See a Web Guided by Common Sense,” /{New York Times}/, November 12, 2006. }~ The project was led initially by Lisa Rein, a thirty-three-year-old data modeler who met Lessig at an O’Reilly open-source software conference. Lessig hired her as CC’s first technical director in late 2001 to embed the CC legal licenses in machine-readable formats. ={Rein, Lisa+2;Swartz, Aaron;Lessig, Lawrence:CC licenses, and} Writing the XML code was not so difficult, said Rein; the real challenge was “deciding what needed to be included and how you represent the licenses as simply as possible.”~{ Interview with Lisa Rein, December 20, 2006. }~ This required the lawyers and the techies to have intense dialogues about how the law should be faithfully translated into software code, and vice versa. Once again, there were complicated problems to sort through: Should there be a central database of CC-licensed content? How could machine-readable code be adapted if the legal licenses were later modified? @@ -1213,7 +1213,7 @@ It soon became clear that very few people were choosing any of the five licenses Still another choice was offered to copyright holders, a “public domain dedication,” which is not a license so much as “an overt act of relinquishment in perpetuity” of any rights in the work. The public domain dedication places no restrictions whatsoever on subsequent reuses of the work. 
={public domain dedication} -To the first-time user, the licenses may seem a little daunting.~{ A FAQ at the Creative Commons Web site answers the most frequent user questions about the licenses. It is available at http://wiki.creativecommons .org/. }~ The full implications of using one or another license are not immediately obvious. The tagline for the licenses, “Some Rights Reserved,” while catchy, was not really self-explanatory. This became the next big challenge to Creative Commons, as we see in chapter 6: how to educate creators about a solution when they may not have realized they even had a problem. +To the first-time user, the licenses may seem a little daunting.~{ A FAQ at the Creative Commons Web site answers the most frequent user questions about the licenses. It is available at http://wiki.creativecommons.org/. }~ The full implications of using one or another license are not immediately obvious. The tagline for the licenses, “Some Rights Reserved,” while catchy, was not really self-explanatory. This became the next big challenge to Creative Commons, as we see in chapter 6: how to educate creators about a solution when they may not have realized they even had a problem. By December 2002, the three levels of code — legal, digital, and human — had been coordinated and finalized as version 1.0. The organization was set to go public, which it did at a splashy coming-out party in San Francisco. The gala featured appearances by the likes of rapper DJ Spooky (an ardent advocate for remix culture) and a London multimedia jam group, People Like Us. Lessig proudly introduced the licenses as “delivering on our vision of promoting the innovative reuse of all types of intellectual works, unlocking the potential of sharing and transforming others’ work.”~{ http://creativecommons.org/press-releases/entry/3476. 
}~ ={DJ Spooky;People Like Us;code:levels of;Lessig, Lawrence:CC licenses, and+2} @@ -1261,7 +1261,7 @@ Junell designed the now-familiar CC logo as a deliberate counterpoint to the cop In promoting its licenses, Creative Commons fashioned itself as a neutral, respectable defender of individual choice. “Our tools are just that — tools,” said Haughey, who was then developing the CC Web site. “Our model intentionally depends on copyright holders to take responsibility for how they use those tools. Or how they don’t use them: If you’re unsure and want to keep your full copyright, fine. If you choose to allow others to re-use your work, great.”~{ Matthew Haughey, “Blogging in the Public Domain,” Creative Commons blog post, February 5, 2003, at http://creativecommons.org/weblog/entry/3601. }~ While many CC users were enthusiastically bashing copyright law, Lessig and the CC staff made it a point to defend the basic principles of copyright law — while extolling the value of collaborative creativity and sharing under CC licenses. ={Haughey, Matt} -Despite praise by the heads of the Motion Picture Association of America and the Recording Industry Association of America, the licenses nonetheless did attract critics. Some in the music industry regarded the licenses as a Trojan horse that would dupe unsuspecting artists. David Israelite, president and CEO of the National Music Publishers’ Association, told /{Billboard}/, “My concern is that many who support Creative Commons also support a point of view that would take away people’s choices about what to do with their own property.”~{ Susan Butler, “Movement to Share Creative Works Raises Concerns in Music Circles,” /{Billboard}/, May 28, 2005.}~ /{Billboard}/ went on to cite the cautionary tale of a songwriter who was being kept alive by his AIDS medications, thanks to the royalties from a highly successful song. “No one should let artists give up their rights,” said Andy Fraser of the rock group Free. 
Other critics, such as John Dvorak of /{PC Magazine}/, called the CC licenses “humbug” and accused them of adding “some artificial paperwork and complexity to the mechanism [of copyright],” while weakening the rights that an author would otherwise enjoy.~{ John C. Dvorak, “Creative Commons Humbug: This Scheme Doesn’t Seem to Benefit the Public,” PC Magazine, July 28, 2005. }~ Still others had cultural scores to settle and criticized “anything advocated by clever, sleek young lawyers.”~{ Researchers at the Economic Observatory of the University of Openness, “Commercial Commons,” on the online journal /{Metamute}/, at http://www .metamute.org/?q=en/Commercial-Commons. }~ +Despite praise by the heads of the Motion Picture Association of America and the Recording Industry Association of America, the licenses nonetheless did attract critics. Some in the music industry regarded the licenses as a Trojan horse that would dupe unsuspecting artists. David Israelite, president and CEO of the National Music Publishers’ Association, told /{Billboard}/, “My concern is that many who support Creative Commons also support a point of view that would take away people’s choices about what to do with their own property.”~{ Susan Butler, “Movement to Share Creative Works Raises Concerns in Music Circles,” /{Billboard}/, May 28, 2005.}~ /{Billboard}/ went on to cite the cautionary tale of a songwriter who was being kept alive by his AIDS medications, thanks to the royalties from a highly successful song. “No one should let artists give up their rights,” said Andy Fraser of the rock group Free. Other critics, such as John Dvorak of /{PC Magazine}/, called the CC licenses “humbug” and accused them of adding “some artificial paperwork and complexity to the mechanism [of copyright],” while weakening the rights that an author would otherwise enjoy.~{ John C. Dvorak, “Creative Commons Humbug: This Scheme Doesn’t Seem to Benefit the Public,” PC Magazine, July 28, 2005. 
}~ Still others had cultural scores to settle and criticized “anything advocated by clever, sleek young lawyers.”~{ Researchers at the Economic Observatory of the University of Openness, “Commercial Commons,” on the online journal /{Metamute}/, at http://www.metamute.org/?q=en/Commercial-Commons. }~ ={Creative Commons (CC) licenses:critics of;Israelite, David;Recording Industry Association of America (RIAA);Dvorak, John;Fraser, Andy} Putting aside such quibbles and prejudices, the CC licenses seemed a benign enough idea. Given its reliance on copyright law, how could any entertainment lawyer object? Yet the real significance of the licenses was only appreciated by those who realized that a Great Value Shift was kicking in. For them, the licenses were a useful legal tool and cultural flag for building a new sharing economy. @@ -1272,7 +1272,7 @@ Putting aside such quibbles and prejudices, the CC licenses seemed a benign enou In retrospect, the CC licenses could not have been launched at a more propitious moment. Networked culture was exploding in 2003. Broadband was rapidly supplanting dial-up Internet access, enabling users to navigate the Web and share information at much faster speeds. Prices for personal computers were dropping even as computing speeds and memory capacity were soaring. Sophisticated new software applications were enabling users to collaborate in more powerful, user-friendly ways. The infrastructure for sharing was reaching a flashpoint. -Put another way, the original promise of the Internet as a gift economy was coming into its own. Originally built as a platform for efficient sharing among academic researchers, the Internet by 2003 was being used by some 600 million people worldwide.~{ Nielsen/Net Ratings estimated 585 million Internet users in 2002; the International Telecommunications Union estimated 665 million. See http://www2 .sims.berkeley.edu/research/proiects/how-much-info-2003/internet.htm.
}~ The open framework for sharing was no longer just a plaything of technophiles and academics; it was now insinuated into most significant corners of the economy and social life. As it scaled and grew new muscles and limbs, the Internet began to radically change the ways in which wealth is generated and allocated. +Put another way, the original promise of the Internet as a gift economy was coming into its own. Originally built as a platform for efficient sharing among academic researchers, the Internet by 2003 was being used by some 600 million people worldwide.~{ Nielsen/Net Ratings estimated 585 million Internet users in 2002; the International Telecommunications Union estimated 665 million. See http://www2.sims.berkeley.edu/research/proiects/how-much-info-2003/internet.htm. }~ The open framework for sharing was no longer just a plaything of technophiles and academics; it was now insinuated into most significant corners of the economy and social life. As it scaled and grew new muscles and limbs, the Internet began to radically change the ways in which wealth is generated and allocated. ={Internet:gift economy of+1} I call this the Great Value Shift — a deep structural change in how valuable things are created for commerce and culture. The shift is not only a fundamental shift in business strategy and organizational behavior, but in the very definition of wealth. On the Internet, wealth is not just financial wealth, nor is it necessarily privately held. Wealth generated through open platforms is often /{socially created value}/ that is shared, evolving, and nonmonetized. It hovers in the air, so to speak, accessible to everyone. @@ -1398,13 +1398,13 @@ The Web 2.0 environment was quite hospitable for the spread of the CC licenses. While technology and economics have been driving forces in shaping the new participatory platforms, much of their appeal has been frankly cultural. 
Amateur content on the Net may be raw and irregular, but it also tends to be more interesting and authentic than the highly produced, homogenized fare of commercial media. Some of it vastly outshines the lowest common denominator of mass media. Again, the cheap connectivity of the Internet has been key. It has made it possible for people with incredibly specialized interests to find one another and organize themselves into niche communities. For closeted homosexuals in repressive countries or isolated fans of the actor Wallace Beery, the Internet has enabled them to find one another and mutually feed their narrow interests. You name it, there are sites for it: the fans of obscure musicians, the collectors of beer cans, Iranian exiles, kite flyers. Freed of the economic imperative of attracting huge audiences with broad fare, niche-driven Internet content is able to connect with people’s personal passions and interests: a powerful foundation not just for social communities, but for durable markets. ={Internet:communication system, as+1} -This, truly, is one of the more profound effects of networking technologies: the subversion of the “blockbuster” economics of the mass media. It is becoming harder and more expensive for film studios and broadcast networks to amass the huge, cross-demographic audiences that they once could. In the networked environment, it turns out that a diversified set of niche markets can be eminently profitable with lower-volume sales. While Centralized Media require a supply-side “push” of content, the Internet enables a demand-side “pull” of content by users. This radically reduces transaction costs and enhances the economic appeal of niche production. It is easier and cheaper for a company (or single creator) to “pull” niche audiences through word of mouth than it is to pay for expensive “push” advertising campaigns. 
Specialty interests and products that once were dismissed as too marginal or idiosyncratic to be profitable can now flourish in small but robust “pull markets.”~{ David Bollier, “When Push Comes to Pull: The New Economy and Culture of Networking Technology” (Washington, DC: Aspen Institute, 2006), at http://www.aspeninstitute.org/atf/cf/%7BDEB6F227-659B-4EC8-8F84-8 DF23CA704F5%7D/2005InfoTechText.pdf. }~ +This, truly, is one of the more profound effects of networking technologies: the subversion of the “blockbuster” economics of the mass media. It is becoming harder and more expensive for film studios and broadcast networks to amass the huge, cross-demographic audiences that they once could. In the networked environment, it turns out that a diversified set of niche markets can be eminently profitable with lower-volume sales. While Centralized Media require a supply-side “push” of content, the Internet enables a demand-side “pull” of content by users. This radically reduces transaction costs and enhances the economic appeal of niche production. It is easier and cheaper for a company (or single creator) to “pull” niche audiences through word of mouth than it is to pay for expensive “push” advertising campaigns. Specialty interests and products that once were dismissed as too marginal or idiosyncratic to be profitable can now flourish in small but robust “pull markets.”~{ David Bollier, “When Push Comes to Pull: The New Economy and Culture of Networking Technology” (Washington, DC: Aspen Institute, 2006), at http://www.aspeninstitute.org/atf/cf/%7BDEB6F227-659B-4EC8-8F84-8DF23CA704F5%7D/2005InfoTechText.pdf. }~ ={Centralized Media:Internet vs.;Internet:Centralized Media vs.} The term associated with this phenomenon is the “Long Tail” — the title of a much-cited article by Chris Anderson in the October 2004 issue of /{Wired}/ magazine, later expanded into a book. 
Anderson explained the “grand transition” now under way: ={Anderson, Chris+2;Long Tail+3} -_1 For too long we’ve been suffering the tyranny of lowestcommon-denominator fare, subjected to brain-dead summer blockbusters and manufactured pop. Why? Economics. Many of our assumptions about popular taste are actually artifacts of poor supply-and-demand matching — a market response to inefficient distribution. . . . Hit-driven economics is a creation of an age without enough room to carry everything for everybody. Not enough shelf space for all the CDs, DVDs, and games produced. Not enough screens to show all the available movies. . . .~{ Chris Anderson, “The Long Tail,” /{Wired}/, October 2004, at http://www.wired .com/wired/archive/12.10/tail.html. }~ +_1 For too long we’ve been suffering the tyranny of lowestcommon-denominator fare, subjected to brain-dead summer blockbusters and manufactured pop. Why? Economics. Many of our assumptions about popular taste are actually artifacts of poor supply-and-demand matching — a market response to inefficient distribution. . . . Hit-driven economics is a creation of an age without enough room to carry everything for everybody. Not enough shelf space for all the CDs, DVDs, and games produced. Not enough screens to show all the available movies. . . .~{ Chris Anderson, “The Long Tail,” /{Wired}/, October 2004, at http://www.wired.com/wired/archive/12.10/tail.html. }~ The “Long Tail” refers to the huge potential markets that can be created for low-volume niche books, CD, DVDs, and other products. More than half of Amazon’s book sales, for example, come from books that rank below its top 130,000 titles. The implication is that “the market for books that are not even sold in the average bookstore is larger than the market for those that are,” writes Anderson. 
“In other words, the potential book market may be twice as big as it appears to be, if only we can get over the economics of scarcity.” ={Amazon} @@ -1469,11 +1469,11 @@ In January 2003, a month after the CC licenses were released, Doctorow published _1 Well, it’s a long story, but to shorten it up: first-time novelists have a tough row to hoe. Our publishers don’t have a lot of promotional budget to throw at unknown factors like us. Mostly, we rise and fall based on word-of-mouth. I’m not bad at word-of-mouth. I have a blog, Boing Boing (http://boingboing.net), where I do a /{lot}/ of word-of-mouthing. I compulsively tell friends and strangers about things I like. And telling people about stuff is /{way, way}/ easier if I can just send it to ’em. Way easier.~{ Cory Doctorow, “A Note About This Book,” February 12, 2004, and “A Note About This Book,” January 9, 2003, in /{Down and Out in the Magic Kingdom}/, available at http://www.craphound.com/down. }~ -A year later, Doctorow announced that his “grand experiment” was a success; in fact, he said, “my career is turning over like a goddamned locomotive engine.” More than thirty thousand people had downloaded the book within a day of its posting. He proceeded to release a collection of short stories and a second novel under a CC license.
He also rereleased /{Down and Out in the Magic Kingdom}/ under a less restrictive CC license — an Attribution, NonCommercial, ShareAlike license (BY-NC-SA), which allows readers to make their own translations, radio and film adaptations, sequels, and other remixes of the novel, so long as they are made available on the same terms.~{ Anna Weinberg,“Buying the Cow, Though the Milk Is Free: Why Some Publishers are Digitizing Themselves,” June 24, 2005, /{Book Standard}/, June 24, 2005, available at http://www.thebookstandard.com/bookstandard/news/publisher/ article_display.jsp?vnu_content_id=1000968186.}~ +A year later, Doctorow announced that his “grand experiment” was a success; in fact, he said, “my career is turning over like a goddamned locomotive engine.” More than thirty thousand people had downloaded the book within a day of its posting. He proceeded to release a collection of short stories and a second novel under a CC license. He also rereleased /{Down and Out in the Magic Kingdom}/ under a less restrictive CC license — an Attribution, NonCommercial, ShareAlike license (BY-NC-SA), which allows readers to make their own translations, radio and film adaptations, sequels, and other remixes of the novel, so long as they are made available on the same terms.~{ Anna Weinberg,“Buying the Cow, Though the Milk Is Free: Why Some Publishers are Digitizing Themselves,” June 24, 2005, /{Book Standard}/, June 24, 2005, available at http://www.thebookstandard.com/bookstandard/news/publisher/article_display.jsp?vnu_content_id=1000968186. 
}~ With some sheepish candor, Doctorow conceded: “I wanted to see if the sky would fall: you see writers are routinely schooled by their peers that maximal copyright is the only thing that stands between us and penury, and so ingrained was this lesson in me that even though I had the intellectual intuition that a ‘some rights reserved’ regime would serve me well, I still couldn’t shake the atavistic fear that I was about to do something very foolish indeed.” -By June 2006, /{Down and Out in the Magic Kingdom}/ had been downloaded more than seven hundred thousand times. It had gone through six printings, many foreign translations, and two competing online audio adaptations made by fans. “Most people who download the book don’t end up buying it,” Doctorow conceded, “but they wouldn’t have bought it in any event, so I haven’t lost any sales. I’ve just won an audience. A tiny minority of downloaders treats the free e-book as a substitute for the printed book — those are the lost sales. But a much larger minority treats the e-book as an enticement to buy the printed book. They’re gained sales. As long as gained sales outnumber lost sales, I’m ahead of the game. After all, distributing nearly a million copies of my book has cost me nothing.”~{ Cory Doctorow, “Giving it Away,” Forbes.com, December 1, 2006, available at http://www.forbes.com/2006/11/30/cory-doctorow-copyright-tech-media _cz_cd_books06_1201doctorow.html. }~ In 2008, Doctorow’s marketing strategy of giving away online books to stimulate sales of physical books paid off in an even bigger way. His novel for teenagers, /{Little Brother}/, about a youthful hacker who takes on the U.S. government after it becomes a police state, spent weeks on the /{New York Times}/ bestseller list for children’s books. +By June 2006, /{Down and Out in the Magic Kingdom}/ had been downloaded more than seven hundred thousand times. 
It had gone through six printings, many foreign translations, and two competing online audio adaptations made by fans. “Most people who download the book don’t end up buying it,” Doctorow conceded, “but they wouldn’t have bought it in any event, so I haven’t lost any sales. I’ve just won an audience. A tiny minority of downloaders treats the free e-book as a substitute for the printed book — those are the lost sales. But a much larger minority treats the e-book as an enticement to buy the printed book. They’re gained sales. As long as gained sales outnumber lost sales, I’m ahead of the game. After all, distributing nearly a million copies of my book has cost me nothing.”~{ Cory Doctorow, “Giving it Away,” Forbes.com, December 1, 2006, available at http://www.forbes.com/2006/11/30/cory-doctorow-copyright-tech-media_cz_cd_books06_1201doctorow.html. }~ In 2008, Doctorow’s marketing strategy of giving away online books to stimulate sales of physical books paid off in an even bigger way. His novel for teenagers, /{Little Brother}/, about a youthful hacker who takes on the U.S. government after it becomes a police state, spent weeks on the /{New York Times}/ bestseller list for children’s books. It is perhaps easier for a sci-fi futurist like Doctorow than a publishing business to take such a wild leap into the unknown. But that, too, is an important insight: artists are more likely to lead the way into the sharing economy than entrenched industries. “I’d rather stake my future on a literature that people care about enough to steal,” said Doctorow, “than devote my life to a form that has no home in the dominant medium of the century.” Book lovers and authors will pioneer the future; corporate publishing will grudgingly follow, or be left behind. 
@@ -1491,10 +1491,10 @@ Free culture publishing models are popping up in many unusual quarters these day Founder Hugh McGuire said the inspiration for LibriVox was a distributed recording of Lessig’s book /{Free Culture}/ read by bloggers and podcasters, chapter by chapter. “After listening to that, it took me a while to figure out how to record things on my computer (which I finally did, thanks to free software Audacity). Brewster Kahle’s call for ‘Universal Access to all human knowledge’ was another inspiration, and the free hosting provided by archive.org and ibiblio.org meant that LibriVox was possible: there was no worry about bandwidth and storage. So the project was started with an investment of $0, which continues to be our global budget.” LibriVox’s mission, said McGuire, is the “acoustical liberation of books in the public domain.” ={Kahle, Brewster;LibriVox;McGuire, Hugh;bloging} -Several publishing businesses now revolve around CC licenses. Wikitravel is a collaborative Web site that amasses content about cities and regions around the world; content is licensed under the CC Attribution, ShareAlike license (BY-SA).~{ “Wikitravel Press launches,” Creative Commons blog, August 3, 2007, at http://creativecommons.org/weblog/entry/7596. See also Mia Garlick, “Wikitravel,” Creative Commons blog, June 20, 2006, at http://creativecom mons.org/text/wikitravel. }~ In 2007, its founder joined with a travel writer to start Wikitravel Press, which now publishes travel books in a number of languages. Like the Wikitravel Web pages, the text in the books can be freely copied and reused. +Several publishing businesses now revolve around CC licenses. Wikitravel is a collaborative Web site that amasses content about cities and regions around the world; content is licensed under the CC Attribution, ShareAlike license (BY-SA).~{ “Wikitravel Press launches,” Creative Commons blog, August 3, 2007, at http://creativecommons.org/weblog/entry/7596. 
See also Mia Garlick, “Wikitravel,” Creative Commons blog, June 20, 2006, at http://creativecommons.org/text/wikitravel. }~ In 2007, its founder joined with a travel writer to start Wikitravel Press, which now publishes travel books in a number of languages. Like the Wikitravel Web pages, the text in the books can be freely copied and reused. ={Wikitravel Press} -Another new business using CC licenses is Lulu, a technology company started by Robert Young, the founder of the Linux vendor Red Hat and benefactor for the Center for the Public Domain.Lulu lets individuals publish and distribute their own books, which can be printed on demand or downloaded. Lulu handles all the details of the publishing process but lets people control their content and rights. Hundreds of people have licensed their works under the CC ShareAlike license and Public Domain Dedication, and under the GNU Project’s Free Documentation License.~{ Mia Garlick, “Lulu,” Creative Commons blog, May 17, 2006, at http://creative commons.org/text/lulu. }~ +Another new business using CC licenses is Lulu, a technology company started by Robert Young, the founder of the Linux vendor Red Hat and benefactor for the Center for the Public Domain. Lulu lets individuals publish and distribute their own books, which can be printed on demand or downloaded. Lulu handles all the details of the publishing process but lets people control their content and rights. Hundreds of people have licensed their works under the CC ShareAlike license and Public Domain Dedication, and under the GNU Project’s Free Documentation License.~{ Mia Garlick, “Lulu,” Creative Commons blog, May 17, 2006, at http://creativecommons.org/text/lulu. 
}~ ={Lulu;Red Hat;Young, Robert;Center for the Public Domain;GNU Project:GNU FDL;public domain:Center for Public Domain} As more of culture and commerce move to the Internet, the question facing the book industry now is whether the text of a book is more valuable as a physical object (a codex) or as a digital file (intangible bits that can circulate freely), or some combination of the two. Kevin Kelly, the former editor of /{Wired}/ magazine, once explained: “In a regime of superabundant free copies, copies lose value. They are no longer the basis of wealth. Now relationships, links, connection and sharing are. Value has shifted away from a copy toward the many ways to recall, annotate, personalize, edit, authenticate, display, mark, transfer and engage a work.”~{ Kevin Kelly, “Scan This Book!” /{New York Times Magazine}/, May 14, 2006, p. 43. }~ @@ -1505,7 +1505,7 @@ What this means in practice, Kelly has pointed out, is that books become more va Needless to say, most book publishers and authors’ organizations are not yet prepared to embrace this newfangled value proposition. It seems way too iffy. A “sharing” business model would seemingly cannibalize their current revenues and copyright control with little guarantee of doing better in an open, online milieu. The bigger problem may be the cultural prejudice that an absolute right of control over any possible uses of a book is the best way to make money. ={open business models} -In general, the publishing trade remains skeptical of the Internet, clueless about how to harness its marketing power, and strangers to CC licenses. And it could be years before mainstream publishing accepts some of the counterintuitive notions that special-interest Internet communities will drive publishing in the future. In a presentation that caused a stir in the book industry, futurist Mike Shatzkin said in May 2007 that this is already happening in general trade publishing: “We’re close to a tipping point, or maybe we’re past it . . . 
where Web-based branding will have more credibility than print, because print, needing more horizontal reach to be viable, won’t deliver the attention of the real experts and megaphones in each field.”~{ Mike Shatzkin, “The End of General Trade Publishing Houses: Death or Rebirth in a Niche-by-Niche World,” presented to the Book Expo America, New York, May 31, 2007, available at http://www.idealog.com/speeches/ endoftrade.htm. }~ +In general, the publishing trade remains skeptical of the Internet, clueless about how to harness its marketing power, and strangers to CC licenses. And it could be years before mainstream publishing accepts some of the counterintuitive notions that special-interest Internet communities will drive publishing in the future. In a presentation that caused a stir in the book industry, futurist Mike Shatzkin said in May 2007 that this is already happening in general trade publishing: “We’re close to a tipping point, or maybe we’re past it . . . where Web-based branding will have more credibility than print, because print, needing more horizontal reach to be viable, won’t deliver the attention of the real experts and megaphones in each field.”~{ Mike Shatzkin, “The End of General Trade Publishing Houses: Death or Rebirth in a Niche-by-Niche World,” presented to the Book Expo America, New York, May 31, 2007, available at http://www.idealog.com/speeches/endoftrade.htm. }~ ={Shatzkin, Mike} 2~ DIY Videos and Film @@ -1534,7 +1534,7 @@ One of the more daring experiments in film production is being pioneered by the Ton Roosendaal, who directs the Blender Institute, is trying to demonstrate that a small studio can develop a virtuous cycle of economically sustainable creativity using open-source software, Creative Commons licenses, and talented programmers and artists from around the world. “We give programmers the freedom to do their best, and what they want to do is improve the technology,” he said. 
“The market is too hyper-rational and nailed down and filled with limits,” he argues, referring to his peers at major animation studios. “Open source is free of most of these constraints.”~{ Ton Roosendaal remarks at conference, “Economies of the Commons,” De Balie Centre for Culture and Politics, Amsterdam, April 10–12, 2008. }~ ={Roosendaal, Ton} -In April 2008, the Blender Institute released a ten-minute animated short, /{Big Buck Bunny}/, which features a kind-hearted, fat white bunny who endures the abuse of three stone-throwing rodents until they smash a beautiful butterfly with a rock — at which point the bunny rallies to teach the bullies a lesson.~{ The film can be downloaded at http://www.bigbuckbunny.org/index.php/ download. }~ The film uses cutting-edge computer-generated animation techniques that rival anything produced by Pixar, the Hollywood studio responsible for /{Toy Story}/, /{Cars}/, and /{Ratatouille}/. /{Big Buck Bunny}/ is licensed under a CC Attribution license, which means the digital content can be used by anyone for any purpose so long as credit is given to the Blender Institute. +In April 2008, the Blender Institute released a ten-minute animated short, /{Big Buck Bunny}/, which features a kind-hearted, fat white bunny who endures the abuse of three stone-throwing rodents until they smash a beautiful butterfly with a rock — at which point the bunny rallies to teach the bullies a lesson.~{ The film can be downloaded at http://www.bigbuckbunny.org/index.php/download. }~ The film uses cutting-edge computer-generated animation techniques that rival anything produced by Pixar, the Hollywood studio responsible for /{Toy Story}/, /{Cars}/, and /{Ratatouille}/. /{Big Buck Bunny}/ is licensed under a CC Attribution license, which means the digital content can be used by anyone for any purpose so long as credit is given to the Blender Institute. 
={Big Buck Bunny (animated short)+1} /{Big Buck Bunny}/ was initially distributed to upfront investors as a DVD set that includes extras such as interviews, outtakes, deleted scenes, and the entire database used in making the film. Then, to pique wider interest in sales of the DVD set, priced at thirty-four euros, a trailer was released on the Internet. This resulted in extensive international press coverage and blog exposure. Early signs are promising that Blender will be able to continue to make highquality animation on a fairly modest budget without worries about illegal downloads or a digital rights management system. The Blender production model also has the virtue of enabling access to top creative talent and cutting-edge animation technologies as well as efficient distribution to paying audiences on a global scale. @@ -1554,7 +1554,7 @@ Media reform activist Harold Feld offers a succinct overview of why creativity i _1 The 1990s saw a number of factors that allowed the major labels to push out independents and dominate the market with their own outrageously priced and poorly produced products: consolidation in the music industry, the whole “studio system” of pumping a few big stars to the exclusion of others, the consolidation in music outlets from mom-andpop record stores to chains like Tower Records and retail giants like Wal-Mart that exclude indies and push the recordings promoted by major labels, and the consolidation of radio — which further killed indie exposure and allowed the labels to artificially pump their selected “hits” through payola. All this created a cozy cartel that enjoyed monopoly profits. ={music:music industry+1} -_1 As a result, the major labels, the mainstream retailers, and the radio broadcasters grew increasingly out of touch with what listeners actually wanted. But as long as the music cartel controlled what the vast majority of people got to hear, it didn’t matter . . . 
The music cartel remained the de facto only game in town.~{ Harold Feld, “CD Sales Dead? Not for Indies!” blog post on Public Knowledge Web site, March 27, 2007, at http://www.publicknowledge.org/node/ 890. }~ +_1 As a result, the major labels, the mainstream retailers, and the radio broadcasters grew increasingly out of touch with what listeners actually wanted. But as long as the music cartel controlled what the vast majority of people got to hear, it didn’t matter . . . The music cartel remained the de facto only game in town.~{ Harold Feld, “CD Sales Dead? Not for Indies!” blog post on Public Knowledge Web site, March 27, 2007, at http://www.publicknowledge.org/node/890. }~ Changing the music industry is obviously a major challenge that is not going to be solved overnight. Still, there is a growing effort led by indie musicians, small record labels, Internet music entrepreneurs, and advocacy groups such as the Future of Music Coalition to address these problems. Creative Commons is clearly sympathetic, but has largely focused on a more modest agenda — enabling a new universe of shareable music to arise. Its chief tools for this mission, beyond the CC licenses, are new software platforms for legal music remixes, online commons that legally share music, and new business models that respect the interests of both fans and artists. Ultimately, it is hoped that a global oeuvre of shareable music will emerge. Once this body of music matures, attracting more artists and fans in a self-sustaining viral spiral, the record industry may be forced to give up its dreams of perfect control of how music may circulate and adopt fan-friendly business practices. ={Future of Music Coalition} @@ -1562,7 +1562,7 @@ Changing the music industry is obviously a major challenge that is not going to This, at least, is the theory, as Lessig explains it. 
He calls it the “BMI strategy,” a reference to the strategy that broadcasters and musicians used to fight ASCAP’s monopoly control over radio music in the early 1940s. ASCAP, the American Society of Composers, Authors and Publishers, is a nonprofit organization that collects royalties for musical performances. At the time, ASCAP required artists to have five hits before it would serve as a collection agency for them, a rule that privileged the playing of pop music on the radio at the expense of rhythm and blues, jazz, hillbilly, and ethnic music. Then, over the course of eight years, ASCAP raised its rates by 450 percent between 1931 and 1939 — at which point, ASCAP then proposed /{doubling}/ its rates for 1940. In protest, many radio stations refused to play ASCAP-licensed music. They formed a new performance-rights body, BMI, or Broadcast Music, Inc., which sought to break the ASCAP monopoly by offering free arrangements of public-domain music to radio stations. They also charged lower rates than ASCAP for licensing music and offered better contracts for artists.~{ Donald Clarke, /{The Rise and Fall of Popular Music}/, chapter 11. }~ ={ASCAP+1;BMI (Broadcast Music, Inc.)+3;music:ASCAP+l;Lessig, Lawrence:CC licenses, and+2|music, and+2} -“The Internet is today’s broadcasters,” said Lessig in a 2006 speech. “They are facing the same struggle.”~{ Lessig explained his BMI strategy at a speech, “On Free, and the Differences Between Culture and Code,” at the 23d Chaos Communications Conference (23C3) in Berlin, Germany, December 30, 2006; video can be watched at http://video.google.com/videoplay?docid=7661663613180520595&q= 23c3. }~ Just as ASCAP used its monopoly power to control what music could be heard and at what prices, he said, so today’s media corporations want to leverage their control over content to gain control of the business models and technologies of digital environments. 
When Google bought YouTube, one-third of the purchase price of $1.65 billion was allegedly a financial reserve to deal with any copyright litigation, said Lessig. This is how the incumbent media world is trying to stifle the emergence of free culture. +“The Internet is today’s broadcasters,” said Lessig in a 2006 speech. “They are facing the same struggle.”~{ Lessig explained his BMI strategy at a speech, “On Free, and the Differences Between Culture and Code,” at the 23d Chaos Communications Conference (23C3) in Berlin, Germany, December 30, 2006; video can be watched at http://video.google.com/videoplay?docid=7661663613180520595&q=23c3. }~ Just as ASCAP used its monopoly power to control what music could be heard and at what prices, he said, so today’s media corporations want to leverage their control over content to gain control of the business models and technologies of digital environments. When Google bought YouTube, one-third of the purchase price of $1.65 billion was allegedly a financial reserve to deal with any copyright litigation, said Lessig. This is how the incumbent media world is trying to stifle the emergence of free culture. ={Google;YouTube} The same questions that once confronted broadcasters are now facing Internet innovators, Lessig argues: “How do we free the future from the dead hand of the past? What do we do to make it so they can’t control how technology evolves?” With copyright terms lasting so long, it is not really feasible to try to use public-domain materials to compete with a commercial cartel. Lessig’s answer is a BMI-inspired solution that uses the CC licenses to create a new body of “free” works that, over time, can begin to compete with popular works. The legendary record producer Jerry Wexler recalled how ASCAP marginalized R & B, country, folk, and ethnic music, but “once the lid was lifted — which happened when BMI entered the picture — the vacuum was filled by all these archetypal musics. 
BMI turned out to be the mechanism that released all those primal American forms of music that fused and became rock-androll.”~{ From BMI, Inc., Web site, at http://www.bmi.com/genres/entry/533380. }~ Lessig clearly has similar ambitions for Creative Commons. @@ -1574,7 +1574,7 @@ For now, the subculture of CC-licensed music remains something of a fringe movem Creative Commons’s primary task is practical — to help musicians reach audiences directly and reap more of the financial rewards of their music. So far, a wide range of indie bands, hip-hop artists, and bohemian experimentalists of all stripes have used the licenses. One of the most popular is the Attribution, NonCommercial license, which lets artists share their works while getting credit and retaining commercial rights. A number of marquee songwriters and performers — David Byrne, Gilberto Gil, the Beastie Boys, Chuck D — have also used CC licenses as a gesture of solidarity with free culture artists and as an enlightened marketing strategy. Inviting people to remix your songs is a great way to engage your fan base and sell more records. And tagging your music with a CC license, at least for now, wraps an artist in a mantle of tech sophistication and artistic integrity. ={Beastie Boys;Byrne, David;Chuck D;Gil, Gilberto} -Guitarist Jake Shapiro was one of the first musicians to show the marketing potential of unleashing free music on the Internet. In 1995, Shapiro put MP3 files of music by his band, Two Ton Shoe, on the group’s Web site. Within a few years, Two Ton Shoe was one of the most-downloaded bands on the Internet, developing fan bases in Italy, Brazil, Russia, and South Korea. One day Shapiro received a phone call out of the blue from a South Korean concert promoter. He wanted to know if the band would fly over to Seoul to perform four concerts. It turned out that fans in South Korea, where fast broadband connections are the norm, had discovered Two Ton Shoe through file sharing. 
A local CD retailer kept getting requests for the band’s music, which led him to contact a concert promoter. In August 2005, Shapiro and his buddies arrived in Seoul as conquering rock stars, selling out all four of their concerts. “The kids who showed up knew all the words to the songs,” Shapiro recalled. A year later, the band signed a deal to distribute a double CD to East Asia.~{ Shapiro described his experiences at the “Identity Mashup Conference,” June 19–21, 2006, hosted by the Berkman Center for Internet and Society at Harvard Law School, at http://blogs.law.harvard.edu/mediaberkman/2006/ 06/28/id-mashup-2006-day-two-the-commons-open-apis-meshups-andmashups. His band’s Web site is at http://www.twotonshoe.com. }~ +Guitarist Jake Shapiro was one of the first musicians to show the marketing potential of unleashing free music on the Internet. In 1995, Shapiro put MP3 files of music by his band, Two Ton Shoe, on the group’s Web site. Within a few years, Two Ton Shoe was one of the most-downloaded bands on the Internet, developing fan bases in Italy, Brazil, Russia, and South Korea. One day Shapiro received a phone call out of the blue from a South Korean concert promoter. He wanted to know if the band would fly over to Seoul to perform four concerts. It turned out that fans in South Korea, where fast broadband connections are the norm, had discovered Two Ton Shoe through file sharing. A local CD retailer kept getting requests for the band’s music, which led him to contact a concert promoter. In August 2005, Shapiro and his buddies arrived in Seoul as conquering rock stars, selling out all four of their concerts. “The kids who showed up knew all the words to the songs,” Shapiro recalled. 
A year later, the band signed a deal to distribute a double CD to East Asia.~{ Shapiro described his experiences at the “Identity Mashup Conference,” June 19–21, 2006, hosted by the Berkman Center for Internet and Society at Harvard Law School, at http://blogs.law.harvard.edu/mediaberkman/2006/06/28/id-mashup-2006-day-two-the-commons-open-apis-meshups-and-mashups. His band’s Web site is at http://www.twotonshoe.com. }~ ={Shapiro, Jake;Two Ton Shoe} While such stories of viral marketing success are not common, neither are they rare. Lots of bands now promote themselves, and find admiring (paying) fans, by posting their music, for free, on Web sites and file-sharing sites. Perhaps the most scrutinized example was Radiohead’s decision to release its album /{In Rainbows}/ for free online, while inviting fans to pay whatever they wanted. (The band did not release any numbers, but considered the move a success. They later released the album through conventional distribution channels as well.)~{ Jon Pareles, “Pay What You Want for This Article,” /{New York Times}/, December 9, 2007. }~ @@ -1586,7 +1586,7 @@ Just as previous generations of fans came together around FM radio or live perfo It is also why the Creative Commons licenses have acquired such cachet. They have come to be associated with musicians who honor the integrity of music making. They symbolize the collective nature of creativity and the importance of communing freely with one’s fans. Nimrod Lev, a prominent Israeli musician and supporter of the CC licenses, received considerable press coverage in his country for a speech that lamented the “cunning arrangement” (in Israeli slang, /{combina}/) by which the music industry has betrayed people’s love of music, making it “only a matter of business and commerce.” Said Lev: ={music:music industry+1;Lev, Nimrod+2} -_1 The music industry treats its consumer as a consumer of sex, not of love, the love of music. 
Just like everything else: a vacuum without values or meaning. But it is still love that everyone wants and seeks. . . . The music vendors knew then [a generation ago] what they have forgotten today, namely that we must have cultural heroes: artists that are not cloned in a manner out to get our money. There was an added value with a meaning: someone who spoke to our hearts in difficult moments, and with that someone, we would walk hand in hand for a while. We had loyalty and love, and it all meant something.~{ Nimrod Lev, “The Combina Industry,” November 16, 2004, at http://law .haifa.ac.il/techlaw/new/try/eng/nimrod.htm. }~ +_1 The music industry treats its consumer as a consumer of sex, not of love, the love of music. Just like everything else: a vacuum without values or meaning. But it is still love that everyone wants and seeks. . . . The music vendors knew then [a generation ago] what they have forgotten today, namely that we must have cultural heroes: artists that are not cloned in a manner out to get our money. There was an added value with a meaning: someone who spoke to our hearts in difficult moments, and with that someone, we would walk hand in hand for a while. We had loyalty and love, and it all meant something.~{ Nimrod Lev, “The Combina Industry,” November 16, 2004, at http://law.haifa.ac.il/techlaw/new/try/eng/nimrod.htm. }~ At the risk of sounding naïve, Lev said he wanted to stand up for the importance of “authenticity and empathy and my own truth” in making music. It is a complaint that echoes throughout the artistic community globally. A few years ago, Patti Smith, the punk rocker renowned for her artistic integrity, decried the “loss of our cultural voice” as the radio industry consolidated and as music television became a dominant force. She grieved for the scarcity of places for her to “feel connected” to a larger musical community of artists and fans.~{ Patti Smith at a panel at the National Conference for Media Reform, St. 
Louis, sponsored by Free Press, May 14, 2005. }~ ={Smith, Patti} @@ -1614,7 +1614,7 @@ The impetus for a solution to the sampling problem started with Negativland, an As an experienced sampler of music, Negativland and collagist People Like Us (aka Vicki Bennett) asked Creative Commons if it would develop and offer a music sampling license. Don Joyce of Negativland explained: ={Joyce, Don} -_1 This would be legally acknowledging the now obvious state of modern audio/visual creativity in which quoting, sampling, direct referencing, copying and collaging have become a major part of modern inspiration. [A sampling option would] stop legally suppressing it and start culturally encouraging it — because it’s here to stay. That’s our idea for encouraging a more democratic media for all of us, from corporations to the individual.~{ Glenn Otis Brown, “Mmm . . . Free Samples (Innovation la),” Creative Commons blog, March 11, 2003, at http://creativecommons.org/weblog/entry/ 3631. }~ +_1 This would be legally acknowledging the now obvious state of modern audio/visual creativity in which quoting, sampling, direct referencing, copying and collaging have become a major part of modern inspiration. [A sampling option would] stop legally suppressing it and start culturally encouraging it — because it’s here to stay. That’s our idea for encouraging a more democratic media for all of us, from corporations to the individual.~{ Glenn Otis Brown, “Mmm . . . Free Samples (Innovation la),” Creative Commons blog, March 11, 2003, at http://creativecommons.org/weblog/entry/3631. }~ With legal help from Cooley Godward Kronish and Wilson, Sonsini, Goodrich & Rosati, Creative Commons did just that. During its consultations with the remix community, Creative Commons learned that Gilberto Gil, the renowned /{tropicalismo}/ musician and at the time the Brazilian minister of culture, had been thinking along similar lines, and so it received valuable suggestions and support from him. 
={Cooley Godward Kronish;Wilson, Sonsini, Goodrich & Rosati;Gil, Gilberto} @@ -1624,26 +1624,26 @@ In 2005, Creative Commons issued the Sampling license as a way to let people tak The CC Sampling license only whetted the imagination of people who wanted to find new ways to sample, share, and transform music. Neeru Paharia, then the assistant director of the Creative Commons, came up with the idea of developing ccMixter, a software platform for remixing music on the Web.~{ See http://wiki.creativecommons.org/ccMixter. Interview with Mike Linksvayer, February 7, 2007, and Neeru Paharia, April 13, 2007. }~ Paharia realized one day that “this whole remixing and sharing ecology is about getting feedback on who’s using your work and how it’s evolving. That’s almost half the pleasure.”~{ Interview with Neeru Paharia, April 13, 2007. }~ So the organization developed a Web site that would allow people to upload music that could be sampled and remixed. The site has about five thousand registered users, which is not terribly large, but it is an enthusiastic and active community of remix artists that acts as a great proof of concept while promoting the CC licenses. There are other, much larger remix sites on the Internet, such as Sony’s ACIDplanet, but such sites are faux commons. They retain ownership in the sounds and remixes that users make, and no derivative or commercial versions are allowed. ={Paharia, Neeru} -One feature of viral spirals is their propensity to call forth a jumble of new projects and unexpected partners. The CC licenses have done just that for music. ccMixter has joined with Opsound to offer a joint “sound pool” of clips licensed under an Attribution ShareAlike license. 
It also supports Freesound, a repository of more than twenty thousand CC-licensed samples ranging from waterfalls to crickets to music.~{ Neeru Paharia, “Opsound’s Sal Randolph,” Creative Commons blog, October 1, 2005, at http://creativecommons.org/audio/opsound; Mike Linksvayer, “Freesound,” Creative Commons blog, October 1, 2005, at http://creative commons.org/audio/freesound; Matt Haughey, “Free Online Music Booms as SoundClick Offers Creative Commons Licenses,” Creative Commons blog, August 11, 2004. }~ +One feature of viral spirals is their propensity to call forth a jumble of new projects and unexpected partners. The CC licenses have done just that for music. ccMixter has joined with Opsound to offer a joint “sound pool” of clips licensed under an Attribution ShareAlike license. It also supports Freesound, a repository of more than twenty thousand CC-licensed samples ranging from waterfalls to crickets to music.~{ Neeru Paharia, “Opsound’s Sal Randolph,” Creative Commons blog, October 1, 2005, at http://creativecommons.org/audio/opsound; Mike Linksvayer, “Freesound,” Creative Commons blog, October 1, 2005, at http://creativecommons.org/audio/freesound; Matt Haughey, “Free Online Music Booms as SoundClick Offers Creative Commons Licenses,” Creative Commons blog, August 11, 2004. }~ Runoff Records, Inc., a record label, discovered a remix artist who teaches physics and calculus and goes by the name of Minus Kelvin. Runoff heard a podcast of Kelvin’s CC-licensed music, and signed him up, along with another ccMixter contributor, to do music for three seasons of the television show /{America’s Next Top Model}/.~{ Neeru Paharia, “Minus Kelvin Discovered on ccMixter,” Creative Commons blog, May 17, 2005, at http://creativecommons.org/weblog/archive/2005/5. 
}~ A few months later, two ccMixter fans based in Poland and Holland started an online record label, DiSfish, that gives 5 percent of all sale proceeds to CC, another 5 percent to charity, with the remainder split between the label and the artist. All music on the label is licensed under CC.~{ Cezary Ostrowski from Poland and Marco Raaphorst from Holland met online at ccMixter and decided to go into business together. They started an online label called DiSfish. }~ -The CC licenses are not just the province of daring remix artists and other experimentalists. Disappointed by its CD sales through traditional channels, the Philharmonia Baroque Orchestra released its performance of Handel’s 1736 opera, /{Atalanta}/, exclusively through the online record label Magnatune, using a CC license. Conductor Nicholas McGegan said the Internet “has potentially given the industry a tremendous shot in the arm,” letting orchestras reach “new audiences, including ones that are unlikely to hear you in person.”~{ Mia Garlick, “Classical Music Goes Digital (& CC),” May 3, 2006, at http://creativecommons.org/weblog/entry/5883. }~ A company that specializes in Catalan music collaborated with the Catalonian government to release two CDs full of CC-licensed music.~{ The Enderrock Group, a company that specializes in Catalan music and publishes three popular music magazines, released the two CDs, /{Música Lliure and Música Lliure II}/, free within the page of its magazines. See Margot Kaminski, “Enderrock,” Creative Commons Web site, January 17, 2007, at http://cre ativecommons.org/audio/enderrock. }~ A group of Gamelan musicians from central Java who perform in North Carolina decided to release their recordings under a CC license.~{ The group, Gamelan Nyai Saraswait, was blogged about by Matt Haughey on February 1, 2003, at http://creativecommons.org/weblog/entry/3599. }~ +The CC licenses are not just the province of daring remix artists and other experimentalists. 
Disappointed by its CD sales through traditional channels, the Philharmonia Baroque Orchestra released its performance of Handel’s 1736 opera, /{Atalanta}/, exclusively through the online record label Magnatune, using a CC license. Conductor Nicholas McGegan said the Internet “has potentially given the industry a tremendous shot in the arm,” letting orchestras reach “new audiences, including ones that are unlikely to hear you in person.”~{ Mia Garlick, “Classical Music Goes Digital (& CC),” May 3, 2006, at http://creativecommons.org/weblog/entry/5883. }~ A company that specializes in Catalan music collaborated with the Catalonian government to release two CDs full of CC-licensed music.~{ The Enderrock Group, a company that specializes in Catalan music and publishes three popular music magazines, released the two CDs, /{Música Lliure and Música Lliure II}/, free within the page of its magazines. See Margot Kaminski, “Enderrock,” Creative Commons Web site, January 17, 2007, at http://creativecommons.org/audio/enderrock. }~ A group of Gamelan musicians from central Java who perform in North Carolina decided to release their recordings under a CC license.~{ The group, Gamelan Nyai Saraswait, was blogged about by Matt Haughey on February 1, 2003, at http://creativecommons.org/weblog/entry/3599. }~ ={McGegan, Nicholas} Big-name artists have gotten into the licenses as well. DJ Vadim created a splash when he released all the original solo, individual instrumental, and a cappella studio tracks of his album /{The Sound Catcher}/ under an Attribution, NonCommercial license, so that remixers could have at it.~{ Victor Stone, “DJ Vadim Releases Album Tracks Under CC,” August 20, 2007, at http://creativecommons.org/weblog/entry/7619. }~ In 2004, /{Wired}/ magazine released a CD with sixteen tracks by the likes of David Byrne, Gilberto Gil, and the Beastie Boys. 
“By contributing a track to /{The Wired CD}/., these musicians acknowledge that for an art form to thrive, it needs to be open, fluid and alive,” wrote /{Wired}/. “These artists — and soon, perhaps, many more like them — would rather have people share their work than steal it.”~{ Thomas Goetz, “Sample the Future,” /{Wired}/, November 2004, pp. 181–83. }~ ={Byrne, David;Gil, Gilberto+1;DJ Vadim;Beastie Boys} -Soon thereafter, Byrne and Gil went so far as to host a gala benefit concert for Creative Commons in New York City. In a fitting fusion of styles, Gil sang a Brazilian arrangement of Cole Porter’s cowboy song, “Don’t Fence Me In.” The crowd of 1,500 was high on the transcultural symbolism, said Glenn Brown: “Musical superstars from North and South, jamming together, building earlier works into new creations, in real time. Lawyers on the sidelines and in the audience, where they belong. The big Creative Commons logo smiling overhead.”~{ Glenn Otis Brown, “WIRED Concert and CD: A Study in Collaboration,” September 24, 2004, available at http://creativecommons.org/weblog/entry/ 4415. }~ The description captures the CC enterprise to a fault: the fusion of some clap-your-hands populism and hardheaded legal tools, inflected with an idealistic call to action to build a better world. +Soon thereafter, Byrne and Gil went so far as to host a gala benefit concert for Creative Commons in New York City. In a fitting fusion of styles, Gil sang a Brazilian arrangement of Cole Porter’s cowboy song, “Don’t Fence Me In.” The crowd of 1,500 was high on the transcultural symbolism, said Glenn Brown: “Musical superstars from North and South, jamming together, building earlier works into new creations, in real time. Lawyers on the sidelines and in the audience, where they belong. The big Creative Commons logo smiling overhead.”~{ Glenn Otis Brown, “WIRED Concert and CD: A Study in Collaboration,” September 24, 2004, available at http://creativecommons.org/weblog/entry/4415. 
}~ The description captures the CC enterprise to a fault: the fusion of some clap-your-hands populism and hardheaded legal tools, inflected with an idealistic call to action to build a better world. ={Brown, Glenn Otis;Porter, Cole} -By 2008 the power of open networks had persuaded the major record labels to abandon digital rights management of music CDs, and more major artists were beginning to venture forth with their own direct distribution plans, bypassing the standard record label deals. Prince, Madonna, and others found it more lucrative to run their own business affairs and deal with concert venues and merchandisers. In a major experiment that suggests a new business model for major music acts, Nine Inch Nails released its album /{Ghosts I-IV}/ under a Creative Commons NonCommercial ShareAlike license, and posted audio files of the album on its official Web site, inviting free downloads. It did not do advertising or promotion. Despite the free distribution — or because of it — the group made money by selling 2,500 copies of an “Ultra-Deluxe Limited Edition” of the album for $300; the edition sold out in less than three days. There were also nonlimited sales of a “deluxe edition” for $75 and a $10 CD. The scheme showed how free access to the music can be used to drive sales for something that remains scarce, such as a “special edition” CD or a live performance. One week after the album’s release, the Nine Inch Nails’ Web site reported that the group had made over $1.6 million from over 750,000 purchase and download transactions. Considering that an artist generally makes only $1.60 on the sale of a $15.99 CD, Nine Inch Nails made a great deal more money from a “free” album distribution than it otherwise would have made through a standard record deal.~{ See, e.g., Wikipedia entry, “Ghosts I-IV,” at http://en.wikipedia.org/wiki/ Ghosts_I-IV. 
}~ +By 2008 the power of open networks had persuaded the major record labels to abandon digital rights management of music CDs, and more major artists were beginning to venture forth with their own direct distribution plans, bypassing the standard record label deals. Prince, Madonna, and others found it more lucrative to run their own business affairs and deal with concert venues and merchandisers. In a major experiment that suggests a new business model for major music acts, Nine Inch Nails released its album /{Ghosts I-IV}/ under a Creative Commons NonCommercial ShareAlike license, and posted audio files of the album on its official Web site, inviting free downloads. It did not do advertising or promotion. Despite the free distribution — or because of it — the group made money by selling 2,500 copies of an “Ultra-Deluxe Limited Edition” of the album for $300; the edition sold out in less than three days. There were also nonlimited sales of a “deluxe edition” for $75 and a $10 CD. The scheme showed how free access to the music can be used to drive sales for something that remains scarce, such as a “special edition” CD or a live performance. One week after the album’s release, the Nine Inch Nails’ Web site reported that the group had made over $1.6 million from over 750,000 purchase and download transactions. Considering that an artist generally makes only $1.60 on the sale of a $15.99 CD, Nine Inch Nails made a great deal more money from a “free” album distribution than it otherwise would have made through a standard record deal.~{ See, e.g., Wikipedia entry, “Ghosts I-IV,” at http://en.wikipedia.org/wiki/Ghosts_I-IV. }~ ={Nine Inch Nails} It is too early to know if Lessig’s “BMI strategy” will in fact catalyze a structural transformation in the entertainment industries. But Lessig apparently feels that it is the only feasible strategy. 
As he said in a 2006 speech, intensified hacking to break systems of proprietary control will not work; new campaigns to win progressive legislation won’t succeed within the next twenty years; and litigation is “a long-term losing strategy,” as the /{Eldred}/ case demonstrated. For Lessig and much of the free culture community, the long-term project of building one’s own open, commons-friendly infrastructure is the only enduring solution. ={BMI (Broadcast Music, Inc.);Eldred v. Reno/Eldred v. Ashcroft:effects of;Lessig, Lawrence:Eldred v. Reno, and|music, and+1} -In the music industry, the early signs seem to support this approach. When digital guru Don Tapscott surveyed the events of 2006, he concluded that “the losers built digital music stores and the winners built vibrant communities based on music. The losers built walled gardens while the winners built public squares. The losers were busy guarding their intellectual property while the winners were busy getting everyone’s attention.” In a penetrating analysis in 2007, music industry blogger Gerd Leonhard wrote: “In music, it’s always been about interaction, about sharing, about engaging — not Sell-Sell-Sell right from the start. Stop the sharing and you kill the music business — it’s that simple. When the fan/user/listener stops engaging with the music, it’s all over.”~{ Gerd Leonhard, “Open Letter to the Independent Music Industry: Music 2.0 and the Future of Music,” July 1, 2007, at http://www.gerdleonhard.net/ 2007/07/gerd-leonhards.html. }~ +In the music industry, the early signs seem to support this approach. When digital guru Don Tapscott surveyed the events of 2006, he concluded that “the losers built digital music stores and the winners built vibrant communities based on music. The losers built walled gardens while the winners built public squares. 
The losers were busy guarding their intellectual property while the winners were busy getting everyone’s attention.” In a penetrating analysis in 2007, music industry blogger Gerd Leonhard wrote: “In music, it’s always been about interaction, about sharing, about engaging — not Sell-Sell-Sell right from the start. Stop the sharing and you kill the music business — it’s that simple. When the fan/user/listener stops engaging with the music, it’s all over.”~{ Gerd Leonhard, “Open Letter to the Independent Music Industry: Music 2.0 and the Future of Music,” July 1, 2007, at http://www.gerdleonhard.net/2007/07/gerd-leonhards.html. }~ ={Leonhard, Gerd;Tapscott, Don} Serious change is in the air when the producer/consumer dichotomy is no longer the only paradigm, and a vast network of ordinary people and talented creators are becoming active participants in making their own culture. They are sharing and co-creating. Markets are no longer so separate from social communities; indeed, the two are blurring into each other. Although we may live in a complicated interregnum between Centralized Media and distributed media, the future is likely to favor those creators and businesses who build on open platforms. As Dan Hunter and F. Gregory Lastowka write: “It is clear that two parallel spheres of information production exist today. One is a traditional, copyright-based and profit-driven model that is struggling with technological change. The second is a newly enabled, decentralized amateur production sphere, in which individual authors or small groups freely release their work.”~{ Dan Hunter and F. Gregory Lastowka, “Amateur-to-Amateur,” /{William and Mary Law Review}/ 46, no. 951 (December 2004), pp. 1029–30. 
}~ @@ -1674,7 +1674,7 @@ Even as the machine was getting built, Lessig was taking steps to stoke up a mov Although /{Free Culture}/ repeats many of the fundamental arguments made in his earlier books, Lessig’s arguments this time did not sound like a law professor’s or academic’s, but more like an activist trying to rally a social movement. “This movement must begin in the streets,” he writes. “It must recruit a significant number of parents, teachers, librarians, creators, authors, musicians, filmmakers, scientists — all to tell their story in their own words, and to tell their neighbors why this battle is so important. . . . We will not reclaim a free culture by individual action alone. It will take important reforms of laws. We have a long way to go before the politicians will listen to these ideas and implement these reforms. But that also means that we have time to build awareness around the changes that we need.”~{ Lawrence Lessig, /{Free Culture}/ (New York: Penguin, 2004), pp. 275, 287. }~ The preeminent challenge for this would-be movement, Lessig wrote, is “rebuilding freedoms previously presumed” and “rebuilding free culture.” -Lessig had reason to think that his analysis and exhortations would find receptive ears. He was now a leading voice on copyright and Internet issues, and well known through his earlier books, public speaking, and /{Eldred}/ advocacy. The launch of the Creative Commons was thrusting him into the spotlight again. Adoption of the CC licenses was steadily growing in 2003 and 2004 based on the most comprehensive sources at the time, search engines. Yahoo was reporting in September 2004 that there were 4.7 million links to CC licenses on the Web. This number shot up to 14 million only six months later, and by August 2005 it had grown to 53 million.~{ CC license statistics, on CC wiki page, at http://wiki.creativecommons.org/ License_statistics. 
}~ These numbers offer only a crude estimate of actual license usage, but they nonetheless indicated a consistent trend. Usage was also being propelled by new types of Web 2.0 sites featuring usergenerated content. For example, Flickr, the photo-sharing site, had 4.1 million photos tagged with CC licenses at the end of 2004, a number that has soared to an estimated 75 million by 2008. +Lessig had reason to think that his analysis and exhortations would find receptive ears. He was now a leading voice on copyright and Internet issues, and well known through his earlier books, public speaking, and /{Eldred}/ advocacy. The launch of the Creative Commons was thrusting him into the spotlight again. Adoption of the CC licenses was steadily growing in 2003 and 2004 based on the most comprehensive sources at the time, search engines. Yahoo was reporting in September 2004 that there were 4.7 million links to CC licenses on the Web. This number shot up to 14 million only six months later, and by August 2005 it had grown to 53 million.~{ CC license statistics, on CC wiki page, at http://wiki.creativecommons.org/License_statistics. }~ These numbers offer only a crude estimate of actual license usage, but they nonetheless indicated a consistent trend. Usage was also being propelled by new types of Web 2.0 sites featuring user-generated content. For example, Flickr, the photo-sharing site, had 4.1 million photos tagged with CC licenses at the end of 2004, a number that has soared to an estimated 75 million by 2008. ={Lessig, Lawrence:CC licenses, and;Yahoo;Web 2.0:CC licenses, and;Creative Commons (CC) licenses:Web 2.0 environment, and} The decisive choice, four years earlier, to build a suite of licenses that could propagate themselves via open networks was bearing fruit. @@ -1724,7 +1724,7 @@ Perhaps the neatest self-promotional trick that the Creative Commons has devised Infrastructure grows old and occasionally needs to be updated and improved. 
The CC licenses have been no exception. As users have incorporated them into one medium after another, the unwitting omissions and infelicitous legal language of some parts of the licenses needed revisiting. After many months of discussions with many parts of the CC world, the Creative Commons issued a new set of 2.0 licenses in May 2004.~{ Glenn Otis Brown, “Announcing (and explaining) our new 2.0 licenses,” CC blog, May 25, 2004, at http://creativecommons.org/weblog/entry/4216. }~ They did not differ substantially from the original ones, and in fact the changes would probably bore most nonlawyers. For example, version 2.0 included a provision that allows a licensor to require licensees to provide a link back to the licensor’s work. The 2.0 licenses also clarify many complicated license options affecting music rights, and make clear that licensors make no warranties of title, merchantability, or fitness for use. Perhaps the biggest change in version 2.0 was the elimination of the choice of Attribution licenses. Since nearly 98 percent of all licensors chose Attribution, the Creative Commons decided to drop licenses without the Attribution requirement, thereby reducing the number of CC licenses from eleven to six. ={Creative Commons (CC) licenses:version 2.0 of} -Another set of major revisions to the licenses was taken up for discussion in 2006, and agreed upon in February 2007.~{ 7. Mia Garlick, “Version 3.0 Launched,” CC blog, http://creativecommons.org/ weblog/entry/7249. }~ Once again, the layperson would care little for the debates leading to the changes, but considerable, sometimes heated discussion went into the revisions. In general, the 3.0 tweaks sought to make the licenses clearer, more useful, and more enforceable. The issue of “moral rights” under copyright law — an issue in many European countries — is explicitly addressed, as are the complications of the CC licenses and collecting societies. 
New legal language was introduced to ensure that people who remix works under other licenses, such as the GNU Free Documentation License (FDL), would be able to also use CC-licensed materials in the same work — an important provision for preventing free culture from devolving into “autistic islands” of legally incomptabile material. Besides helping align the CC world with Wikipedia (which uses the GNU FDL license), the 3.0 revisions also made harmonizing legal changes to take account of MIT and the Debian software development community. +Another set of major revisions to the licenses was taken up for discussion in 2006, and agreed upon in February 2007.~{ Mia Garlick, “Version 3.0 Launched,” CC blog, http://creativecommons.org/weblog/entry/7249. }~ Once again, the layperson would care little for the debates leading to the changes, but considerable, sometimes heated discussion went into the revisions. In general, the 3.0 tweaks sought to make the licenses clearer, more useful, and more enforceable. The issue of “moral rights” under copyright law — an issue in many European countries — is explicitly addressed, as are the complications of the CC licenses and collecting societies. New legal language was introduced to ensure that people who remix works under other licenses, such as the GNU Free Documentation License (FDL), would be able to also use CC-licensed materials in the same work — an important provision for preventing free culture from devolving into “autistic islands” of legally incompatible material. Besides helping align the CC world with Wikipedia (which uses the GNU FDL license), the 3.0 revisions also made harmonizing legal changes to take account of MIT and the Debian software development community. 
={GNU Project:GNU FDL;copyright law:moral rights, and;Creative Commons (CC) licenses:version 3.0 of} By getting the CC licenses integrated into so many types of software and Web services, and even leveraging market players to embrace the sharing ethic, Creative Commons has managed to kill at least three birds with one stone. It has enlarged the universe of shareable Internet content. It has educated people to consider how copyright law affects them personally. And it has given visibility to its larger vision of free culture. @@ -1763,7 +1763,7 @@ In a pre-Internet context, the whole idea of a creating a new international lice Going international with the licenses offered an appealing way to grow both simultaneously without forcing unpleasant trade-offs between the two, at least initially. Drafting the licenses for a country, for example, helps convene top lawyers committed to the idea of legal sharing and collaboration while also mobilizing diverse constituencies who are the potential leaders of a movement. -According to Jonathan Zittrain, an early collaborator on the project and a board member, Creative Commons at the international level is more of a “persuasive, communicative enterprise than a legal licensing one.”~{ Interview with Jonathan Zittrain, September 28, 2006. }~ It is a vehicle for starting a process for engaging public-spirited lawyers, law scholars, and all manner of creators. The licenses do have specific legal meanings in their respective legal jurisdictions, of course, or are believed to have legal application. (Only three courts, in the Netherlands and Spain, have ever ruled on the legal status of the CC licenses. In two instances the courts enforced the licenses; in the other case, in which the defendant lost, the validity of the licenses was not at issue.)~{ The most famous court case involving the CC licenses is /{A. Curry v. 
Audax/Weekend}/, in which Adam Curry sued the publishers of a Dutch tabloid magazine and two senior editors for using four photos of his family on his Flickr account that had been licensed under a BY-NC-SA license. See http://creativecommons.org/weblog/entry/5944 and http://creativecommons.org/weblog/entry/5823. A District Court of Amsterdam upheld Curry’s usage of the CC licenses in a March 9, 2006, decision; see http://mir rors.creativecommons.org/judgements/Curry-Audax-English.pdf. There have been two Spanish cases involving CC licenses. In both cases, a collecting society, the Sociedad General de Autores y Editores (SGAE), sued cafés for playing “free music” licensed under CC licenses; SGAE claimed that it was owed royalties for the public performance of music because artists cannot legally apply a CC license to their work (or even release it online) without the consent of their collecting society. In both instances, the cases turned on evidentiary issues, not on the enforceability of CC licenses. See http:// creativecommons.org/weblog/entry/5830 and http://creativecommons.org/ weblog/entry/7228. }~ Apart from their legal meaning, the licenses’ most important function may be as a social signaling device. They let people announce, “I participate in and celebrate the sharing economy.” The internationalization of the CC licenses has also been a way of “localizing” the free culture movement. +According to Jonathan Zittrain, an early collaborator on the project and a board member, Creative Commons at the international level is more of a “persuasive, communicative enterprise than a legal licensing one.”~{ Interview with Jonathan Zittrain, September 28, 2006. }~ It is a vehicle for starting a process for engaging public-spirited lawyers, law scholars, and all manner of creators. The licenses do have specific legal meanings in their respective legal jurisdictions, of course, or are believed to have legal application. 
(Only three courts, in the Netherlands and Spain, have ever ruled on the legal status of the CC licenses. In two instances the courts enforced the licenses; in the other case, in which the defendant lost, the validity of the licenses was not at issue.)~{ The most famous court case involving the CC licenses is /{A. Curry v. Audax/Weekend}/, in which Adam Curry sued the publishers of a Dutch tabloid magazine and two senior editors for using four photos of his family on his Flickr account that had been licensed under a BY-NC-SA license. See http://creativecommons.org/weblog/entry/5944 and http://creativecommons.org/weblog/entry/5823. A District Court of Amsterdam upheld Curry’s usage of the CC licenses in a March 9, 2006, decision; see http://mirrors.creativecommons.org/judgements/Curry-Audax-English.pdf. There have been two Spanish cases involving CC licenses. In both cases, a collecting society, the Sociedad General de Autores y Editores (SGAE), sued cafés for playing “free music” licensed under CC licenses; SGAE claimed that it was owed royalties for the public performance of music because artists cannot legally apply a CC license to their work (or even release it online) without the consent of their collecting society. In both instances, the cases turned on evidentiary issues, not on the enforceability of CC licenses. See http://creativecommons.org/weblog/entry/5830 and http://creativecommons.org/weblog/entry/7228. }~ Apart from their legal meaning, the licenses’ most important function may be as a social signaling device. They let people announce, “I participate in and celebrate the sharing economy.” The internationalization of the CC licenses has also been a way of “localizing” the free culture movement. ={Zittrain, Jonathan} The first nation to port the CC licenses was Japan. This was partly an outgrowth of a five-month sabbatical that Lessig had spent in Tokyo, from late 2002 through early 2003. 
There were already stirrings of dissatisfaction with copyright law in Japan. Koichiro Hayashi, a professor who had once worked for the telecom giant NTT, had once proposed a so-called d-mark system to allow copyright owners to forfeit the statutory term of copyright protection and voluntarily declare a shorter term for their works. In the spring of 2003, a team of Japanese lawyers associated with a technology research institute, the Global Communications Center (GLOCOM), working with CC International in Berlin, set about porting the licenses to Japanese law. @@ -1786,7 +1786,7 @@ As each jurisdiction introduces its licenses, it typically hosts a gala public e Luiz Inácio Lula da Silva had just been elected president of Brazil, and he was eager to stake out a new set of development policies to allow his nation to plot its own economic and cultural future. His government, reflecting his electoral mandate, resented the coercive effects of international copyright law and patent law. To tackle some of these issues on the copyright front, President Lula appointed Gilberto Gil, the renowned singer-songwriter, as his minister of culture. ={Lula da Silva, Luiz Inácio;Gil, Gilberto+11} -Gil became a revered cultural figure when he helped launch a new musical style, /{tropicalismo}/, in the late 1960s, giving Brazil a fresh, international cachet. The music blended national styles of music with pop culture and was inflected with political and moral themes. As one commentator put it, /{tropicalismo}/ was “a very ’60s attempt to capture the chaotic, swirling feel of Brazil’s perennially uneven modernization, its jumble of wealth and poverty, of rural and urban, of local and global. . . . They cut and pasted styles with an abandon that, amid today’s sample-happy music scene, sounds up-to-theminute.”~{ Wikipedia entry, “Tropicalismo,” at http://en.wikipedia.org/wiki/Tropical ismo. 
}~ The military dictatorship then running the government considered /{tropicalismo}/ sufficiently threatening that it imprisoned Gil for several months before forcing him into exile, in London. Gil continued writing and recording music, however, and eventually returned to Brazil.~{ For a history of Gil, see his personal Web site at http://www.gilbertogil .com.br/index.php?language=en; the Wikipedia entry on him at http: //en.wikipedia.org/wiki/Gilberto_Gil; and Larry Rohter, “Gilberto Gil Hears the Future, Some Rights Reserved,” /{New York Times}/, March 11, 2007. }~ +Gil became a revered cultural figure when he helped launch a new musical style, /{tropicalismo}/, in the late 1960s, giving Brazil a fresh, international cachet. The music blended national styles of music with pop culture and was inflected with political and moral themes. As one commentator put it, /{tropicalismo}/ was “a very ’60s attempt to capture the chaotic, swirling feel of Brazil’s perennially uneven modernization, its jumble of wealth and poverty, of rural and urban, of local and global. . . . They cut and pasted styles with an abandon that, amid today’s sample-happy music scene, sounds up-to-the-minute.”~{ Wikipedia entry, “Tropicalismo,” at http://en.wikipedia.org/wiki/Tropicalismo. }~ The military dictatorship then running the government considered /{tropicalismo}/ sufficiently threatening that it imprisoned Gil for several months before forcing him into exile, in London. Gil continued writing and recording music, however, and eventually returned to Brazil.~{ For a history of Gil, see his personal Web site at http://www.gilbertogil.com.br/index.php?language=en; the Wikipedia entry on him at http://en.wikipedia.org/wiki/Gilberto_Gil; and Larry Rohter, “Gilberto Gil Hears the Future, Some Rights Reserved,” /{New York Times}/, March 11, 2007. }~ This history matters, because when Gil was appointed culture minister, he brought with him a rare political sophistication and public veneration. 
His moral stature and joyous humanity allowed him to transcend politics as conventionally practiced. “Gil wears shoulder-length dreadlocks and is apt to show up at his ministerial offices dressed in the simple white linens that identify him as a follower of the Afro-Brazilian religion /{candomblé}/,” wrote American journalist Julian Dibbell in 2004. “Slouching in and out of the elegant Barcelona chairs that furnish his office, taking the occasional sip from a cup of pinkish herbal tea, he looks — and talks — less like an elder statesman than the posthippie, multiculturalist, Taoist intellectual he is.”~{ Julian Dibbell, “We Pledge Allegiance to the Penguin,” /{Wired}/, November 2004, at http://www.wired.com/wired/archive/12.11/linux_pr.html. }~ ={Dibbell, Julian+1} @@ -1806,10 +1806,10 @@ This alignment of intellectual firepower, artistic authority, and political clou One of the first collaborations between Creative Commons and the Brazilian government involved the release of a special CC-GPL license in December 2003.~{ Creative Commons press release, “Brazilian Government First to Adopt New ‘CC-GPL,’ ” December 2, 2003. }~ This license adapted the General Public License for software by translating it into Portuguese and putting it into the CC’s customary “three layers” — a plain-language version, a lawyers’ version compatible with the national copyright law, and a machine-readable metadata expression of the license. The CC-GPL license, released in conjunction with the Free Software Foundation, was an important international event because it gave the imprimatur of a major world government to free software and the social ethic of sharing and reuse. Brazil has since become a champion of GNU/Linux and free software in government agencies and the judiciary. It regards free software and open standards as part of a larger fight for a “development agenda” at the World Intellectual Property Organization and the World Trade Organization. 
In a related vein, Brazil has famously challenged patent and trade policies that made HIV/AIDS drugs prohibitively expensive for thousands of sick Brazilians. ={free software:international licensing, and+1;GNU/Linux:Brazil, in;World Trade Organization;World Intellectual Property Organization;open networks:international} -When the full set of CC Brazil licenses was finally launched— at the Fifth International Free Software Forum, in Port Alegre on June 4, 2004 — it was a major national event. Brazilian celebrities, government officials, and an enthusiastic crowd of nearly two thousand people showed up. Gil, flying in from a cabinet meeting in Brasília, arrived late. When he walked into the auditorium, the panel discussion under way immediately stopped, and there was a spontaneous standing ovation.~{ A ten-minute video of the CC Brazil opening can be seen at http:// support.creativecommons.org/videos#brasil. }~ “It was like a boxer entering the arena for a heavyweight match,” recalled Glenn Otis Brown. “He had security guards on both sides of him as he walked up the middle aisle. There were flashbulbs, and admirers trailing him, and this wave of people in the audience cresting as he walked by.”~{ Interview with Glenn Otis Brown, August 10, 2006. }~ +When the full set of CC Brazil licenses was finally launched— at the Fifth International Free Software Forum, in Port Alegre on June 4, 2004 — it was a major national event. Brazilian celebrities, government officials, and an enthusiastic crowd of nearly two thousand people showed up. Gil, flying in from a cabinet meeting in Brasília, arrived late. When he walked into the auditorium, the panel discussion under way immediately stopped, and there was a spontaneous standing ovation.~{ A ten-minute video of the CC Brazil opening can be seen at http://support.creativecommons.org/videos#brasil. }~ “It was like a boxer entering the arena for a heavyweight match,” recalled Glenn Otis Brown. 
“He had security guards on both sides of him as he walked up the middle aisle. There were flashbulbs, and admirers trailing him, and this wave of people in the audience cresting as he walked by.”~{ Interview with Glenn Otis Brown, August 10, 2006. }~ ={Brown, Glenn Otis, CC International, and+1} -Gil originally planned to release three of his songs under the new CC Sampling license — dubbed the “Recombo” license — but his record label, Warner Bros., balked. He eventually released one song, “Oslodum,” that he had recorded for an indie label. “One way to think about it,” said Brown, “is that now, anybody in the world can jam with Gilberto Gil.”~{ Film about CC Brazil launch, at http://support.creativecommons.org/ videos#brasil. }~ +Gil originally planned to release three of his songs under the new CC Sampling license — dubbed the “Recombo” license — but his record label, Warner Bros., balked. He eventually released one song, “Oslodum,” that he had recorded for an indie label. “One way to think about it,” said Brown, “is that now, anybody in the world can jam with Gilberto Gil.”~{ Film about CC Brazil launch, at http://support.creativecommons.org/videos#brasil. }~ As culture minister, Gil released all materials from his agency under a CC license, and persuaded the Ministry of Education as well as Radiobrás, the government media agency, to do the same. He also initiated the Cultural Points (Pontos de Cultura) program, which has given small grants to scores of community centers in poor neighborhoods so that residents can learn how to produce their own music and video works. Since industry concentration and payola make it virtually impossible for newcomers to get radio play and commercially distribute their CDs, according to many observers, the project has been valuable in allowing a fresh wave of grassroots music to “go public” and reach new audiences. 
@@ -1822,7 +1822,7 @@ Since its launch in June 2004, Lemos and the CC Brazil office have instigated a In Brazil, there are open-publishing projects for scientific journals;~{ http://www.scielo.br. }~ a Web site that brings together a repository of short films;~{ http://www.portacurtas.com.br. }~ and Overmundo, a popular site for cultural commentary by Internet users.~{ http://www.overmundo.com.br }~ TramaVirtual, an open-platform record label that lets musicians upload their music and fans download it for free, now features more than thirty-five thousand artists.~{ http://tramavirtual.uol.com.br. }~ (By contrast, the largest commercial label in Brazil, Sony-BMG, released only twelve CDs of Brazilian music in 2006, according to Lemos.) -“Cultural production is becoming increasingly disconnected from traditional media forms,” said Lemos, because mass media institutions “are failing to provide the adequate incentives for culture to be produced and circulated. . . . Cultural production is migrating to civil society and/or the peripheries, which more or less already operate in a ‘social commons’ environment, and do not depend on intellectual property within their business models.”~{ Ronaldo Lemos, “From Legal Commons to Social Commons: Developing Countries and the Cultural Industry in the 21st Century,” http://icommons .org/banco/from-legal-commons-to-social-commons-brazil-and-the-culturalindustry-1. }~ +“Cultural production is becoming increasingly disconnected from traditional media forms,” said Lemos, because mass media institutions “are failing to provide the adequate incentives for culture to be produced and circulated. . . . 
Cultural production is migrating to civil society and/or the peripheries, which more or less already operate in a ‘social commons’ environment, and do not depend on intellectual property within their business models.”~{ Ronaldo Lemos, “From Legal Commons to Social Commons: Developing Countries and the Cultural Industry in the 21st Century,” http://icommons.org/banco/from-legal-commons-to-social-commons-brazil-and-the-culturalindustry-1. }~ As more people have adopted legal modes of copying and sharing under CC licenses, it is changing the social and political climate for copyright reform. Now that CC Brazil can cite all sorts of successful free culture ventures, it can more persuasively advocate for a Brazilian version of the fair use doctrine and press for greater photocopying privileges in educational settings (which are legally quite restrictive). ={free culture:international+2} @@ -1832,7 +1832,7 @@ Although the CC licenses are now familiar to many Brazilians, they have encounte As a unique global ambassador of creative sharing, Gilberto Gil did a lot to take the CC licenses to other nations and international forums such as the World Intellectual Property Organization. The day before his 2004 benefit concert for the Creative Commons in New York City with David Byrne, Gil delivered a powerful speech explaining the political implications of free culture: ={Byrne, David;Gil, Gilberto+3;World Intellectual Property Organization} -_1 A global movement has risen up in affirmation of digital culture. 
This movement bears the banners of free software and digital inclusion, as well as the banner of the endless expansion of the circulation of information and creation, and it is the perfect model for a Latin-American developmental cultural policy (other developments are possible) of the most anti-xenophobic, anti-authoritarian, anti-bureaucratizing, anti-centralizing, and for the very reason, profoundly democratic and transformative sort.~{ Gil remarks at New York University, September 19, 2004, at http://www .nyu.edu/fas/NewsEvents/Events/Minister_Gil_speech.pdf. }~ +_1 A global movement has risen up in affirmation of digital culture. This movement bears the banners of free software and digital inclusion, as well as the banner of the endless expansion of the circulation of information and creation, and it is the perfect model for a Latin-American developmental cultural policy (other developments are possible) of the most anti-xenophobic, anti-authoritarian, anti-bureaucratizing, anti-centralizing, and for the very reason, profoundly democratic and transformative sort.~{ Gil remarks at New York University, September 19, 2004, at http://www.nyu.edu/fas/NewsEvents/Events/Minister_Gil_speech.pdf. }~ The Brazilian government was making digital culture “one of its strategic public policies,” Gil said, because “the most important political battle that is being fought today in the technological, economic, social and cultural fields has to do with free software and with the method digital freedom has put in place for the production of shared knowledge. This battle may even signify a change in subjectivity, with critical consequences for the very concept of civilization we shall be using in the near future.”~{ Ibid. }~ @@ -1856,7 +1856,7 @@ In Scotland, government and other public-sector institutions have been huge fans The BBC was a pioneer in making its archived television and radio programs available to the public for free. 
In 2003, inspired by the CC licenses, the BBC drafted its own “Creative Archive” license as a way to open up its vast collection of taxpayer-financed television and radio programs.~{ See http://news.bbc.co.uk/2/hi/help/4527506.stm, and interview with Paula Le Dieu, joint director of the BBC Creative Archive project, May 28, 2004, at http://digital-lifestyles.info/2004/05/28/exclusive-providing-the-fuel-fora-creative-nation-an-interview-with-paula-le-dieu-joint-director-on-the-bbccreative-archive. }~ The license was later adopted by Channel 4, the Open University, the British Film Institute, and the Museum, Libraries and Archives Council. Although the Creative Archive license has similar goals as the CC licenses, it contains several significant differences: it restricts use of video programs to United Kingdom citizens only, and it prohibits use of materials for political or charitable campaigns and for any derogatory purposes. ={BBC} -The CC licenses have proven useful, also, to the British Museum and National Archives. In 2004, these and other British educational institutions were pondering how they should make their publicly funded digital resources available for reuse. A special government panel, the Common Information Environment, recommended usage of the CC licenses because they were already international in scope. The panel liked that the licenses allow Web links in licensed materials, which could help users avoid the complications of formal registration. The panel also cited the virtues of “human readable deeds” and machine-readable metadata.~{ Intrallect Ltd and AHRC Research Centre for Studies in Intellectual Property and Technology Law, University of Edinburgh, “The Common Information Environment and Creative Commons,” October 10, 2005, at http://www .intrallect.com/index.php/intrallect/content/download/632/2631/file/CIE _CC_Final_Report.pdf. }~ +The CC licenses have proven useful, also, to the British Museum and National Archives. 
In 2004, these and other British educational institutions were pondering how they should make their publicly funded digital resources available for reuse. A special government panel, the Common Information Environment, recommended usage of the CC licenses because they were already international in scope. The panel liked that the licenses allow Web links in licensed materials, which could help users avoid the complications of formal registration. The panel also cited the virtues of “human readable deeds” and machine-readable metadata.~{ Intrallect Ltd and AHRC Research Centre for Studies in Intellectual Property and Technology Law, University of Edinburgh, “The Common Information Environment and Creative Commons,” October 10, 2005, at http://www.intrallect.com/index.php/intrallect/content/download/632/2631/file/CIE_CC_Final_Report.pdf. }~ As it happened, a team of Scottish legal scholars led by a private attorney, Jonathan Mitchell, successfully ported the licenses and released them a few months later, in December 2005. The Scottish effort had been initiated a year earlier when Mitchell and his colleagues objected that the U.K. CC licenses then being drafted were too rooted in English law and not sufficiently attuned to Scottish law. Since the introduction of the CC Scotland licenses, public-sector institutions have enthusiastically embraced them. Museums use the licenses on MP3 files that contain audio tours, for example, as well as on Web pages, exhibition materials, and photographs of artworks. Interestingly, in England and Wales, individual artists and creative communities seem to be more active than public-sector institutions in using the licenses. ={Scotland:CC licenses in;Creative Commons International:Scotland;Mitchell, Jonathan} @@ -1864,7 +1864,7 @@ As it happened, a team of Scottish legal scholars led by a private attorney, Jon The use of CC licenses for government information and publicly funded materials is inspiring similar efforts in other countries. 
Governments are coming to realize that they are one of the primary stewards of intellectual property, and that the wide dissemination of their work — statistics, research, reports, legislation, judicial decisions — can stimulate economic innovation, scientific progress, education, and cultural development. Unfortunately, as Anne Fitzgerald, Brian Fitzgerald, and Jessica Coates of Australia have pointed out, “putting all such material into the public domain runs the risk that material which is essentially a public and national asset will be appropriated by the private sector, without any benefit to either the government or the taxpayers.”~{ iCommons annual report, 2007, http://www.icommons.org/annual07. }~ For example, the private sector may incorporate the public-domain material into a value-added proprietary model and find other means to take the information private. The classic instance of this is West Publishing’s dominance in the republishing of U.S. federal court decisions. Open-content licenses offer a solution by ensuring that taxpayerfinanced works will be available to and benefit the general public. ={Coates, Jessica;Fitzgerald, Anne;Fitzgerald, Brian;West Publishing} -In the United States, the National Institutes of Health has pursued a version of this policy by requiring that federally funded research be placed in an open-access archive or journal within twelve months of its commercial publication. The European Commission announced in 2007 that it plans to build a major open-access digital repository for publicly funded research.~{ Michael Geist, “Push for Open Access to Research, BBC News, February 28, 2007, at http://news.bbc.co.uk/go/pr/fr/~/2/hi/technology/6404429. 
}~ In Mexico, the Sistema Internet de la Presidencia, or Presidency Internet System (SIP), decided in 2006 to adopt CC licenses for all content generated by the Mexican presidency on the Internet — chiefly the president’s various Web sites, Internet radio station, and documents.~{ Creative Commons blog, Alex Roberts, March 8, 2006, at http://creative commons.org/text/sip. }~ In Italy, CC Italy is exploring legislation to open up national and local government archives. It also wants new contract terms for those who develop publicly funded information so that it will automatically be available in the future.~{ Interview with Juan Carlos de Martin, CC Italy, July 17, 2007. }~ +In the United States, the National Institutes of Health has pursued a version of this policy by requiring that federally funded research be placed in an open-access archive or journal within twelve months of its commercial publication. The European Commission announced in 2007 that it plans to build a major open-access digital repository for publicly funded research.~{ Michael Geist, “Push for Open Access to Research, BBC News, February 28, 2007, at http://news.bbc.co.uk/go/pr/fr/~/2/hi/technology/6404429. }~ In Mexico, the Sistema Internet de la Presidencia, or Presidency Internet System (SIP), decided in 2006 to adopt CC licenses for all content generated by the Mexican presidency on the Internet — chiefly the president’s various Web sites, Internet radio station, and documents.~{ Creative Commons blog, Alex Roberts, March 8, 2006, at http://creativecommons.org/text/sip. }~ In Italy, CC Italy is exploring legislation to open up national and local government archives. It also wants new contract terms for those who develop publicly funded information so that it will automatically be available in the future.~{ Interview with Juan Carlos de Martin, CC Italy, July 17, 2007. 
}~ ={Creative Commons International:Italy|Mexico;Italy:CC licenses in;Mexico:CC licenses in} 2~ Laboratories of Free Culture @@ -1881,7 +1881,7 @@ Not surprisingly, the American CC licenses — a version of which was spun off a As a fledgling network, the international CC community is a rudimentary platform for change. Its members are still groping toward a shared understanding of their work and devising new systems of communication and collaboration. But a great deal of cross-border collaboration is occurring. A variety of free culture advocates have constituted themselves as the Asia Commons and met in Bangkok to collaborate on issues of free software, citizen access to government information, and industry antipiracy propaganda. CC Italy has invited leaders of neighboring countries— France, Switzerland, Austria, Croatia, and Slovenia — to share their experiences and work together. A CC Latin America project started /{Scripta}/, a new Spanish-language journal based in Ecuador, to discuss free software and free culture issues affecting the continent. ={Creative Commons International:cross-border collaboration+1} -CC leaders in Finland, France, and Australia have published books about their licensing projects.~{ The French book is Danièle Bourcier and Mélanie Dulong de Rosnay, eds., /{International Commons at the Digital Age}/ (Paris: Romillat, 2004), at http://fr.creativecommons.org/icommons_book.htm. The Finnish book is Herkko Hietanen et al., /{Community Created Content: Law, Business and Policy}/ (Turre Publishing, 2007), at http://www.turre.com/images/stories/books/webkirja_koko_ optimoitu2.pdf. The Australian book is Brian Fitzgerald, /{Open Content Licensing: Cultivating the Creative Commons}/ (Sydney: Sydney University Press, 2007). }~ CC Brazil and CC South Africa have collaborated on a project about copyright and developing nations. 
CC Canada is working with partners to develop an online, globally searchable database of Canadian works in the Canadian public domain. CC Salons have been held in Amsterdam, Toronto, Berlin, Beijing, London, Warsaw, Seoul, Taipei, and Johannesburg. +CC leaders in Finland, France, and Australia have published books about their licensing projects.~{ The French book is Danièle Bourcier and Mélanie Dulong de Rosnay, eds., /{International Commons at the Digital Age}/ (Paris: Romillat, 2004), at http://fr.creativecommons.org/icommons_book.htm. The Finnish book is Herkko Hietanen et al., /{Community Created Content: Law, Business and Policy}/ (Turre Publishing, 2007), at http://www.turre.com/images/stories/books/webkirja_koko_optimoitu2.pdf. The Australian book is Brian Fitzgerald, /{Open Content Licensing: Cultivating the Creative Commons}/ (Sydney: Sydney University Press, 2007). }~ CC Brazil and CC South Africa have collaborated on a project about copyright and developing nations. CC Canada is working with partners to develop an online, globally searchable database of Canadian works in the Canadian public domain. CC Salons have been held in Amsterdam, Toronto, Berlin, Beijing, London, Warsaw, Seoul, Taipei, and Johannesburg. In the Netherlands, CC project lead Paul Keller engineered a breakthrough that may overcome the persistent objections of European collecting societies to CC-licensed content. Collecting societies in Europe generally insist that any musician that they represent transfer all of their copyrights to the collective. This means that professional musicians cannot distribute their works under a CC license. Artists who are already using CC licenses cannot join the collecting societies in order to receive royalties for commercial uses of their works. In this manner, collecting societies in many European nations have effectively prevented many musicians from using the CC licenses. 
={Keller, Paul;collecting societies+1:see also ASCAP} @@ -1899,7 +1899,7 @@ Love was trying to do for books and journal articles what is already possible fo In the end, Creative Commons offered the Developing Nations license as a separate license, not a rider. It had simple terms: “You must attribute the work in the manner specified by the author or licensor (but not in any way that suggests that they endorse you or your use of the work)” — and the license was valid only in non–high income nations, as determined by United Nations’ statistics. Although the release of the license got considerable press coverage, actual usage of the license was extremely small. The most prominent use was totally unexpected — for architectural designs. Architecture for Humanity, a California nonprofit, used the license for its designs of low-cost housing and health centers. The organization wanted to give away its architectural plans to poor countries while not letting its competitors in the U.S. use them for free.~{ Creative Commons blog, Kathryn Frankel, “Commoners: Architecture for Humanity,” June 30, 2006, at http://creativecommons.org/education/architecture. }~ ={United Nations} -The expected uses of the Developing Nations license never materialized. In 2006, Love said, “The license is there, but people who might be willing to use it are not really aware of it.” He worried that the license “hasn’t really been explained in a way that would be obvious to them,” and ventured that there may be “a need for a re-marketing campaign.” By this time, however, the license had attracted the ire of Richard Stallman for its limitations on “freedom.”~{ See Lessig on Creative Commons blog, December 7, 2005, at http://cre ativecommons.org/weblog/archive/2005/12/page/3. }~ It prohibited copying of a work in certain circumstances (in high-income countries) even for noncommercial purposes, and so authorized only a partial grant of freedom, not a universal one. 
“Well, the whole point was /{not}/ to be universal,” said Love. “The license is for people that are unwilling to share with high-income countries, but are willing to share with developing countries. So it actually expands the commons, but only in developing countries.”~{ Interview with James Love, June 13, 2006. }~ +The expected uses of the Developing Nations license never materialized. In 2006, Love said, “The license is there, but people who might be willing to use it are not really aware of it.” He worried that the license “hasn’t really been explained in a way that would be obvious to them,” and ventured that there may be “a need for a re-marketing campaign.” By this time, however, the license had attracted the ire of Richard Stallman for its limitations on “freedom.”~{ See Lessig on Creative Commons blog, December 7, 2005, at http://creativecommons.org/weblog/archive/2005/12/page/3. }~ It prohibited copying of a work in certain circumstances (in high-income countries) even for noncommercial purposes, and so authorized only a partial grant of freedom, not a universal one. “Well, the whole point was /{not}/ to be universal,” said Love. “The license is for people that are unwilling to share with high-income countries, but are willing to share with developing countries. So it actually expands the commons, but only in developing countries.”~{ Interview with James Love, June 13, 2006. }~ ={Lessig, Lawrence:CC International, and+1;Stallman, Richard:freedom, and+2} The controversy that grew up around the Developing Nations license illuminates the different approaches to movement building that Lessig and Stallman represent. Lessig’s advocacy for free culture has been an exploratory journey in pragmatic idealism; Stallman’s advocacy for free software has been more of a crusade of true believers in a core philosophy. For Stallman, the principles of “freedom” are unitary and clear, and so the path forward is fairly self-evident and unassailable. 
For Lessig, the principles of freedom are more situational and evolving and subject to the consensus of key creative communities. The flexibility has enabled a broad-spectrum movement to emerge, but it does not have the ideological coherence of, say, the free software movement. @@ -1911,7 +1911,7 @@ Several factors converged to make it attractive for Creative Commons to revoke t Finally, many CC staff members regarded the Developing Nations and Sampling licenses as misbegotten experiments. Fewer than 0.01 percent of uses of CC licenses at the time involved the Developing Nations license, and the Sampling license was used by a relatively small community of remix artists and musicians. If eliminating two little-used niche licenses could neutralize objections from the open access and free software movements and achieve a greater philosophical and political solidarity in the “free world,” many CC partisans regarded a rescission of the licenses as a modest sacrifice, if not a net gain. ={remix works+1;music:remixes;Creative Commons (CC) licenses:music, for} -In June 2007, Creative Commons announced that it was officially retiring the two licenses.~{ Creative Commons “retired licenses page,” at http://creativecommons.org/ retiredlicenses. }~ In a formal statement, Lessig explained, “These licenses do not meet the minimum standards of the Open Access movement. Because this movement is so important to the spread of science and knowledge, we no longer believe it correct to promote a standalone version of this license.”~{ Lawrence Lessig, “Retiring standalone DevNations and One Sampling License,” message to CC International listserv, June 4, 2007. }~ The Creative Commons also revoked the Sampling license because it “only permits the remix of the licensed work, not the freedom to share it.” (Two other sampling licenses that permit noncommercial sharing— SamplingPlus and NonCommercial SamplingPlus — were retained.) 
+In June 2007, Creative Commons announced that it was officially retiring the two licenses.~{ Creative Commons “retired licenses page,” at http://creativecommons.org/retiredlicenses. }~ In a formal statement, Lessig explained, “These licenses do not meet the minimum standards of the Open Access movement. Because this movement is so important to the spread of science and knowledge, we no longer believe it correct to promote a standalone version of this license.”~{ Lawrence Lessig, “Retiring standalone DevNations and One Sampling License,” message to CC International listserv, June 4, 2007. }~ The Creative Commons also revoked the Sampling license because it “only permits the remix of the licensed work, not the freedom to share it.” (Two other sampling licenses that permit noncommercial sharing— SamplingPlus and NonCommercial SamplingPlus — were retained.) ={Lessig, Lawrence:CC International, and} Anyone could still use the Sampling or Developing Nations license if they wished; they still exist, after all. It’s just that the Creative Commons no longer supports them. While the actual impact of the license revocations was minor, it did have major symbolic and political significance in the commons world. It signaled that the Creative Commons was capitulating to objections by free software advocates and the concerns of open access publishing activists. @@ -2011,7 +2011,7 @@ Ironically, the Creative Commons is not itself a commons, nor do its licenses ne Is one type of commons superior to the others? Does one offer a superior vision of “freedom”? This philosophical issue has been a recurrent source of tension between the Free Software Foundation, the steward of the GPL, and the Creative Commons, whose licenses cater to individual choice. 
-Strictly speaking, a commons essentially offers a binary choice, explained Benkler: “You’re in the commons or you’re out of the commons.” By broadening that binary choice, the CC licenses make the commons a more complicated and ambiguous enterprise. This is precisely what some critics like Stallman have found objectionable about certain CC licenses. They don’t necessarily help forge a community of shared values and commitments. Or as two British critics, David Berry and Giles Moss, have put it, the CC licenses create “commons without commonality.”~{ David Berry and Giles Moss, “On the ‘Creative Commons’: A Critique of the Commons without Commonality,” Free Software Magazine, July 15, 2005, at http://www.freesoftwaremagazine.com/articles/commons_without_com monality. }~ +Strictly speaking, a commons essentially offers a binary choice, explained Benkler: “You’re in the commons or you’re out of the commons.” By broadening that binary choice, the CC licenses make the commons a more complicated and ambiguous enterprise. This is precisely what some critics like Stallman have found objectionable about certain CC licenses. They don’t necessarily help forge a community of shared values and commitments. Or as two British critics, David Berry and Giles Moss, have put it, the CC licenses create “commons without commonality.”~{ David Berry and Giles Moss, “On the ‘Creative Commons’: A Critique of the Commons without Commonality,” Free Software Magazine, July 15, 2005, at http://www.freesoftwaremagazine.com/articles/commons_without_commonality. }~ ={Benkler, Yochai:social movements, on;Berry, David;Moss, Giles;Stallman, Richard:criticisms by} Inviting authors to choose how their work may circulate can result in different types of “commons economies” that may or may not be interoperable. ShareAlike content is isolated from NoDerivatives content; NonCommercial content cannot be used for commercial purposes without explicit permission; and so on. 
CC-licensed works may themselves be incompatible with content licensed under other licenses, such as the GNU Free Documentation License. @@ -2030,7 +2030,7 @@ These are pivotal questions. The answers point toward different visions of free Some critics accuse Creative Commons of betraying the full potential of the commons because its licenses empower individual authors to decide how “shareable” their works can be. The licenses do not place the needs of the general culture or the commons first, as a matter of universal policy, and some licenses restrict how a work may be used. The lamentable result, say critics like Niva Elkin-Koren, is a segmented body of culture that encourages people to think of cultural works as property. People internalize the norms, such as “This is /{my work}/ and /{I’ll}/ decide how it shall be used by others.” ={Elkin-Koren, Niva;commoners:sharing by+1;Creative Commons (CC) licenses:critics of+2} -This can be seen in the actual choices that CC licensors tend to use. Some 67 percent of CC-licensed works do not allow commercial usage.~{ Based on Yahoo queries, June 13, 2006, at http://wiki.creativecommons.org/ License_Statistics. }~ Arguments go back and forth about whether the NC restriction enhances or shrinks freedom. Many musicians and writers want to promote their works on the Internet while retaining the possibility of commercial gain, however remote; this would seem a strike for freedom. Yet critics note that the NC license is often used indiscriminately, even when commercial sales are a remote possibility. This precludes even modest commercial reuses of a work, such as reposting of content on a blog with advertising.~{ Eric Muller, “The Case for Free Use: Reasons Not to Use a Creative Commons–NC License,” at http://freedomdefined.org/Licenses/NC. }~ +This can be seen in the actual choices that CC licensors tend to use. 
Some 67 percent of CC-licensed works do not allow commercial usage.~{ Based on Yahoo queries, June 13, 2006, at http://wiki.creativecommons.org/License_Statistics. }~ Arguments go back and forth about whether the NC restriction enhances or shrinks freedom. Many musicians and writers want to promote their works on the Internet while retaining the possibility of commercial gain, however remote; this would seem a strike for freedom. Yet critics note that the NC license is often used indiscriminately, even when commercial sales are a remote possibility. This precludes even modest commercial reuses of a work, such as reposting of content on a blog with advertising.~{ Eric Muller, “The Case for Free Use: Reasons Not to Use a Creative Commons–NC License,” at http://freedomdefined.org/Licenses/NC. }~ The larger point of criticism is that the Creative Commons licenses do not “draw a line in the sand” about what types of freedoms are inherent to the commons. In the interest of building a broad movement, Creative Commons does not insist upon a clear standard of freedom or prescribe how a commons should be structured. @@ -2051,9 +2051,9 @@ At one point, the philosophical disagreements between the Creative Commons and i Stallman objected to the Sampling license because, while it allowed a remix of a licensed work, it did not allow the freedom to share it. The Developing Nations license was objectionable because its freedoms to copy are limited to people in the developing world, and do not extend to everyone. Stallman also disliked the fact that the CC tag that licensors affix to their works did not specify /{which}/ license they were using. With no clear standard of “freedom” and now a mix of licenses that included two “non-free” licenses, Stallman regarded the CC tag as meaningless and the organization itself problematic. 
-“I used to support Creative Commons,” said Stallman on his blog in July 2005, “but then it adopted some additional licenses which do not give everyone that minimum freedom, and now I no longer endorse it as an activity. I agree with Mako Hill that they are taking the wrong approach by not insisting on any specific freedoms for the public.”~{ Richard Stallman, “Fireworks in Montreal,” at http://www.fsf.org/blogs/ rms/entry-20050920.html. }~ +“I used to support Creative Commons,” said Stallman on his blog in July 2005, “but then it adopted some additional licenses which do not give everyone that minimum freedom, and now I no longer endorse it as an activity. I agree with Mako Hill that they are taking the wrong approach by not insisting on any specific freedoms for the public.”~{ Richard Stallman, “Fireworks in Montreal,” at http://www.fsf.org/blogs/rms/entry-20050920.html. }~ -Mako Hill is a brilliant young hacker and Stallman acolyte who wrote a 2005 essay, “Towards a Standard of Freedom: Creative Commons and the Free Software Movement,”~{ Benjamin Mako Hill, “Towards a Standard of Freedom: Creative Commons and the Free Software Movement,” /{Advogato}/, July 29, 2005, at http://www .advogato.org/article/851.html. }~ a piece that shares Elkin-Koren’s complaint about the CC’s “ideological fuzziness.” Then enrolled in a graduate program at the MIT Media Lab, Hill has written a number of essays on the philosophy and social values of free software. (When he was an undergraduate at Hampshire College, I was an outside advisor for his senior thesis and remain friends with him.) +Mako Hill is a brilliant young hacker and Stallman acolyte who wrote a 2005 essay, “Towards a Standard of Freedom: Creative Commons and the Free Software Movement,”~{ Benjamin Mako Hill, “Towards a Standard of Freedom: Creative Commons and the Free Software Movement,” /{Advogato}/, July 29, 2005, at http://www.advogato.org/article/851.html. 
}~ a piece that shares Elkin-Koren’s complaint about the CC’s “ideological fuzziness.” Then enrolled in a graduate program at the MIT Media Lab, Hill has written a number of essays on the philosophy and social values of free software. (When he was an undergraduate at Hampshire College, I was an outside advisor for his senior thesis and remain friends with him.) ={Elkin-Koren, Niva;Hill, Benjamin Mako+2;free culture:differing visions of+31;free software:social movement, as+31} In his “Freedom’s Standard” essay, Hill wrote: “[D]espite CC’s stated desire to learn from and build upon the example of the free software movement, CC sets no defined limits and promises no freedoms, no rights, and no fixed qualities. Free software’s success is built on an ethical position. CC sets no such standard.” While CC prides itself on its more open-minded “some rights reserved” standard, Hill says that a real movement for freedom must make a bolder commitment to the rights of the audience and other creators— namely, that “essential rights are unreservable.”~{ Interview with Benjamin Mako Hill, June 1, 2007. }~ @@ -2069,7 +2069,7 @@ Lessig has argued many times that, just as the free software community decided f Elkin-Koren is not so sure we can segment the world according to creative sectors and let each determine how works shall circulate. “I don’t think we can separate the different sectors, as if we work in different sectors,” she told me. “We all work in the production of information. My ideas on copyright are really affected by the art that I use and the music that I listen to. . . . Information is essential not only for creating something functional or for selling a work of art, but for our citizenship and for our ability to participate in society. So it’s not as if we can say, ‘Well, this sector can decide for themselves.’”~{ Interview with Niva Elkin-Koren, January 30, 2007. 
}~ ={Elkin-Koren, Niva} -As Wikipedia began to take off in popularity, what might have been an unpleasant philosophical rift grew into a more serious fissure with potentially significant consequences. All Wikipedia content is licensed under the Free Software Foundation’s GNU Free Documentation License, or FDL,~{ Wikipedia entry on GNU Free Documentation license, at http://en.wiki pedia.org/wiki/GNU_Free_Documentation_License. }~ largely because the CC licenses did not exist when Wikipedia was launched in 2001. The FDL, originally intended for the documentation manuals that explicate software applications, is essentially the same as the CC ShareAlike license (any derivative works must also be released under the same license granting the freedom to share). But using the FDL can get cumbersome, especially as more video, audio, and photos are incorporated into a text; each artifact would require that the license be posted on it. As more content is shared, the potential for misuse of the content, and lawsuits over violations of licensing agreements, would grow.~{ Michael Fitzgerald, “Copyleft Hits a Snag,” /{Technology Review}/, December 21, 2005. }~ +As Wikipedia began to take off in popularity, what might have been an unpleasant philosophical rift grew into a more serious fissure with potentially significant consequences. All Wikipedia content is licensed under the Free Software Foundation’s GNU Free Documentation License, or FDL,~{ Wikipedia entry on GNU Free Documentation license, at http://en.wikipedia.org/wiki/GNU_Free_Documentation_License. }~ largely because the CC licenses did not exist when Wikipedia was launched in 2001. The FDL, originally intended for the documentation manuals that explicate software applications, is essentially the same as the CC ShareAlike license (any derivative works must also be released under the same license granting the freedom to share). 
But using the FDL can get cumbersome, especially as more video, audio, and photos are incorporated into a text; each artifact would require that the license be posted on it. As more content is shared, the potential for misuse of the content, and lawsuits over violations of licensing agreements, would grow.~{ Michael Fitzgerald, “Copyleft Hits a Snag,” /{Technology Review}/, December 21, 2005. }~ ={Free Documentation License+10;GNU Project+10;Wikipedia:GNU FDL, and+10|CC licenses, and+10} Unfortunately, as a legal matter, the FDL is incompatible with the CC licenses. This means that all content on Wikipedia and its sister Wikimedia projects (Wikispecies, Wikiquote, Wikinews, among other projects) cannot legally be combined with works licensed under CC licenses. Angered by the two “non-free” CC licenses, Stallman dug in his heels and defended Wikipedia’s use of the FDL. He also made it clear that he would remain a critic of Creative Commons unless it revoked or changed its licenses to conform with the Free Software Foundation’s standards of “freedom.” @@ -2103,7 +2103,7 @@ By May 2008 the details of the agreement to make Wikipedia’s entries, licensed As the Creative Commons has grown in popularity, a longer line has formed to take issue with some of its fundamental strategies. One line of criticism comes from anticapitalist ideologues, another from scholars of the underdeveloped nations of the South. -British academics Berry and Moss apparently hanker for a more bracing revolution in culture;they object to the commodification of culture in any form and to the role that copyright law plays in this drama. To them, Lessig is distressingly centrist. He is “always very keen to disassociate himself and the Creative Commons from the (diabolical) insinuation that he is (God forbid!) 
anti-market, anticapitalist, or communist,” Berry and Moss complain.~{ David Berry and Giles Moss, “On the ‘Creative Commons’: A Critique of the Commons Without Commonality,” /{Free Software Magazine}/, July 15, 2005, at http://www.freesoftwaremagagine.com/articles/commons_without_com monality }~ The gist of their objection: Why is Lessig collaborating with media corporations and neoclassical economists when there is a larger, more profound revolution that needs to be fought? A new social ethic and political struggle are needed, they write, “not lawyers exercising their legal vernacular and skills on complicated licenses, court cases and precedents.”
+British academics Berry and Moss apparently hanker for a more bracing revolution in culture; they object to the commodification of culture in any form and to the role that copyright law plays in this drama. To them, Lessig is distressingly centrist. He is “always very keen to disassociate himself and the Creative Commons from the (diabolical) insinuation that he is (God forbid!) anti-market, anticapitalist, or communist,” Berry and Moss complain.~{ David Berry and Giles Moss, “On the ‘Creative Commons’: A Critique of the Commons Without Commonality,” /{Free Software Magazine}/, July 15, 2005, at http://www.freesoftwaremagazine.com/articles/commons_without_commonality }~ The gist of their objection: Why is Lessig collaborating with media corporations and neoclassical economists when there is a larger, more profound revolution that needs to be fought? A new social ethic and political struggle are needed, they write, “not lawyers exercising their legal vernacular and skills on complicated licenses, court cases and precedents.”
 ={Berry, David;Moss, Giles;Lessig, Lawrence:CC licenses, and}
 
 Dense diatribes against the antirevolutionary character of Creative Commons can be heard in various hacker venues and cultural blogs and Web sites.
The argument tends to go along the lines sketched here by Anna Nimus of Berlin, Germany: @@ -2131,7 +2131,7 @@ A more radical and profound critique of the commons came in an open letter to _1 We appreciate and admire the determination with which you nurture your garden of licenses. The proliferation and variety of flowering contracts and clauses in your hothouses is astounding. But we find the paradox of a space that is called a commons and yet so fenced in, and in so many ways, somewhat intriguing. The number of times we had to ask for permission, and the number of security check posts we had to negotiate to enter even a corner of your commons was impressive. . . . Sometimes we found that when people spoke of “Common Property” it was hard to know where the commons ended and where property began . . . -_1 Strangely, the capacity to name something as “mine,” even if in order to “share” it, requires a degree of attainments that is not in itself evenly distributed. Not everyone comes into the world with the confidence that anything is “theirs” to share. This means that the “commons,” in your parlance, consists of an arrangement wherein only those who are in the magic circle of confident owners effectively get a share in that which is essentially, still a configuration of different bits of fenced in property. What they do is basically effect a series of swaps, based on a mutual understanding of their exclusive property rights. So I give you something of what I own, in exchange for which, I get something of what you own. The good or item in question never exits the circuit of property, even, paradoxically, when it is shared. 
Goods that are not owned, or those that have been taken outside the circuit of ownership, effectively cannot be shared, or even circulated.~{ “A Letter to the Commons, from the participants of the ‘Shades of the Commons Workshop,’ ” in /{In the Shade of the Commons:Towards a Culture of Open Networks}/ (Amsterdam, Netherlands: Waag Society, 2006), at http://www3 .fis.utoronto.ca/research/iprp/cracin/publications/pdfs/final/werbin_InThe Shade.pdf. }~
+_1 Strangely, the capacity to name something as “mine,” even if in order to “share” it, requires a degree of attainments that is not in itself evenly distributed. Not everyone comes into the world with the confidence that anything is “theirs” to share. This means that the “commons,” in your parlance, consists of an arrangement wherein only those who are in the magic circle of confident owners effectively get a share in that which is essentially, still a configuration of different bits of fenced in property. What they do is basically effect a series of swaps, based on a mutual understanding of their exclusive property rights. So I give you something of what I own, in exchange for which, I get something of what you own. The good or item in question never exits the circuit of property, even, paradoxically, when it is shared. Goods that are not owned, or those that have been taken outside the circuit of ownership, effectively cannot be shared, or even circulated.~{ “A Letter to the Commons, from the participants of the ‘Shades of the Commons Workshop,’ ” in /{In the Shade of the Commons: Towards a Culture of Open Networks}/ (Amsterdam, Netherlands: Waag Society, 2006), at http://www3.fis.utoronto.ca/research/iprp/cracin/publications/pdfs/final/werbin_InTheShade.pdf. }~
 
 The letter invites a deeper consideration of how humans form commons.
However ingenious and useful the jerry-rigged legal mechanisms of the GPL and Creative Commons, the disembodied voice of the Non Legal Commons speaks, as if through the sewer grate, to remind us that the commons is about much more than law and civil society. It is part of the human condition. Yet the chaotic Asiatic street is not likely to yield conventional economic development without the rule of law, civil institutions, and some forms of legal property. The question posed by the informal commons remains a necessary one to ponder: What balance of commons and property rights, and in what forms, is best for a society? @@ -2143,10 +2143,10 @@ Walk through the blossoming schools of commons thought and it quickly becomes cl It is a compelling argument, but in fact only an indirect criticism of Creative Commons. For filmmakers who need to use film clips from existing films and musicians who want to use a riff from another performer, the fair use doctrine is indeed more important than any CC license. Peter Jaszi, the law professor at American University’s Washington School of Law, believes that even with growing bodies of CC-licensed content, “teachers, filmmakers, editors, freelance critics and others need to do things with proprietary content.” As a practical matter, they need a strong, clear set of fair use guidelines. ={Jaszi, Peter+2} -Jaszi and his colleague Pat Aufderheide, a communications professor who runs the Center for Social Media at American University, have dedicated themselves to clarifying the scope and certainty of fair use. 
They have launched a major fair use project to get specific creative communities to define their “best practices in fair use.” If filmmakers, for example, can articulate their own artistic needs and professional interests in copying and sharing, then the courts are more likely to take those standards into consideration when they rule what is protected under the fair use doctrine.~{ Center for Social Media, at http://www.centerforsocialmedia.org/fairuse. See also Pat Aufderheide and Peter Jaszi, “Fair Use and Best Practices: Surprising Success,” /{Intellectual Property Today}/, October 2007, at http://www.iptoday .com/articles/2007-10-aufderheide.asp; and Peter Jaszi, “Copyright, Fair Use and Motion Pictures,” /{Utah Law Review}/ 3, no. 715 (2007), and which also appeared in R. Kolker, ed., /{Oxford Handbook of Film and Media Studies}/ (2007), at http://www.centerforsocialmedia.org/files/pdf/fairuse_motionpictures.pdf. }~ A set of respectable standards for a given field can help stabilize and expand the application of fair use. +Jaszi and his colleague Pat Aufderheide, a communications professor who runs the Center for Social Media at American University, have dedicated themselves to clarifying the scope and certainty of fair use. They have launched a major fair use project to get specific creative communities to define their “best practices in fair use.” If filmmakers, for example, can articulate their own artistic needs and professional interests in copying and sharing, then the courts are more likely to take those standards into consideration when they rule what is protected under the fair use doctrine.~{ Center for Social Media, at http://www.centerforsocialmedia.org/fairuse. See also Pat Aufderheide and Peter Jaszi, “Fair Use and Best Practices: Surprising Success,” /{Intellectual Property Today}/, October 2007, at http://www.iptoday.com/articles/2007-10-aufderheide.asp; and Peter Jaszi, “Copyright, Fair Use and Motion Pictures,” /{Utah Law Review}/ 3, no. 
715 (2007), and which also appeared in R. Kolker, ed., /{Oxford Handbook of Film and Media Studies}/ (2007), at http://www.centerforsocialmedia.org/files/pdf/fairuse_motionpictures.pdf. }~ A set of respectable standards for a given field can help stabilize and expand the application of fair use. ={Aufderheide, Pat+1} -Inspired in part by a professional code developed by news broadcasters, some of the nation’s most respected filmmakers prepared the Documentary Filmmakers’ Statement of Best Practices in Fair Use, which was released in November 2005. The guidelines have since been embraced by the film industry, television programmers, and insurance companies (who insure against copyright violations) as a default definition about what constitutes fair use in documentary filmmaking.~{ Aufderheide and Jaszi, /{Intellectual Property Today}/, October 2007, at http:// www.iptoday.com/articles/2007-10-aufderheide.asp. }~ Aufderheide and Jaszi are currently exploring fair use projects for other fields, such as teaching, as a way to make fair use a more reliable legal tool for sharing and reuse of works. +Inspired in part by a professional code developed by news broadcasters, some of the nation’s most respected filmmakers prepared the Documentary Filmmakers’ Statement of Best Practices in Fair Use, which was released in November 2005. The guidelines have since been embraced by the film industry, television programmers, and insurance companies (who insure against copyright violations) as a default definition about what constitutes fair use in documentary filmmaking.~{ Aufderheide and Jaszi, /{Intellectual Property Today}/, October 2007, at http://www.iptoday.com/articles/2007-10-aufderheide.asp. }~ Aufderheide and Jaszi are currently exploring fair use projects for other fields, such as teaching, as a way to make fair use a more reliable legal tool for sharing and reuse of works. 
Lessig has been highly supportive of the fair use project and, indeed, he oversees his own fair use law clinic at Stanford Law School, which litigates cases frequently. “It’s not as if I don’t think fair use is important,” said Lessig, “but I do think that if the movement focuses on fair use, we don’t attract the people we need. . . . From my perspective, long-term success in changing the fundamental perspectives around copyright depends on something like Creative Commons as opposed to legal action, and even quasi-legal action, like the Fair Use Project.” ={Lessig, Lawrence:fair use, on+5} @@ -2205,11 +2205,11 @@ For the short term, the fledgling models in these fields are likely to be seen a Entrepreneur John Buckman concedes that his Internet record label, Magnatune, amounts to “building a business model on top of chaos.”~{ John Buckman presentation at iCommons Summit, Dubrovnik, Croatia, June 15, 2007. }~ That is to say, he makes money by honoring open networks and people’s natural social inclinations. The company rejects the proprietary muscle games used by its mainstream rivals, and instead holds itself to an ethical standard that verges on the sanctimonious: “We are not evil.” In the music industry these days, a straight shooter apparently has to be that blunt. ={Buckman, John+4;Magnatune+8;music:CC licenses for+8;Creative Commons (CC) licenses:music, for+8} -Magnatune is a four-person enterprise based in Berkeley, California, that since 2003 has been pioneering a new open business model for identifying and distributing high-quality new music. It does not lock up the music with anticopying technology or digital rights management. It does not exploit its artists with coercive, unfair contracts. It does not harass its customers for making unauthorized copies. Internet users can in fact listen to all of Magnatune’s music for free (not just music snippets) via online streaming.~{ John Buckman entry in Wikipedia, at http://en.wikipedia.org/wiki/John_ Buckman. 
}~ +Magnatune is a four-person enterprise based in Berkeley, California, that since 2003 has been pioneering a new open business model for identifying and distributing high-quality new music. It does not lock up the music with anticopying technology or digital rights management. It does not exploit its artists with coercive, unfair contracts. It does not harass its customers for making unauthorized copies. Internet users can in fact listen to all of Magnatune’s music for free (not just music snippets) via online streaming.~{ John Buckman entry in Wikipedia, at http://en.wikipedia.org/wiki/John_Buckman. }~ -Buckman, a former software programmer turned entrepreneur in his thirties, previously founded and ran Lyris Technologies, an e-mail list management company that he sold in 2005. In deciding to start Magnatune, he took note of the obvious realities that the music industry has tried to ignore: radio is boring, CDs cost too much, record labels exploit their artists, file sharing is not going to go away, people love to share music, and listening to music on the Internet is too much work. “I thought, why not make a record label that has a clue?” said Buckman.~{ John Buckman at Magnatune home page, at http://www.magnatune.com/ info/why. }~ +Buckman, a former software programmer turned entrepreneur in his thirties, previously founded and ran Lyris Technologies, an e-mail list management company that he sold in 2005. In deciding to start Magnatune, he took note of the obvious realities that the music industry has tried to ignore: radio is boring, CDs cost too much, record labels exploit their artists, file sharing is not going to go away, people love to share music, and listening to music on the Internet is too much work. “I thought, why not make a record label that has a clue?” said Buckman.~{ John Buckman at Magnatune home page, at http://www.magnatune.com/info/why. 
}~
 
-Well before the band Radiohead released its In /{Rainbows}/ album with a “pay what you want” experiment, Magnatune was inviting its customers to choose the amount they would be willing to pay, from $5 to $18, for any of Magnatune’s 547 albums. Buckman explains that the arrangement signals a respect for customers who, after all, have lots of free music choices. It also gives them a chance to express their appreciation for artists, who receive 50 percent of the sales price. “It turns out that people are quite generous and they pay on average about $8.40, and they really don’t get anything more for paying more other than feeling like they’re doing the right thing,” said Buckman.~{ John Buckman, interview with Matthew Magee of Out-Law.com, radio podcast, September 13, 2007, at http://www.out-law.com/page-8468. }~ About 20 percent pay more than $12.~{ John Buckman at iCommons, June 15, 2007. For an extensive profile of Buckman and Magnatune, see http://www.openrightsgroup.org/creative business/index.php/John_Buckman:_Magnatune. }~
+Well before the band Radiohead released its /{In Rainbows}/ album with a “pay what you want” experiment, Magnatune was inviting its customers to choose the amount they would be willing to pay, from $5 to $18, for any of Magnatune’s 547 albums. Buckman explains that the arrangement signals a respect for customers who, after all, have lots of free music choices. It also gives them a chance to express their appreciation for artists, who receive 50 percent of the sales price. “It turns out that people are quite generous and they pay on average about $8.40, and they really don’t get anything more for paying more other than feeling like they’re doing the right thing,” said Buckman.~{ John Buckman, interview with Matthew Magee of Out-Law.com, radio podcast, September 13, 2007, at http://www.out-law.com/page-8468. }~ About 20 percent pay more than $12.~{ John Buckman at iCommons, June 15, 2007.
For an extensive profile of Buckman and Magnatune, see http://www.openrightsgroup.org/creativebusiness/index.php/John_Buckman:_Magnatune. }~ ={Radiohead} “The reality is today nobody really needs to pay for music at all,” he acknowledges. “If you choose to hit the ‘buy’ button at Magnatune then you’re one of the people who has decided to actually pay for music. Shouldn’t we reflect that honest behavior back and say, well, if you’re one of the honest people how much do you want to pay?”~{ John Buckman, interview with Matthew Magee, September 13, 2007. }~ The set-your-own-price approach is part of Magnatune’s larger strategy of building the business by cultivating open, interactive relationships with its customers and artists. “If you set up a trusting world,” explains Buckman, “you can be rewarded.” @@ -2237,16 +2237,16 @@ Even as broadcast networks decry the posting of copyrighted television programs Why this inexorable trend toward openness? Because on open networks, excessive control can be counterproductive. The overall value that can be created through interoperability is usually greater than the value that any single player may reap from maintaining its own “walled network.”~{ See Elliot E. Maxwell, “Open Standards, Open Source, and Open Innovation: Harnessing the Benefits of Openness,” /{Innovations:Technology, Governance, Globalization}/ 1, no. 3 (Summer 2006), at http://www.emaxwell.net. }~ For a company to reap value from interoperability, however, it must be willing to compete on an open platform and it must be willing to share technical standards, infrastructure, or content with others. Once this occurs, proprietary gains come from competing to find more sophisticated ways to add value in the production chain, rather than fighting to monopolize basic resources. Advantage also accrues to the company that develops trusting relationships with a community of customers. 
={open business models:value created in+9;value:creation of+9} -Free software was one of the earliest demonstrations of the power of online commons as a way to create value. In his classic 1997 essay “The Cathedral and the Bazaar,” hacker Eric S. Raymond provided a seminal analysis explaining how open networks make software development more cost-effective and innovative than software developed by a single firm.~{ Eric Raymond, “The Cathedral and the Bazaar,” May 1997, at http:// www.catb.org/~esr/writings/cathedral-bazaar. The essay has been translated into nineteen languages to date. }~ A wide-open “bazaar” such as the global Linux community can construct a more versatile operating system than one designed by a closed “cathedral” such as Microsoft. “With enough eyes, all bugs are shallow,” Raymond famously declared. Yochai Benkler gave a more formal economic reckoning of the value proposition of open networks in his pioneering 2002 essay “Coase’s Penguin, or, Linux and the Nature of the Firm.”~{ Yochai Benkler, “Coase’s Penguin, or, Linux and the Nature of the Firm,” /{Yale Law Journal}/ 112, no. 369 (2002), at http://www.benkler.org/CoasesPen guin.html. }~ The title is a puckish commentary on how GNU/Linux, whose mascot is a penguin, poses an empirical challenge to economist Ronald Coase’s celebrated “transaction cost” theory of the firm. In 1937, Coase stated that the economic rationale for forming a business enterprise is its ability to assert clear property rights and manage employees and production more efficiently than contracting out to the marketplace. +Free software was one of the earliest demonstrations of the power of online commons as a way to create value. In his classic 1997 essay “The Cathedral and the Bazaar,” hacker Eric S. 
Raymond provided a seminal analysis explaining how open networks make software development more cost-effective and innovative than software developed by a single firm.~{ Eric Raymond, “The Cathedral and the Bazaar,” May 1997, at http://www.catb.org/~esr/writings/cathedral-bazaar. The essay has been translated into nineteen languages to date. }~ A wide-open “bazaar” such as the global Linux community can construct a more versatile operating system than one designed by a closed “cathedral” such as Microsoft. “With enough eyes, all bugs are shallow,” Raymond famously declared. Yochai Benkler gave a more formal economic reckoning of the value proposition of open networks in his pioneering 2002 essay “Coase’s Penguin, or, Linux and the Nature of the Firm.”~{ Yochai Benkler, “Coase’s Penguin, or, Linux and the Nature of the Firm,” /{Yale Law Journal}/ 112, no. 369 (2002), at http://www.benkler.org/CoasesPenguin.html. }~ The title is a puckish commentary on how GNU/Linux, whose mascot is a penguin, poses an empirical challenge to economist Ronald Coase’s celebrated “transaction cost” theory of the firm. In 1937, Coase stated that the economic rationale for forming a business enterprise is its ability to assert clear property rights and manage employees and production more efficiently than contracting out to the marketplace. ={Benkler, Yochai:open networks, on+3;Raymond, Eric S.:“The Cathedral and the Bazaar”;free software:creation of value, and;Linux:open business models, and;Microsoft:competition against;Coase, Ronald;GNU/Linux:open business models, and;transaction costs:theory of;open business models:“transaction cost” theory, and} What is remarkable about peer production on open networks, said Benkler, is that it undercuts the economic rationale for the firm; commons-based peer production can perform certain tasks more efficiently than a corporation. 
Those tasks must be modular and divisible into small components and capable of being efficiently integrated, Benkler stipulated. The larger point is that value is created on open networks in very different ways than in conventional markets. Asserting proprietary control on network platforms may prevent huge numbers of people from giving your work (free) social visibility, contributing new value to it, or remixing it. “The only thing worse than being sampled on the Internet,” said Siva Vaidhyanathan, with apologies to Oscar Wilde, “is not being sampled on the Internet.” ={Vaidhyanathan, Siva} -The /{New York Times}/'s experience with its paid subscription service, TimesSelect, offers a great example. The /{Times}/ once charged about fifty dollars a year for online access to its premier columnists and news archives. Despite attracting more than 227,000 subscribers and generating about $10 million a year in revenue, the /{Times}/ discontinued the service in 2007.~{ Richard Pérez-Peña, “Times to Stop Charging for Parts of Its Web Site,” /{New York Times}/, September 18, 2007. }~ A /{Times}/ executive explained that lost subscription revenues would be more than offset by advertising to a much larger online readership with free access. The /{Financial Times}/ and the /{Economist}/ have dropped their paywalls, and the /{Wall Street Journal}/ in effect has done so by allowing free access via search engines and link sites. From some leading citadels of capitalism, a rough consensus had emerged: exclusivity can /{decrease}/ the value of online content.~{ Frank Ahrens, “Web Sites, Tear Down That Wall,” /{Washington Post}/, November 16, 2007, p. D1. See also Farhad Manjoo, “The Wall Street Journal’s Website Is Already (Secretly) Free,” /{Salon}/, March 21, 2008, at http://machinist.salon .com/blog/2008/03/21/wsj/index.html. }~ +The /{New York Times}/'s experience with its paid subscription service, TimesSelect, offers a great example. 
The /{Times}/ once charged about fifty dollars a year for online access to its premier columnists and news archives. Despite attracting more than 227,000 subscribers and generating about $10 million a year in revenue, the /{Times}/ discontinued the service in 2007.~{ Richard Pérez-Peña, “Times to Stop Charging for Parts of Its Web Site,” /{New York Times}/, September 18, 2007. }~ A /{Times}/ executive explained that lost subscription revenues would be more than offset by advertising to a much larger online readership with free access. The /{Financial Times}/ and the /{Economist}/ have dropped their paywalls, and the /{Wall Street Journal}/ in effect has done so by allowing free access via search engines and link sites. From some leading citadels of capitalism, a rough consensus had emerged: exclusivity can /{decrease}/ the value of online content.~{ Frank Ahrens, “Web Sites, Tear Down That Wall,” /{Washington Post}/, November 16, 2007, p. D1. See also Farhad Manjoo, “The Wall Street Journal’s Website Is Already (Secretly) Free,” /{Salon}/, March 21, 2008, at http://machinist.salon.com/blog/2008/03/21/wsj/index.html. }~ ={New York Times} -While enormous value can be created on open networks, it can take different forms, notes David P. Reed, who studies information architectures.~{ David P. Reed, “The Sneaky Exponential — Beyond Metcalfe’s Law to the Power of Community Building,” at http://www.reed.com/Papers/GFN/ reedslaw.html. }~ One of the most powerful types of network value is what Reed calls “Group-Forming Networks,” or GFNs — or what Benkler might call commons-based peer production and I would call, less precisely, the commons. Reed talks about “scale-driven value shifts” that occur as a network grows in size. Greater value is created as a network moves from a broadcast model (where “content is king”) to peer production (where transactions dominate) and finally, to a group-forming network or commons (where jointly constructed value is produced and shared). 
+While enormous value can be created on open networks, it can take different forms, notes David P. Reed, who studies information architectures.~{ David P. Reed, “The Sneaky Exponential — Beyond Metcalfe’s Law to the Power of Community Building,” at http://www.reed.com/Papers/GFN/reedslaw.html. }~ One of the most powerful types of network value is what Reed calls “Group-Forming Networks,” or GFNs — or what Benkler might call commons-based peer production and I would call, less precisely, the commons. Reed talks about “scale-driven value shifts” that occur as a network grows in size. Greater value is created as a network moves from a broadcast model (where “content is king”) to peer production (where transactions dominate) and finally, to a group-forming network or commons (where jointly constructed value is produced and shared). ={Reed, David P.;Benkler, Yochai:The Wealth of Networks;commons-based peer production+3;group-forming networks (GFNs)} It is unclear, as a theoretical matter, how to characterize the size and behavior of various “value networks” on the Web today. For simplicity’s stake — and because Web platforms are evolving so rapidly — I refer to two general value propositions, Web 2.0 and the commons. Web 2.0 is about creating new types of value through participation in distributed open networks; the commons is a subset of Web 2.0 that describes fairly distinct, self-governed communities that focus on their own interests, which usually do not involve moneymaking. @@ -2282,7 +2282,7 @@ Netscape was one of the first to demonstrate the power of this model with its re Today, sharing and openness are key to many business strategies. “Open Source: Now It’s an Ecosystem,” wrote /{BusinessWeek}/ in 2005, describing the “gold rush” of venture capital firms investing in startups with open-source products. 
Most of them planned to give away their software via the Web and charge for premium versions or for training, maintenance, and support.~{ “Open Source: Now It’s an Ecosystem,” BusinessWeek Online, October 3, 2005. }~ -The pioneers in using open platforms to develop commercial ecosystems on the Internet are Amazon, Google, Yahoo, and eBay. Each has devised systems that let third-party software developers and businesses extend their platform with new applications and business synergies. Each uses systems that dynamically leverage users’ social behaviors and so stimulate business — for example, customer recommendations about books, search algorithms that identify the most popular Web sites, and reputation systems that enhance consumer confidence in sellers. Even Microsoft, eager to expand the ecology of developers using its products, has released 150 of its source code distributions under three “Shared Source” licenses, two of which meet the Free Software Foundation’s definition of “free.”~{ Microsoft’s Shared Source Licenses, at http://www.microsoft.com/resources/ sharedsource/licensingbasics/sharedsourcelicenses.mspx; see also Lessig blog, “Microsoft Releases Under ShareAlike,” June 24, 2005, at http://lessig .org/blog/2005/06/microsoft_releases_under_share.html. }~ +The pioneers in using open platforms to develop commercial ecosystems on the Internet are Amazon, Google, Yahoo, and eBay. Each has devised systems that let third-party software developers and businesses extend their platform with new applications and business synergies. Each uses systems that dynamically leverage users’ social behaviors and so stimulate business — for example, customer recommendations about books, search algorithms that identify the most popular Web sites, and reputation systems that enhance consumer confidence in sellers. 
Even Microsoft, eager to expand the ecology of developers using its products, has released 150 of its source code distributions under three “Shared Source” licenses, two of which meet the Free Software Foundation’s definition of “free.”~{ Microsoft’s Shared Source Licenses, at http://www.microsoft.com/resources/sharedsource/licensingbasics/sharedsourcelicenses.mspx; see also Lessig blog, “Microsoft Releases Under ShareAlike,” June 24, 2005, at http://lessig.org/blog/2005/06/microsoft_releases_under_share.html. }~ ={Amazon;eBay;Microsoft:“Shared Source” licenses of;Yahoo;Google;World Wide Web:social activity on} More recently, Facebook has used its phenomenal reach — more than 80 million active users worldwide — as a platform for growing a diversified ecology of applications. The company allows software developers to create custom software programs that do such things as let users share reviews of favorite books, play Scrabble or poker with others online, or send virtual gifts to friends. Some apps are just for fun; others are the infrastructure for independent businesses that sell products and services or advertise. In September 2007, Facebook had more than two thousand software applications being used by at least one hundred people.~{ Vauhini Vara, “Facebook Gets Help from Its Friends,” Wall Street Journal, June 22, 2007. See also Riva Richmond, “Why So Many Want to Create Facebook Applications,” /{Wall Street Journal}/, September 4, 2007. }~ @@ -2313,10 +2313,10 @@ The rise of CC+ and associated companies brings to mind Niva Elkin-Koren’s war Revver is another company that has developed an ingenious way to promote the sharing of content, yet still monetize it based on the scale of its circulation. Revver is a Los Angeles–based startup that hosts user-generated video. All videos are embedded with a special tracking tag that displays an ad at the end. 
Like Google’s AdWords system, which charges advertisers for user “click-throughs” on ad links adjacent to Web content, Revver charges advertisers for every time a viewer clicks on an ad. The number of ad views can be tabulated, and Revver splits ad revenues 50-50 with video creators. Key to the whole business model is the use of the CC AttributionNonCommercial-No Derivatives license. The license allows the videos to be legally shared, but prohibits anyone from modifying them or using them for commercial purposes. ={Revver+2;Google;videos and film+2;Internet:videos and films on+2;World Wide Web:videos and film on+2} -One of the most-viewed videos on Revver sparked a minor pop trend. It showed kids dropping Mentos candies into bottles of CocaCola, which produces an explosive chemical reaction. The video is said to have generated around $30,000.~{ Revver entry at Wikipedia, at http://en.wikipedia.org/wiki/Revver. }~ So is new media going to feature silly cat videos and stupid stunts? Steven Starr, a co-founder of Revver, concedes the ubiquity of such videos, but is quick to point to “budding auteurs like Goodnight Burbank, Happy Slip, Studio8 and LoadingReadyRun, all building audiences.” He also notes that online, creators “can take incredible risks with format and genre, can grow their own audience at a fraction of network costs, can enjoy free syndication, hosting, audience-building and ad services at their disposal.”~{ Interview with Steven Starr, “Is Web TV a Threat to TV?” Wall Street Journal, August 7, 2007, at http://online.wsj.com/article/SB118530221391976425 .html. }~ +One of the most-viewed videos on Revver sparked a minor pop trend. It showed kids dropping Mentos candies into bottles of CocaCola, which produces an explosive chemical reaction. The video is said to have generated around $30,000.~{ Revver entry at Wikipedia, at http://en.wikipedia.org/wiki/Revver. }~ So is new media going to feature silly cat videos and stupid stunts? 
Steven Starr, a co-founder of Revver, concedes the ubiquity of such videos, but is quick to point to “budding auteurs like Goodnight Burbank, Happy Slip, Studio8 and LoadingReadyRun, all building audiences.” He also notes that online, creators “can take incredible risks with format and genre, can grow their own audience at a fraction of network costs, can enjoy free syndication, hosting, audience-building and ad services at their disposal.”~{ Interview with Steven Starr, “Is Web TV a Threat to TV?” Wall Street Journal, August 7, 2007, at http://online.wsj.com/article/SB118530221391976425.html. }~ ={Starr, Steven} -Blip.tv is another video content-sharing Web site that splits ad revenues with video creators (although it is not automatic; users must “opt in”). Unlike many videos on YouTube and Revver, blip.tv tends to feature more professional-quality productions and serialized episodes, in part because its founders grew out of the “videoblogging” community. Blip.tv espouses an open business ethic, with shout-outs to “democratization, openness, and sustainability.” While there is a tradition for companies to spout their high-minded principles, blip.tv puts some bite into this claim by offering an open platform that supports many video formats and open metadata standards. And it allows content to be downloaded and shared on other sites. Users can also apply Creative Commons licenses to their videos, which can then be identified by CC-friendly search engines. For all these reasons, Lessig has singled out blip.tv as a “true sharing site,” in contrast to YouTube, which he calls a “faking sharing site” that “gives you tools to /{make}/ it seem as if there’s sharing, but in fact, all the tools drive traffic and control back to a single site.”~{ Lessig blog post, “The Ethics of Web 2.0,” October 20, 2006, at http:// www.lessig.org/blog/archives/003570.shtml. 
}~ +Blip.tv is another video content-sharing Web site that splits ad revenues with video creators (although it is not automatic; users must “opt in”). Unlike many videos on YouTube and Revver, blip.tv tends to feature more professional-quality productions and serialized episodes, in part because its founders grew out of the “videoblogging” community. Blip.tv espouses an open business ethic, with shout-outs to “democratization, openness, and sustainability.” While there is a tradition for companies to spout their high-minded principles, blip.tv puts some bite into this claim by offering an open platform that supports many video formats and open metadata standards. And it allows content to be downloaded and shared on other sites. Users can also apply Creative Commons licenses to their videos, which can then be identified by CC-friendly search engines. For all these reasons, Lessig has singled out blip.tv as a “true sharing site,” in contrast to YouTube, which he calls a “faking sharing site” that “gives you tools to /{make}/ it seem as if there’s sharing, but in fact, all the tools drive traffic and control back to a single site.”~{ Lessig blog post, “The Ethics of Web 2.0,” October 20, 2006, at http://www.lessig.org/blog/archives/003570.shtml. }~ ={blip.tv+1;YouTube+1;Web 2.0:open business, and+3;open business models:open networks and;Lessig, Lawrence:open business sites, and+4} Lessig’s blog post on blip.tv provoked a heated response from blogger Nicholas Carr, a former executive editor of the /{Harvard Business Review}/. The contretemps is worth a close look because it illuminates the tensions between Web 2.0 as a business platform and Web 2.0 as a commons platform. 
In castigating YouTube as a “fake sharing site,” Carr accused Lessig of sounding like Chairman Mao trying to root out counterrevolutionary forces (that is, capitalism) with “the ideology of digital communalism.” @@ -2324,7 +2324,7 @@ Lessig’s blog post on blip.tv provoked a heated response from blogger Nicholas _1 Like Mao, Lessig and his comrades are not only on the wrong side of human nature and the wrong side of culture; they’re also on the wrong side of history. They fooled themselves into believing that Web 2.0 was introducing a new economic system — a system of “social production” — that would serve as the foundation of a democratic, utopian model of culture creation. They were wrong. Web 2.0’s economic system has turned out to be, in effect if not intent, a system of exploitation rather than a system of emancipation. By putting the means of production into the hands of the masses but withholding from those same masses any ownership over the product of their work, Web 2.0 provides an incredibly efficient mechanism to harvest the economic value of the free labor provided by the very, very many and concentrate it into the hands of the very, very few. -_1 The Cultural Revolution is over. It ended before it even began. The victors are the counterrevolutionaries. And they have $1.65 billion [a reference to the sale price of YouTube to Google] to prove it.~{ Nicholas G. Carr, “Web 2.0lier than Thou,” Rough Type blog, October 23, 2006. Joichi Ito has a thoughtful response in his blog, “Is YouTube Web 2.0?” October 22, 2006, at http://joi.ito.com/archives/2006/10/22/is_youtube _web_20.html; and Lessig responded to Carr in his blog, at http://lessig .org/blog/2006/10/stuck_in_the_20th_century_or_t.html. The “communism discourse” persists, and not just among critics of free culture. 
Lawrence Liang of CC India used this epigraph in a book on open-content licenses: “There is a specter haunting cultural production, the specter of open content licensing.” which he attributes to “Karl Marx (reworked for the digital era).” From Liang, /{Guide to Open Content Licenses}/ (Rotterdam, Netherlands: Piet Zwart Institute, Institute for Postgraduate Studies and Research, Willem de Kooning Academy Hogeschool, 2004). }~ +_1 The Cultural Revolution is over. It ended before it even began. The victors are the counterrevolutionaries. And they have $1.65 billion [a reference to the sale price of YouTube to Google] to prove it.~{ Nicholas G. Carr, “Web 2.0lier than Thou,” Rough Type blog, October 23, 2006. Joichi Ito has a thoughtful response in his blog, “Is YouTube Web 2.0?” October 22, 2006, at http://joi.ito.com/archives/2006/10/22/is_youtube_web_20.html; and Lessig responded to Carr in his blog, at http://lessig.org/blog/2006/10/stuck_in_the_20th_century_or_t.html. The “communism discourse” persists, and not just among critics of free culture. Lawrence Liang of CC India used this epigraph in a book on open-content licenses: “There is a specter haunting cultural production, the specter of open content licensing.” which he attributes to “Karl Marx (reworked for the digital era).” From Liang, /{Guide to Open Content Licenses}/ (Rotterdam, Netherlands: Piet Zwart Institute, Institute for Postgraduate Studies and Research, Willem de Kooning Academy Hogeschool, 2004). }~ Lessig’s response, a warm-up for a new book, /{Remix}/, released in late 2008, pointed out that there are really /{three}/ different economies on the Internet — commercial, sharing, and hybrid. 
The hybrid economy now emerging is difficult to understand, he suggested, because it “neither gives away everything, nor does it keep everything.” The challenge of open business models, Lessig argues, is to discover the “golden mean.” ={Lessig, Lawrence:Remix;Internet:hybrid economy enabled by+1|sharing economy of+1|commercial economy of+1} @@ -2342,7 +2342,7 @@ The Brazilian /{tecnobrega}/ music scene discussed briefly in chapter 7 is anoth Artists make most of their money from these live performances, not from CDs, said Lemos. Bands earn an average of $1,100 per solo performance at these events, and $700 when playing with other bands — this, in a region where the average monthly income is $350. Altogether, Lemos estimates that the sound system parties as a business sector earn $1.5 million per month, on fixed assets of $8 million. -“The band Calypso has been approached several times by traditional record labels,” said Lemos, “but they turned down all the offers. The reason is that they make more money by means of the existing business model. In an interview with the largest Brazilian newspaper, the singer of the band said, ‘We do not fight the pirates. We have become big /{because}/ of piracy, which has taken our music to cities where they would never have been.’ ” Calypso has sold more than 5 million albums in Brazil and is known for attracting as many as fifty thousand people to its concerts, Lemos said.~{ Ronaldo Lemos, “From Legal Commons to Social Commons: Developing Countries and the Cultural Industry in the 21st Century,” 2006, at http://www.icommons.org/resources/from-legal-commons-to-social-comm ons-brazil-and-the-cultural-industry-1. See Paula Martini post on iCommons blog, “Over the Top: The New (and Bigger) Cultural Industry in Brazil,” September 28, 2007, at http://www.icommons.org/articles/over-the-top-thenew-and-bigger-cultural-industry-in-brazil. 
}~ +“The band Calypso has been approached several times by traditional record labels,” said Lemos, “but they turned down all the offers. The reason is that they make more money by means of the existing business model. In an interview with the largest Brazilian newspaper, the singer of the band said, ‘We do not fight the pirates. We have become big /{because}/ of piracy, which has taken our music to cities where they would never have been.’ ” Calypso has sold more than 5 million albums in Brazil and is known for attracting as many as fifty thousand people to its concerts, Lemos said.~{ Ronaldo Lemos, “From Legal Commons to Social Commons: Developing Countries and the Cultural Industry in the 21st Century,” 2006, at http://www.icommons.org/resources/from-legal-commons-to-social-commons-brazil-and-the-cultural-industry-1. See Paula Martini post on iCommons blog, “Over the Top: The New (and Bigger) Cultural Industry in Brazil,” September 28, 2007, at http://www.icommons.org/articles/over-the-top-thenew-and-bigger-cultural-industry-in-brazil. }~ ={piracy} Another highly successful open business model in the Brazilian music scene is TramaVirtual, an open platform on which more than 15,000 musicians have uploaded some 35,000 albums. Fans can then download the music for free. While this does not sound like a promising business proposition, it makes a lot of sense in the context of Brazil’s music marketplace. Major record labels release a minuscule number of new Brazilian music CDs each year, and they sell for about $10 to $15.~{ Ibid. }~ Only the cultured elite can afford music CDs, and the native musical talent — which is plentiful in Brazil — has no place to go. With such a constricted marketplace, TramaVirtual has become hugely popular by showcasing new and interesting music. 
@@ -2369,7 +2369,7 @@ Virtually all the albums on Jamendo use one or more of the six basic CC licenses For businesses operating on open networks, it is a mistake to regard people merely as customers; they are collaborators and even coinvestors. As more companies learn to interact closely with their customers, it is only natural that conversations about the product or service become more intimate and collaborative. The roles of the “consumer” and “producer” are starting to blur, leading to what some business analysts call the “prosumer”~{ Don Tapscott and Anthony D. Williams, /{Wikinomics: How Mass Collaboration Changes Everything}/ (New York Portfolio, 2006), chapter 5, “The Prosumers.” }~ and the “decentralized co-creation of value.”~{ David Bollier, /{The Rise of Collective Intelligence: Decentralized Co-creation of Value as a New Paradigm of Commerce and Culture}/ (Washington, DC: Aspen Institute Communications and Society Program, 2008).}~ The basic idea is that online social communities are becoming staging areas for the advancement of business objectives. Businesses see these communities as cost-effective ways to identify promising innovations, commercialize them more rapidly, tap into more reliable market intelligence, and nurture customer goodwill. -Amateurs who share with one another through a loose social commons have always been a source of fresh ideas. Tech analyst Elliot Maxwell (citing Lessig) notes how volunteers helped compile the /{Oxford English Dictionary}/ by contributing examples of vernacular usage; how the Homebrew Computer Club in the San Francisco Bay area developed many elements of the first successful personal computer; and how sharing among auto enthusiasts helped generate many of the most important early automotive innovations.~{ Elliot Maxwell, “Open Standards, Open Source, and Open Innovation: Harnessing the Benefits of Openness,” /{Innovations:Technology, Governance, Globalization}/ 1, no. 
3 (Summer 2006), at http://www.emaxwell.net, p. 150. }~ In our time, hackers were the ones who developed ingenious ways to use unlicensed electromagnetic spectrum as a commons, which we now know as Wi-Fi. They tinkered with the iPod to come up with podcasts, a new genre of broadcasting that commercial broadcasters now emulate.~{ Elliot E. Maxwell drew my attention to these examples in his excellent essay “Open Standards, Open Source, and Open Innovation.” }~ Numerous self-organized commons have incubated profitable businesses. Two movie buffs created the Internet Movie Database as separate Usenet newsgroups in 1989; six years later they had grown so large that they had merged and converted into a business that was later sold to Amazon.~{ Wikipedia entry, IMDB, at http://en.wikipedia.org/wiki/Internet_Movie _Database. }~ The Compact Disc Database was a free database of software applications that looks up information about audio CDs via the Internet. It was originally developed by a community of music fans as a shared database, but in 2000 it had grown big enough that it was sold and renamed Gracenote.~{ Wikipedia entry, CDDB, at http://en.wikipedia.org/wiki/CDDB. }~ +Amateurs who share with one another through a loose social commons have always been a source of fresh ideas. Tech analyst Elliot Maxwell (citing Lessig) notes how volunteers helped compile the /{Oxford English Dictionary}/ by contributing examples of vernacular usage; how the Homebrew Computer Club in the San Francisco Bay area developed many elements of the first successful personal computer; and how sharing among auto enthusiasts helped generate many of the most important early automotive innovations.~{ Elliot Maxwell, “Open Standards, Open Source, and Open Innovation: Harnessing the Benefits of Openness,” /{Innovations:Technology, Governance, Globalization}/ 1, no. 3 (Summer 2006), at http://www.emaxwell.net, p. 150. 
}~ In our time, hackers were the ones who developed ingenious ways to use unlicensed electromagnetic spectrum as a commons, which we now know as Wi-Fi. They tinkered with the iPod to come up with podcasts, a new genre of broadcasting that commercial broadcasters now emulate.~{ Elliot E. Maxwell drew my attention to these examples in his excellent essay “Open Standards, Open Source, and Open Innovation.” }~ Numerous self-organized commons have incubated profitable businesses. Two movie buffs created the Internet Movie Database as separate Usenet newsgroups in 1989; six years later they had grown so large that they had merged and converted into a business that was later sold to Amazon.~{ Wikipedia entry, IMDB, at http://en.wikipedia.org/wiki/Internet_Movie_Database. }~ The Compact Disc Database was a free database of software applications that looks up information about audio CDs via the Internet. It was originally developed by a community of music fans as a shared database, but in 2000 it had grown big enough that it was sold and renamed Gracenote.~{ Wikipedia entry, CDDB, at http://en.wikipedia.org/wiki/CDDB. }~ ={Amazon;Gracenote;Homebrew Computer Club;iPod;Maxwell, Elliot;Oxford English Dictionary;Wi-Fi;hackers:community of;commons:sources of new ideas, as+11} A commons can be highly generative because its participants are tinkering and innovating for their own sake — for fun, to meet a challenge, to help someone out. Amateurs are not constrained by conventional business ideas about what may be marketable and profitable. They do not have to meet the investment expectations of venture capitalists and Wall Street. Yet once promising new ideas do surface in the commons, market players can play a useful role in supplying capital and management expertise to develop, improve, and commercialize an invention. 
@@ -2389,7 +2389,7 @@ Lego decided to write a “right to hack” provision into the Mindstorms softwa Another improbable success in distributed, user-driven innovation is Threadless, a Chicago-based t-shirt company. Threadless sells hundreds of original t-shirt designs, each of which is selected by the user community from among more than eight hundred designs submitted every week. The proposed designs are rated on a scale of one to five by the Web site’s more than 600,000 active users. Winners receive cash awards, recognition on the Web site, and their names on the t-shirt label. Every week, Threadless offers six to ten new t-shirts featuring the winning designs. ={Threadless+1} -In 2006, the company sold more than 1.5 million t-shirts without any traditional kind of marketing. Its business model is so rooted in the user community that Threadless co-founders Jake Nickell and Jacob DeHart have declined offers to sell their t-shirts through conventional, big-name retailers. Threadless’s business model has helped it overcome two major challenges in the apparel industry, write Harvard Business School professor Karim R. Lakhani and consultant Jill A. Panetta — the ability “to attract the right design talent at the right time to create recurring fashion hits,” and the ability “to forecast sales so as to be better able to match production cycles with demand cycles.”~{ Karim R. Lakhani and Jill A. Panetta, “The Principles of Distributed Innovation,” Research Publication No. 2007-7, Berkman Center for Internet & Society, Harvard Law School, October 2007, at http://papers.ssrn.com/abstract _id=1021034. See also Darren Dahl, “Nice Threads,” /{Southwest Airlines Spirit}/, December 2006. }~ +In 2006, the company sold more than 1.5 million t-shirts without any traditional kind of marketing. 
Its business model is so rooted in the user community that Threadless co-founders Jake Nickell and Jacob DeHart have declined offers to sell their t-shirts through conventional, big-name retailers. Threadless’s business model has helped it overcome two major challenges in the apparel industry, write Harvard Business School professor Karim R. Lakhani and consultant Jill A. Panetta — the ability “to attract the right design talent at the right time to create recurring fashion hits,” and the ability “to forecast sales so as to be better able to match production cycles with demand cycles.”~{ Karim R. Lakhani and Jill A. Panetta, “The Principles of Distributed Innovation,” Research Publication No. 2007-7, Berkman Center for Internet & Society, Harvard Law School, October 2007, at http://papers.ssrn.com/abstract_id=1021034. See also Darren Dahl, “Nice Threads,” /{Southwest Airlines Spirit}/, December 2006. }~ ={DeHart, Jacob;Nickell, Jake;Lakhani, Karim R.;Panetta, Jill A.} A number of companies have started successful enterprises based on the use of wikis, the open Web platforms that allow anyone to contribute and edit content and collaborate. Evan Prodromou, the founder of Wikitravel, a free set of worldwide travel guides, has identified four major types of wiki businesses: service providers who sell access to wikis (Wikispace, wetpaint, PBwiki); content hosters of wikis (wikiHow, Wikitravel, Wikia); consultants who advise companies how to run their own wikis (Socialtext); and content developers (WikiBiz, an offshoot of Wikipedia). @@ -2418,7 +2418,7 @@ What has changed in recent years is our perceptions. 
The actual role of the comm /{Web 2.0 tools, open access, and CC licenses are helping to accelerate scientific discovery.}/ -It was one of those embarrassing episodes in science: Two sets of researchers published papers in a German organic chemistry journal, /{Angewandte Chemie}/, announcing that they had synthesized a strange new substance with “12-membered rings.” Then, as blogger and chemist Derek Lowe tells the story, “Professor Manfred Cristl of Wurzburg, who apparently knows his pyridinium chemistry pretty well, recognized this as an old way to make further pyridinium salts, not funky twelve-membered rings. He recounts how over the last couple of months he exchanged awkward emails with the two sets of authors, pointing out that they seem to have rediscovered a 100-year-old reaction. . . .”~{ Derek Lowe, “Neat! Wish It Were True!” /{In the Pipeline}/ [blog], November 29, 2007, at http://pipeline.corante.com. See also, Donna Wentworth, “Why We Need to Figure Out What We Already Know,” Science Commons blog, January 4, 2008, at http://sciencecommons.org/weblog/archives/2008/01/04/ why-we-need-to-figure-out-what-we-already-know. }~ +It was one of those embarrassing episodes in science: Two sets of researchers published papers in a German organic chemistry journal, /{Angewandte Chemie}/, announcing that they had synthesized a strange new substance with “12-membered rings.” Then, as blogger and chemist Derek Lowe tells the story, “Professor Manfred Cristl of Wurzburg, who apparently knows his pyridinium chemistry pretty well, recognized this as an old way to make further pyridinium salts, not funky twelve-membered rings. He recounts how over the last couple of months he exchanged awkward emails with the two sets of authors, pointing out that they seem to have rediscovered a 100-year-old reaction. . . .”~{ Derek Lowe, “Neat! Wish It Were True!” /{In the Pipeline}/ [blog], November 29, 2007, at http://pipeline.corante.com. 
See also, Donna Wentworth, “Why We Need to Figure Out What We Already Know,” Science Commons blog, January 4, 2008, at http://sciencecommons.org/weblog/archives/2008/01/04/why-we-need-to-figure-out-what-we-already-know. }~ ={Lowe, Derek} In the Internet age, people generally assume that these kinds of things can’t happen. All you have to do is run a Web search for “pyridinium,” right? But as scientists in every field are discovering, the existence of some shard of highly specialized knowledge does not necessarily mean that it can be located or understood. After all, a Google search for “pyridinium” turns up 393,000 results. And even peer reviewers for journals (who may have been partly at fault in this instance) have the same problem as any researcher: the unfathomable vastness of the scientific and technical literature makes it difficult to know what humankind has already discovered. @@ -2444,12 +2444,12 @@ Perhaps the most salient example of the power of open science was the Human Geno A 2008 report by the Committee for Economic Development identified a number of other notable open research projects.~{ Committee for Economic Development, /{Harnessing Openness to Transform American Health Care}/ (Washington, DC: CED, 2008). }~ There is the PubChem database, which amasses data on chemical genomics from a network of researchers; the Cancer Biomedical Informatics Grid, a network of several dozen cancer research centers and other organizations that shares data, research tools, and software applications; and TDR Targets a Web clearinghouse sponsored by the World Health Organization that lets researchers share genetic data on neglected diseases such as malaria and sleeping sickness. It is telling that Bill Gates, who in his commercial life is a staunch advocate of proprietary control of information, has been a leader, through his Bill & Melinda Gates Foundation, in requiring research grantees to share their data. 
={Gates, Bill} -There has even been the emergence of open-source biotechnology, which is applying the principles of free software development to agricultural biotech and pharmaceutical development.~{ See, e.g., Rockefeller Foundation, “2005 Bellagio Meeting on Open Source Models of Collaborative Innovation in the Life Sciences” [report], Bellagio, Italy, September 2005. See also Janet Elizabeth Hope, “Open Source Biotechnology,” Ph.D. diss., Australian National University, December 2004. }~ Richard Jefferson, the founder of Cambia, a nonprofit research institute in Australia, launched the “kernel” of what he calls the first opensource biotech toolkit. It includes patented technologies such as TransBacter, which is a method for transferring genes to plants, and GUSPlus, which is a tool for visualizing genes and understanding their functions.~{ Interview with Richard Jefferson, September 7, 2006. See also http://www .cambia.org. }~ By licensing these patented research tools for open use, Jefferson hopes to enable researchers anywhere in the world— not just at large biotech companies or universities — to develop their own crop improvement technologies. +There has even been the emergence of open-source biotechnology, which is applying the principles of free software development to agricultural biotech and pharmaceutical development.~{ See, e.g., Rockefeller Foundation, “2005 Bellagio Meeting on Open Source Models of Collaborative Innovation in the Life Sciences” [report], Bellagio, Italy, September 2005. See also Janet Elizabeth Hope, “Open Source Biotechnology,” Ph.D. diss., Australian National University, December 2004. }~ Richard Jefferson, the founder of Cambia, a nonprofit research institute in Australia, launched the “kernel” of what he calls the first opensource biotech toolkit. 
It includes patented technologies such as TransBacter, which is a method for transferring genes to plants, and GUSPlus, which is a tool for visualizing genes and understanding their functions.~{ Interview with Richard Jefferson, September 7, 2006. See also http://www.cambia.org. }~ By licensing these patented research tools for open use, Jefferson hopes to enable researchers anywhere in the world— not just at large biotech companies or universities — to develop their own crop improvement technologies. ={Jefferson, Richard} 2~ The Viral Spiral in Science -Sociologist Robert Merton is often credited with identifying the social values and norms that make science such a creative, productive enterprise. In a notable 1942 essay, Merton described scientific knowledge as “common property” that depends critically upon an open, ethical, peer-driven process.~{ Robert Merton, “Science and Democratic Social Structure,” in /{Social Theory and Social Structure}/, 3d ed. (New York: Free Press, 1968), pp. 604–15. }~ Science is an engine of discovery precisely because research is available for all to see and replicate. It has historically tried to keep some distance from the marketplace for fear that corporate copyrights, patents, or contractual agreements will lock up knowledge that should be available to everyone, especially future scientists.~{ Richard R. Nelson, “The Market Economy and the Scientific Commons,” /{Research Policy}/ 33, no. 3 (April 2004), pp. 455–71. See also Karim R. Lakhani et al., “The Value of Openness in Scientific Problem Solving,” Harvard Business School Working Paper 07-050, January 2007, at http://www.hbs.edu/ research/pdf/07-050.pdf. }~ Secrecy can also make it difficult for the scientific community to verify research results. +Sociologist Robert Merton is often credited with identifying the social values and norms that make science such a creative, productive enterprise. 
In a notable 1942 essay, Merton described scientific knowledge as “common property” that depends critically upon an open, ethical, peer-driven process.~{ Robert Merton, “Science and Democratic Social Structure,” in /{Social Theory and Social Structure}/, 3d ed. (New York: Free Press, 1968), pp. 604–15. }~ Science is an engine of discovery precisely because research is available for all to see and replicate. It has historically tried to keep some distance from the marketplace for fear that corporate copyrights, patents, or contractual agreements will lock up knowledge that should be available to everyone, especially future scientists.~{ Richard R. Nelson, “The Market Economy and the Scientific Commons,” /{Research Policy}/ 33, no. 3 (April 2004), pp. 455–71. See also Karim R. Lakhani et al., “The Value of Openness in Scientific Problem Solving,” Harvard Business School Working Paper 07-050, January 2007, at http://www.hbs.edu/research/pdf/07-050.pdf. }~ Secrecy can also make it difficult for the scientific community to verify research results. ={Merton, Robert;science:cientific knowledge+2} Although scientific knowledge eventually becomes publicly available, it usually flows in semi-restricted ways, at least initially, because scientists usually like to claim personal credit for their discoveries. They may refuse to share their latest research lest a rival team of scientists gain a competitive advantage. They may wish to claim patent rights in their discoveries. @@ -2457,13 +2457,13 @@ Although scientific knowledge eventually becomes publicly available, it usually So scientific knowledge is not born into the public sphere, but there is a strong presumption that it ought to be treated as a shared resource as quickly as possible. As law scholar Robert Merges noted in 1996, “Science is not so much given freely to the public as shared under a largely implicit code of conduct among a more or less well identified circle of similarly situated scientists. In other words . . 
. science is more like a limited-access commons than a truly open public domain.”~{ Robert Merges, “Property Rights Theory and the Commons: The Case of Scientific Research,” /{Social Philosophy and Policy}/ 13, no. 2 (Summer 1996), pp. 145–61. }~ In certain disciplines, especially those involving large capital equipment such as telescopes and particle accelerators, the sharing of research is regarded as a kind of membership rule for belonging to a club. ={Merges, Robert} -As Web 2.0 innovations have demonstrated the power of the Great Value Shift, the convergence of open source, open access, and open science has steadily gained momentum.~{ John Willinsky, “The Unacknowledged Convergence of Open Source, Open Access and Open Science,” /{First Monday}/ 10, no. 8 (August 2005), at http:// firstmonday.org/issues/issue10_8/willinsky/index.html. }~ Creative Commons was mindful of this convergence from its beginnings, but it faced formidable practical challenges in doing anything about it. “From the very first meetings of Creative Commons,” recalled law professor James Boyle, a CC board member, “we thought that science could be the killer app. We thought that science could be the place where Creative Commons could really make a difference, save lives, and have a dramatic impact on the world. There is massive, unnecessary friction in science and we think we can deal with it. Plus, there’s the Mertonian ideal of science, with which Creative Commons couldn’t fit more perfectly.”~{ Interview with James Boyle, August 15, 2006. }~ +As Web 2.0 innovations have demonstrated the power of the Great Value Shift, the convergence of open source, open access, and open science has steadily gained momentum.~{ John Willinsky, “The Unacknowledged Convergence of Open Source, Open Access and Open Science,” /{First Monday}/ 10, no. 8 (August 2005), at http://firstmonday.org/issues/issue10_8/willinsky/index.html. 
}~ Creative Commons was mindful of this convergence from its beginnings, but it faced formidable practical challenges in doing anything about it. “From the very first meetings of Creative Commons,” recalled law professor James Boyle, a CC board member, “we thought that science could be the killer app. We thought that science could be the place where Creative Commons could really make a difference, save lives, and have a dramatic impact on the world. There is massive, unnecessary friction in science and we think we can deal with it. Plus, there’s the Mertonian ideal of science, with which Creative Commons couldn’t fit more perfectly.”~{ Interview with James Boyle, August 15, 2006. }~ ={Merton, Robert;Boyle, James:Science Commons, and+1;Great Value Shift;Web 2.0:Great Value Shift, and} But despite its early interest in making the Web more research-friendly, Creative Commons realized that science is a special culture unto itself, one that has so many major players and niche variations that it would be foolhardy for an upstart nonprofit to try to engage with it. So in 2002 Creative Commons shelved its ambitions to grapple with science as a commons, and focused instead on artistic and cultural sectors. By January 2005, however, the success of the CC licenses emboldened the organization to revisit its initial idea. As a result of deep personal engagement by several Creative Commons board members — computer scientist Hal Abelson, law professors James Boyle and Michael Carroll, and film producer Eric Saltzman — Creative Commons decided to launch a spin-off project, Science Commons. 
The new initiative would work closely with scientific disciplines and organizations to try to build what it now calls “the Research Web.” ={Abelson, Hal:CC board, on;Carroll, Michael W.;Saltzman, Eric;Science Commons:CC Commons spinoff, and+5}

-Science Commons aims to redesign the “information space” — the technologies, legal rules, institutional practices, and social norms — so that researchers can more easily share their articles, datasets, and other resources. The idea is to reimagine and reinvent the “cognitive infrastructures” that are so critical to scientific inquiry. Dismayed by the pressures exerted by commercial journal publishers, open-access publishing advocate Jean-Claude Guédon has called on librarians to become “epistemological engineers.”~{ Jean-Claude Guédon, “In Oldenburg’s Long Shadow: Librarians, Research Scientists, Publishers and the Control of Scientific Publishing,” at http:// www.arl.org/resources/pubs/mmproceedings/138guedon.shtml. }~ They need to design better systems (technical, institutional, legal, and social) for identifying, organizing, and using knowledge. The payoff? Speedier research and greater scientific discovery and innovation. It turns out that every scientific discipline has its own special set of impediments to address. The recurring problem is massive, unnecessary transaction costs. There is an enormous waste of time, expense, bureaucracy, and logistics in acquiring journal articles, datasets, presentations, and physical specimens.
+Science Commons aims to redesign the “information space” — the technologies, legal rules, institutional practices, and social norms — so that researchers can more easily share their articles, datasets, and other resources. The idea is to reimagine and reinvent the “cognitive infrastructures” that are so critical to scientific inquiry. 
Dismayed by the pressures exerted by commercial journal publishers, open-access publishing advocate Jean-Claude Guédon has called on librarians to become “epistemological engineers.”~{ Jean-Claude Guédon, “In Oldenburg’s Long Shadow: Librarians, Research Scientists, Publishers and the Control of Scientific Publishing,” at http://www.arl.org/resources/pubs/mmproceedings/138guedon.shtml. }~ They need to design better systems (technical, institutional, legal, and social) for identifying, organizing, and using knowledge. The payoff? Speedier research and greater scientific discovery and innovation. It turns out that every scientific discipline has its own special set of impediments to address. The recurring problem is massive, unnecessary transaction costs. There is an enormous waste of time, expense, bureaucracy, and logistics in acquiring journal articles, datasets, presentations, and physical specimens. ={Science Commons:libraries, and+5;science:transaction costs in+1;transaction costs:in science+1;libraries:Science Commons, and} If transaction costs could be overcome, scientists could vastly accelerate their research cycles. They could seek answers in unfamiliar bodies of research literature. They could avoid duplicating other people’s flawed research strategies. They could formulate more imaginative hypotheses and test them more rapidly. They could benefit from a broader, more robust conversation (as in free software — “with enough eyes, all bugs are shallow”) and use computer networks to augment and accelerate the entire scientific process. @@ -2511,7 +2511,7 @@ Not surprisingly, many commercial publishers regard OA publishing as a disruptiv It remains an open question whether the OA business model will work in fields where little research is directly funded (and thus upfront payments are not easily made). 
As Suber reports, “There are hundreds of OA journals in the humanities, but very, very few of them charge a fee on the author’s side; most of them have institutional subsidies from a university say, or a learned society.”~{ Interview with Peter Suber, June 28, 2006. }~ Yet such subsidies, in the overall scheme of things, may be more attractive to universities or learned societies than paying high subscription fees for journals or online access. ={Suber, Peter+1} -The tension between commercial publishers and academic authors has intensified over the past decade, fueling interest in OA alternatives. The most salient point of tension is the so-called “serials crisis.” From 1986 to 2006, libraries that belong to the Association of Research Libraries saw the cost of serial journals rise 321 percent, or about 7.5 percent a year for twenty consecutive years.~{ Association of Research Libraries, /{ARL Statistics}/ 2005–06, at http://www.arl .org/stats/annualsurveys/ar/stats/arlstats06.shtml. }~ This rate is four times higher than the inflation rate for those years. Some commercial journal publishers reap profits of nearly 40 percent a year.~{ Peter Suber, “Creating an Intellectual Commons through Open Access,” in Charlotte Hess and Elinor Ostrom, eds., /{Understanding Knowledge as a Commons: From Theory to Practice}/ (Cambridge, MA: MIT Press, 2007), p. 175. }~ By 2000 subscription rates were so crushing that the Association of American Universities and the Association of Research Libraries issued a joint statement that warned, “The current system of scholarly publishing has become too costly for the academic community to sustain.”~{ Association of Research Libraries, “Tempe Principles for Emerging Systems of Scholarly Publishing,” May 10, 2000, at http://www.arl.org/resources/pubs/ tempe/index.shtml. 
}~ Three years later, the high price of journals prompted Harvard, the University of California, Cornell, MIT, Duke, and other elite research universities to cancel hundreds of journal subscriptions — a conspicuous act of rebellion by the library community. +The tension between commercial publishers and academic authors has intensified over the past decade, fueling interest in OA alternatives. The most salient point of tension is the so-called “serials crisis.” From 1986 to 2006, libraries that belong to the Association of Research Libraries saw the cost of serial journals rise 321 percent, or about 7.5 percent a year for twenty consecutive years.~{ Association of Research Libraries, /{ARL Statistics}/ 2005–06, at http://www.arl.org/stats/annualsurveys/ar/stats/arlstats06.shtml. }~ This rate is four times higher than the inflation rate for those years. Some commercial journal publishers reap profits of nearly 40 percent a year.~{ Peter Suber, “Creating an Intellectual Commons through Open Access,” in Charlotte Hess and Elinor Ostrom, eds., /{Understanding Knowledge as a Commons: From Theory to Practice}/ (Cambridge, MA: MIT Press, 2007), p. 175. }~ By 2000 subscription rates were so crushing that the Association of American Universities and the Association of Research Libraries issued a joint statement that warned, “The current system of scholarly publishing has become too costly for the academic community to sustain.”~{ Association of Research Libraries, “Tempe Principles for Emerging Systems of Scholarly Publishing,” May 10, 2000, at http://www.arl.org/resources/pubs/tempe/index.shtml. }~ Three years later, the high price of journals prompted Harvard, the University of California, Cornell, MIT, Duke, and other elite research universities to cancel hundreds of journal subscriptions — a conspicuous act of rebellion by the library community. 
={libraries:“serials crisis”, and|Science Commons, and;Science Commons:libraries, and} As journal prices have risen, the appeal of OA publishing has only intensified. Unfortunately, migrating to OA journals is not simply an economic issue. Within academia, the reputation of a journal is deeply entwined with promotion and tenure decisions. A scientist who publishes an article in /{Cell}/ or /{Nature}/ earns far more prestige than she might for publishing in a little-known OA journal. @@ -2522,14 +2522,14 @@ So while publishing in OA journals may be economically attractive, it flouts the One of the first major salvos of the movement came in 2000, when biomedical scientists Harold E. Varmus, Patrick O. Brown, and Michael B. Eisen called on scientific publishers to make their literature available through free online public archives such as the U.S. National Library of Medicine’s PubMed Central. Despite garnering support from nearly 34,000 scientists in 180 countries, the measure did not stimulate the change sought. It did alert the scientific world, governments, and publishers about the virtues of OA publishing, however, and galvanized scientists to explore next steps. ={Brown, Patrick O.;Varmus, Harold E.} -At the time, a number of free, online peer-reviewed journals and free online archives were under way.~{ http://www.earlham.edu/~peters/fos/timeline.htm. }~ But much of the momentum for organized OA movement began in 2001, when the Open Society Institute convened a group of leading librarians, scientists, and other academics in Hungary. In February 2002 the group released the Budapest Open Access Initiative, a statement that formally describes “open access” as the freedom of users to “read, download, copy, distribute, print, search or link to the full texts of . . . 
articles, crawl them for indexing, pass them as data to software, or use them for any other lawful purpose, without financial, legal or technical barriers other than those inseparable from gaining access to the Internet itself.”~{ The Budapest Open Access Initiative can be found at http://www.soros.org/ openaccess. }~ Two subsequent statements, the Bethesda Declaration and the Berlin Declaration, in June 2003 and October 2003, respectively, expanded upon the definitions of open access and gave the idea new prominence. (Suber calls the three documents the “BBB definition” of open access.)~{ http://www.earlham.edu/~peters/fos/overview.htm. }~ +At the time, a number of free, online peer-reviewed journals and free online archives were under way.~{ http://www.earlham.edu/~peters/fos/timeline.htm. }~ But much of the momentum for organized OA movement began in 2001, when the Open Society Institute convened a group of leading librarians, scientists, and other academics in Hungary. In February 2002 the group released the Budapest Open Access Initiative, a statement that formally describes “open access” as the freedom of users to “read, download, copy, distribute, print, search or link to the full texts of . . . articles, crawl them for indexing, pass them as data to software, or use them for any other lawful purpose, without financial, legal or technical barriers other than those inseparable from gaining access to the Internet itself.”~{ The Budapest Open Access Initiative can be found at http://www.soros.org/openaccess. }~ Two subsequent statements, the Bethesda Declaration and the Berlin Declaration, in June 2003 and October 2003, respectively, expanded upon the definitions of open access and gave the idea new prominence. (Suber calls the three documents the “BBB definition” of open access.)~{ http://www.earlham.edu/~peters/fos/overview.htm. 
}~ ={Suber, Peter;Budapest Open Access Initiative (2002);libraries:open access movement, and} Creative Commons licenses have been critical tools in the evolution of OA publishing because they enable scientists and scholars to authorize in advance the sharing, copying, and reuse of their work, compatible with the BBB definition. The Attribution (BY) and Attribution-Non-Commercial (BY-NC) licenses are frequently used; many OA advocates regard the Attribution license as the preferred choice. The protocols for “metadata harvesting” issued by the Open Archives Initiative are another useful set of tools in OA publishing. When adopted by an OA journal, these standardized protocols help users more easily find research materials without knowing in advance which archives they reside in, or what they contain. There is no question that OA is transforming the market for scholarly publishing, especially as pioneering models develop. The Public Library of Science announced its first two open-access journals in December 2002. The journals represented a bold, high-profile challenge by highly respected scientists to the subscription-based model that has long dominated scientific publishing. Although Elsevier and other publishers scoffed at the economic model, the project has expanded and now publishes seven OA journals, for biology, computational biology, genetics, pathogens, and neglected tropical diseases, among others. -OA received another big boost in 2004 when the National Institutes for Health proposed that all NIH-funded research be made available for free one year after its publication in a commercial journal. The $28 billion that the NIH spends on research each year (more than the domestic budget of 142 nations!) results in about 65,000 peer-reviewed articles, or 178 every day. Unfortunately, commercial journal publishers succeeded in making the proposed OA policy voluntary. The battle continued in Congress, but it became clear that the voluntary approach was not working. 
Only 4 percent of researchers published their work under OA standards, largely because busy, working scientists did not consider it a priority and their publishers were not especially eager to help. So Congress in December 2007 required NIH to mandate open access for its research within a year of publication.~{ Peter Suber has an excellent account of the final OA legislation in /{SPARC Open Access Newsletter}/, no. 17, January 2, 2008, at http://www.earlham.edu/ ~peters/fos/newsletter/01-02-08.htm. }~ +OA received another big boost in 2004 when the National Institutes for Health proposed that all NIH-funded research be made available for free one year after its publication in a commercial journal. The $28 billion that the NIH spends on research each year (more than the domestic budget of 142 nations!) results in about 65,000 peer-reviewed articles, or 178 every day. Unfortunately, commercial journal publishers succeeded in making the proposed OA policy voluntary. The battle continued in Congress, but it became clear that the voluntary approach was not working. Only 4 percent of researchers published their work under OA standards, largely because busy, working scientists did not consider it a priority and their publishers were not especially eager to help. So Congress in December 2007 required NIH to mandate open access for its research within a year of publication.~{ Peter Suber has an excellent account of the final OA legislation in /{SPARC Open Access Newsletter}/, no. 17, January 2, 2008, at http://www.earlham.edu/~peters/fos/newsletter/01-02-08.htm. }~ ={National Institutes for Health (NIH)} What may sound like an arcane policy battle in fact has serious implications for ordinary Americans. The breast cancer patient seeking the best peer-reviewed articles online, or the family of a person with Huntington’s disease, can clearly benefit if they can acquire, for free, the latest medical research. 
Scientists, journalists, health-care workers, physicians, patients, and many others cannot access the vast literature of publicly funded scientific knowledge because of high subscription rates or per-article fees. A freely available body of online literature is the best, most efficient way to help science generate more reliable answers, new discoveries, and commercial innovations. @@ -2593,7 +2593,7 @@ It is still too early to judge how well the CC0 program is working, but initial 2~ The Neurocommons -Every day there is so much new scientific literature generated that it would take a single person 106 years to read it all.~{ Brian Athey, University of Michigan, presentation at Commons of Science conference, National Academy of Science, Washington, DC, October 3, 2006. }~ In a single year, over twenty-four thousand peer-reviewed journals publish about 2.5 million research articles.~{ Stevan Harnad, “Maximizing Research Impact Through Institutional and National Open-Access Self-Archiving Mandates,” /{Electronics & Computer Science E-Prints Repository}/, May 2006, available at http://eprints.ecs.soron.ac.uk/ 12093/02/harnad-crisrey.pdf. }~ Our ability to generate content has far outstripped our ability to comprehend it. We are suffering from a cognitive overload — one that can only be addressed by using software and computer networks in innovative ways to organize, search, and access information. For many years, Sir Tim Berners-Lee, the celebrated inventor of the World Wide Web, and his colleagues at the World Wide Web Consortium (W3C), based at MIT, have been trying to solve the problem of information overload by developing a “new layer” of code for the Web. +Every day there is so much new scientific literature generated that it would take a single person 106 years to read it all.~{ Brian Athey, University of Michigan, presentation at Commons of Science conference, National Academy of Science, Washington, DC, October 3, 2006. 
}~ In a single year, over twenty-four thousand peer-reviewed journals publish about 2.5 million research articles.~{ Stevan Harnad, “Maximizing Research Impact Through Institutional and National Open-Access Self-Archiving Mandates,” /{Electronics & Computer Science E-Prints Repository}/, May 2006, available at http://eprints.ecs.soton.ac.uk/12093/02/harnad-crisrey.pdf. }~ Our ability to generate content has far outstripped our ability to comprehend it. We are suffering from a cognitive overload — one that can only be addressed by using software and computer networks in innovative ways to organize, search, and access information. For many years, Sir Tim Berners-Lee, the celebrated inventor of the World Wide Web, and his colleagues at the World Wide Web Consortium (W3C), based at MIT, have been trying to solve the problem of information overload by developing a “new layer” of code for the Web. ={Berners-Lee, Tim;World Wide Web Consortium (W3C)}

This visionary project, the so-called Semantic Web, aspires to develop a framework for integrating a variety of systems, so they can communicate with one another, machine to machine. The goal is to enable computers to identify and capture information from anywhere on the Web, and then organize the results in sophisticated and customized ways. “If you search for ‘signal transduction genes in parameter neurons,’ ” said John Wilbanks of Science Commons, “Google sucks. It will get you 190,000 Web pages.” The goal of the Semantic Web is to deliver a far more targeted and useful body of specialized information. @@ -2629,7 +2629,7 @@ The problem with a field like neuroscience, which has so many exploding frontiers

Science is not just about text and data, of course. It also involves lots of tangible /{stuff}/ needed to conduct experiments. Typical materials include cell lines, monoclonal antibodies, reagents, animal models, synthetic materials, nano-materials, clones, laboratory equipment, and much else. 
Here, too, sharing and collaboration are important to the advance of science. But unlike digital bits, which are highly malleable, the physical materials needed for experiments have to be located, approved for use, and shipped. Therein lies another tale of high transaction costs impeding the progress of science. As Thinh Nguyen, counsel for Science Commons, describes the problem: ={Nguyen, Thinh+1} -_1 The ability to locate materials based on their descriptions in journal articles is often limited by lack of sufficient information about origin and availability, and there is no standard citation for such materials. In addition, the process of legal negotiation that may follow can be lengthy and unpredictable. This can have important implications for science policy, especially when delays or inability to obtain research materials result in lost time, productivity and research opportunities.~{ Thinh Nguyen, “Science Commons: Material Transfer Agreement Project,” /{Innovations}/, Summer 2007, pp. 137–43, at http://www.mitpressjournals.org/ doi/pdf/10.1162/itgg.2007.2.3.137. }~ +_1 The ability to locate materials based on their descriptions in journal articles is often limited by lack of sufficient information about origin and availability, and there is no standard citation for such materials. In addition, the process of legal negotiation that may follow can be lengthy and unpredictable. This can have important implications for science policy, especially when delays or inability to obtain research materials result in lost time, productivity and research opportunities.~{ Thinh Nguyen, “Science Commons: Material Transfer Agreement Project,” /{Innovations}/, Summer 2007, pp. 137–43, at http://www.mitpressjournals.org/doi/pdf/10.1162/itgg.2007.2.3.137. }~ To the nonscientist, this transactional subculture is largely invisible. But to scientists whose lab work requires access to certain physical materials, the uncertainties, variations, and delays can be crippling. 
Normally, the transfer of materials from one scientist to another occurs through a Material Transfer Agreement, or MTA. The technology transfer office at one research university will grant, or not grant, an MTA so that a cell line or tissue specimen can be shipped to a researcher at another university. Typically, permission must be granted for the researcher to publish, disseminate, or use research results, and to license their use for commercialization. ={Material Transfer Agreements (MTAs)+7;science:Material Transfer Agreements (MTAs)+7} @@ -2712,7 +2712,7 @@ MIT also realized the dangers of propertizing college courses and teaching mater School officials stressed that using MIT courseware on the Web is not the same as an MIT education. Indeed, the free materials underscore the fact that what really distinguishes an MIT education is one’s participation in a learning community. Unlike the Connexions content, MIT’s OpenCourseWare is a fairly static set of course materials; they are not modular or constantly updated. In addition, they are licensed under a CC BY-NC-SA (AttributionNonCommercial-ShareAlike.) license. While this prevents businesses from profiting from MIT course materials, it also prevents other educational institutions from remixing them into new courses or textbooks. ={communities:learning;education:learning community, in a} -Despite these limitations, MIT’s OCW materials have been profoundly influential. The course Laboratory in Software Engineering, for example, has been used by students in Karachi, Pakistan; the island of Mauritius; Vienna, Austria; and Kansas City, Missouri, among scores of other places around the world.~{ David Diamond, “MIT Everyware,” /{Wired}/, September 2003. }~ Ten of the leading Chinese universities now use hundreds of MIT courses, leading three noted OER experts, Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, to conclude that MIT’s OCW “has had a major impact on Chinese education.”~{ Daniel E. 
Atkins, John Seely Brown, and Allen L. Hammond, “A Review of the Open Educational Resources (OER) Movement: Achievements, Challenges and New Opportunities,” February 2007, at http://www.oerderves .org/wp-content/uploads/2007/03/a-review-of-the-open-educational-re sources-oer-movement_final.pdf, p. 23. }~ Noting the life-changing impact that OCW has had on students in rural villages in China and West Africa, Atkins and his co-authors cite “the power of the OCW as a means for cross-cultural engagement.” Over the course of four years, from October 2003 through 2007, the OCW site received nearly 16 million visits; half were newcomers and half were repeat visits.
+Despite these limitations, MIT’s OCW materials have been profoundly influential. The course Laboratory in Software Engineering, for example, has been used by students in Karachi, Pakistan; the island of Mauritius; Vienna, Austria; and Kansas City, Missouri, among scores of other places around the world.~{ David Diamond, “MIT Everyware,” /{Wired}/, September 2003. }~ Ten of the leading Chinese universities now use hundreds of MIT courses, leading three noted OER experts, Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, to conclude that MIT’s OCW “has had a major impact on Chinese education.”~{ Daniel E. Atkins, John Seely Brown, and Allen L. Hammond, “A Review of the Open Educational Resources (OER) Movement: Achievements, Challenges and New Opportunities,” February 2007, at http://www.oerderves.org/wp-content/uploads/2007/03/a-review-of-the-open-educational-resources-oer-movement_final.pdf, p. 23. }~ Noting the life-changing impact that OCW has had on students in rural villages in China and West Africa, Atkins and his co-authors cite “the power of the OCW as a means for cross-cultural engagement.” Over the course of four years, from October 2003 through 2007, the OCW site received nearly 16 million visits; half were newcomers and half were repeat visits. 
={Atkins, Daniel E.;Brown, John Seely;Hammond, Allen L.;education:OER movement;Open Educational Resources (OER) movement} OCW is becoming a more pervasive international ethic now that more than 120 educational institutions in twenty nations have banded together to form the OpenCourseWare Consortium. Its goal is to create “a broad and deep body of open educational content using a shared model.”~{ OpenCourseWare Consortium, at http://www.ocwconsortium.org. }~ Although plenty of universities are still trying to make money from distance education courses, a growing number of colleges and universities realize that OCW helps faculty connect with other interested faculty around the world, build a college’s public recognition and recruitment, and advance knowledge as a public good. @@ -2757,7 +2757,7 @@ Next, Delia Browne of the National Education Access Licence for Schools, or NEAL /{Tweet! Tweet!}/ Neeru Paharia, a former executive director of the Creative Commons, introduced her fledgling project, AcaWiki. Paharia is concerned that too many academic articles are locked behind paywalls and are not readily accessible to everyone. AcaWiki plans to recruit graduate students, academics, and citizens to write summaries of academic papers. Since many grad students make abstracts as part of their routine research, it would not be difficult to pool thousands of summaries into a highly useful, searchable Web collection. ={Paharia, Neeru} -The speed geekers in Dubrovnik were sweaty and overstimulated at the end, but gratified to learn that there are a great many OER projects under way throughout the world; they just aren’t very well known or coordinated with one another. Two of the participants — J. 
Philipp Schmidt of the University of the Western Cape and Mark Surman of the Shuttleworth Foundation, both of South Africa — conceded that “there is still a great deal of fuzziness about what this movement includes,” and that “we don’t yet have a good ‘map’ of open education.” But the significance of grassroots initiatives is unmistakable. “There is a movement afoot here,” they concluded, “and it is movement with an aim no less than making learning accessible and adaptable for all.”~{ J. Philipp Schmidt and Mark Surman, “Open Sourcing Education: Learning and Wisdom from the iSummit 2007,” September 2, 2007, at http://icommons .org/download_banco/open-sourcing-education-learning-and-wisdom-from -isummit-2007. }~ “Education,” another participant predicted, “will drive the future of the Commons movement.” +The speed geekers in Dubrovnik were sweaty and overstimulated at the end, but gratified to learn that there are a great many OER projects under way throughout the world; they just aren’t very well known or coordinated with one another. Two of the participants — J. Philipp Schmidt of the University of the Western Cape and Mark Surman of the Shuttleworth Foundation, both of South Africa — conceded that “there is still a great deal of fuzziness about what this movement includes,” and that “we don’t yet have a good ‘map’ of open education.” But the significance of grassroots initiatives is unmistakable. “There is a movement afoot here,” they concluded, “and it is movement with an aim no less than making learning accessible and adaptable for all.”~{ J. Philipp Schmidt and Mark Surman, “Open Sourcing Education: Learning and Wisdom from the iSummit 2007,” September 2, 2007, at http://icommons.org/download_banco/open-sourcing-education-learning-and-wisdom-from-isummit-2007. }~ “Education,” another participant predicted, “will drive the future of the Commons movement.” ={Schmidt, J. 
Philipp;Surman, Mark} In a sign that the OER movement is getting serious as a movement, thirty of its leaders met in Cape Town, South Africa, and in January 2008 issued the Cape Town Open Education Declaration.~{ http://www.capetowndeclaration.org. Schmidt and Surman, “Open Sourcing Education.” }~ The declaration is a call to make learning materials more freely available online, and to improve education and learning by making them more collaborative, flexible, and locally relevant. The declaration outlines the challenge: “Many educators remain unaware of the growing pool of open educational resources. Many governments and educational institutions are either unaware or unconvinced of the benefits of open education. Differences among licensing schemes for open resources create confusion and incompatibility. And, of course, the majority of the world does not have access to the computers and networks that are integral to most current open education efforts.” @@ -2835,15 +2835,15 @@ I call the new sorts of citizen behaviors “history-making” because ordinary These behaviors exist in some measure in offline realms, of course, but they are a growing norm in the digital republic. A few examples will suffice to make the point. The Web helped create and propel a handful of cause-oriented candidacies — Howard Dean, Ron Paul, Ned Lamont~[* Lamont was an insurgent candidate for U.S. Senate from Connecticut challenging Senator Joseph Lieberman in a campaign that helped culturally validate opposition to the U.S. war in Iraq.]~ — who rapidly raised enormous sums of money, galvanized large numbers of passionate supporters, and altered mainstream political discourse. Although none prevailed in their races, Barack Obama made a quantum leap in online organizing in 2008, raising $50 million in a single month from supporters via the Internet. 
Obama’s candidacy was buoyed by the rise of the “netroots” — Web activists with a progressive political agenda— whose size and credibility enable them to sway votes in Congress, raise significant amounts of campaign funds, and influence local activism. The stories are now legion about blogs affecting political life — from the resignation of Senate majority leader Trent Lott after he praised the racist past of Senator Strom Thurmond at his hundredth birthday party, to the electoral defeat of Senate candidate George Allen after his uttering of an ethnic slur, /{macaca}/, was posted on YouTube. ={Dean, Howard;Lamont, Ned;Obama, Barack;Paul, Ron;Internet:political campaigns on;Allen, George;Lott, Trent;YouTube} -Citizens are now able to initiate their own policy initiatives without first persuading the mainstream media or political parties to validate them as worthy. For example, a handful of citizens troubled by evidence of “hackable” electronic voting machines exposed the defects of the Diebold machines and the company’s efforts to thwart public scrutiny and reforms.~{ See, e.g.,Yochai Benkler, /{The Wealth of Networks}/, pp. 225–32. }~ (The effort has led to a nationwide citizen effort, www.blackboxvoting.org, to expose security problems with voting machines and vote counting.) An ad hoc group of activists, lawyers, academics, and journalists spontaneously formed around a public wiki dealing with the lethal side effects of a bestselling antipsychotic drug Zyprexa, and the manufacturer’s allegedly illegal conduct in suppressing evidence of the drug’s risks. (Prosecutors later sought a $1 billion fine against Eli Lilly.)~{ Jonah Bossewitch, “The Zyprexa Kills Campaign: Peer Production and the Frontiers of Radical Pedagogy,” /{Re-public}/, at http://www.re-public.gr/en/ ?p=144. }~ +Citizens are now able to initiate their own policy initiatives without first persuading the mainstream media or political parties to validate them as worthy. 
For example, a handful of citizens troubled by evidence of “hackable” electronic voting machines exposed the defects of the Diebold machines and the company’s efforts to thwart public scrutiny and reforms.~{ See, e.g.,Yochai Benkler, /{The Wealth of Networks}/, pp. 225–32. }~ (The effort has led to a nationwide citizen effort, www.blackboxvoting.org, to expose security problems with voting machines and vote counting.) An ad hoc group of activists, lawyers, academics, and journalists spontaneously formed around a public wiki dealing with the lethal side effects of a bestselling antipsychotic drug Zyprexa, and the manufacturer’s allegedly illegal conduct in suppressing evidence of the drug’s risks. (Prosecutors later sought a $1 billion fine against Eli Lilly.)~{ Jonah Bossewitch, “The Zyprexa Kills Campaign: Peer Production and the Frontiers of Radical Pedagogy,” /{Re-public}/, at http://www.re-public.gr/en/?p=144. }~ The Web is giving individuals extra-institutional public platforms for articulating their own facts and interpretations of culture. It is enabling them to go far beyond voting and citizen vigilance, to mount citizen-led interventions in politics and governance. History-making citizens can compete with the mass media as an arbiter of cultural and political reality. They can expose the factual errors and lack of independence of /{New York Times}/ reporters; reveal the editorial biases of the “MSM” — mainstream media — by offering their own videotape snippets on YouTube; they can even be pacesetters for the MSM, as the blog Firedoglake did in its relentless reporting of the “Scooter” Libby trial (Libby, one of Vice President Cheney’s top aides, was convicted of obstruction of justice and perjury in connection with press leaks about CIA agent Valerie Plame.) Citizen-journalists, amateur videographers, genuine experts who have created their own Web platforms, parodists, dirty tricksters, and countless others are challenging elite control of the news agenda. 
It is no wonder that commercial journalism is suffering an identity crisis. Institutional authority is being trumped by the “social warranting” of online communities, many of which function as a kind of participatory meritocracy. ={Libby, “Scooter”;YouTube} -History-making citizenship is not without its deficiencies. Rumors, misinformation, and polarized debate are common in this more open, unmediated environment. Its crowning virtue is its potential ability to mobilize the energies and creativity of huge numbers of people. GNU/Linux improbably drew upon the talents of tens of thousands of programmers; certainly our contemporary world with its countless problems could use some of this elixir— platforms that can elicit distributed creativity, specialized talent, passionate commitment, and social legitimacy. In 2005 Joi Ito, then chairman of the board of the Creative Commons, wrote: “Traditional forms of representative democracy can barely manage the scale, complexity and speed of the issues in the world today. Representatives of sovereign nations negotiating with each other in global dialog are limited in their ability to solve global issues. The monolithic media and its increasingly simplistic representation of the world cannot provide the competition of ideas necessary to reach informed, viable consensus.”~{ Joichi Ito, “Emergent Democracy,” chapter 1 in John Lebkowsky and Mitch Ratcliffe, eds., /{Extreme Democracy}/ (Durham, NC: Lulu.com, 2005), at http:// extremedemocracy.com/chapters/Chapter%20One-Ito.pdf. }~ Ito concluded that a new, not-yetunderstood model of “emergent democracy” is likely to materialize as the digital revolution proceeds. A civic order consisting of “intentional blog communities, ad hoc advocacy coalitions and activist networks” could begin to tackle many urgent problems. +History-making citizenship is not without its deficiencies. Rumors, misinformation, and polarized debate are common in this more open, unmediated environment. 
Its crowning virtue is its potential ability to mobilize the energies and creativity of huge numbers of people. GNU/Linux improbably drew upon the talents of tens of thousands of programmers; certainly our contemporary world with its countless problems could use some of this elixir— platforms that can elicit distributed creativity, specialized talent, passionate commitment, and social legitimacy. In 2005 Joi Ito, then chairman of the board of the Creative Commons, wrote: “Traditional forms of representative democracy can barely manage the scale, complexity and speed of the issues in the world today. Representatives of sovereign nations negotiating with each other in global dialog are limited in their ability to solve global issues. The monolithic media and its increasingly simplistic representation of the world cannot provide the competition of ideas necessary to reach informed, viable consensus.”~{ Joichi Ito, “Emergent Democracy,” chapter 1 in John Lebkowsky and Mitch Ratcliffe, eds., /{Extreme Democracy}/ (Durham, NC: Lulu.com, 2005), at http://extremedemocracy.com/chapters/Chapter%20One-Ito.pdf. }~ Ito concluded that a new, not-yet-understood model of “emergent democracy” is likely to materialize as the digital revolution proceeds. A civic order consisting of “intentional blog communities, ad hoc advocacy coalitions and activist networks” could begin to tackle many urgent problems.
={Ito, Joichi;GNU/Linux;democracy:emergent+1|traditional forms of+5}
If proprietary policies or technologies are allowed to override citizen interests (Verizon Wireless in 2007 prevented the transmission of abortion rights messages on its text-messaging system, for example~{ Adam Liptak, “Verizon Reverses Itself on Abortion Messages,” /{New York Times}/, September 27, 2007, at http://www.nytimes.com/2007/09/27/busi ness/27cnd-verizon.html. }~), then any hope for historymaking citizenship will be stillborn.
+Clearly, the first imperative in developing a new framework to host representative democracy is to ensure that the electronic commons be allowed to exist in the first place. Without net neutrality, citizens could very well be stifled in their ability to participate on their own terms, in their own voices. If proprietary policies or technologies are allowed to override citizen interests (Verizon Wireless in 2007 prevented the transmission of abortion rights messages on its text-messaging system, for example~{ Adam Liptak, “Verizon Reverses Itself on Abortion Messages,” /{New York Times}/, September 27, 2007, at http://www.nytimes.com/2007/09/27/business/27cnd-verizon.html. }~), then any hope for history-making citizenship will be stillborn.
They are apolitical in the sense that commoners are chiefly focused on the pragmatic technical challenges of their individual projects; they are not usually involved in official policymaking in legislatures or before courts and government agencies. Yet free software and free culture projects are highly political in the sense that commons projects, taken together over time, represent a profound challenge to the conventional market order and political culture. For example, Wikitravel, Jamendo, and open-access journals arguably provide better value than the commercial alternatives. The success of free software punctures the foundational assumptions of copyright law, making it easier to challenge new expansions of copyright law. Participatory commons are diverting viewer “eyeballs” away from commercial media and its genres of culture, spurring the growth of new hybrid forms of user-generated content. These kinds of effects, which advance project by project, month by month, are likely to have a longterm transformational impact. A new social ethic is taking root. ={Ito, Joichi;free software:FOSS/FLOSS+2;FOSS/FLOSS+2;copyright law:assumptions of;democracy:emergent} -Free culture, though culturally progressive, is fairly nonjudgmental about ideological politics. When American conservatives decided they wanted to start Conservapedia because they found Wikipedia too liberal, Wikipedia founder Jimmy Wales was happy to bless it: “Free culture knows no bounds . . . We welcome the reuse of our work to build variants. That’s directly in line with our mission.”~{ Robert Mackey, “Conservapedia: The Word Says it All,” /{New York Times}/, March 8, 2007, at http://thelede.blogs.nytimes.com/2007/03/08/conserva pedia-the-word-says-it-all/?scp=1&sq=wales+conservapedia. }~ Anthropology professor E. Gabriella Coleman has found a similar ecumenicism in the free software movement, which is agnostic about conventional politics but adamant about its own polity of freedom.~{ E. 
Gabriella Coleman, “The Political Agnosticism of Free and Open Source Software and the Inadvertent Politics of Contrast,” /{Anthropology Quarterly}/ 77, no. 3 (Summer 2004), pp. 507–19. See also her Ph.D. dissertation, “The Social Construction of Freedom in Free and Open Source Software: Hackers, Ethics and the Liberal Tradition,” abstract at http://healthhacker.org/biella/cole man-abstract.pdf. }~ Thus, the FOSS movement has no position with respect to social justice or globalization issues, but it does demand a strict commitment to the “four freedoms” of software development. Johan Söderberg makes much the same case in his book /{Hacking Capitalism}/.~{ Johan Söderberg, /{Hacking Capitalism: The Free and Open Source Software Movement}/ (New York: Routledge, 2007). }~ +Free culture, though culturally progressive, is fairly nonjudgmental about ideological politics. When American conservatives decided they wanted to start Conservapedia because they found Wikipedia too liberal, Wikipedia founder Jimmy Wales was happy to bless it: “Free culture knows no bounds . . . We welcome the reuse of our work to build variants. That’s directly in line with our mission.”~{ Robert Mackey, “Conservapedia: The Word Says it All,” /{New York Times}/, March 8, 2007, at http://thelede.blogs.nytimes.com/2007/03/08/conservapedia-the-word-says-it-all/?scp=1&sq=wales+conservapedia. }~ Anthropology professor E. Gabriella Coleman has found a similar ecumenicism in the free software movement, which is agnostic about conventional politics but adamant about its own polity of freedom.~{ E. Gabriella Coleman, “The Political Agnosticism of Free and Open Source Software and the Inadvertent Politics of Contrast,” /{Anthropology Quarterly}/ 77, no. 3 (Summer 2004), pp. 507–19. See also her Ph.D. dissertation, “The Social Construction of Freedom in Free and Open Source Software: Hackers, Ethics and the Liberal Tradition,” abstract at http://healthhacker.org/biella/coleman-abstract.pdf. 
}~ Thus, the FOSS movement has no position with respect to social justice or globalization issues, but it does demand a strict commitment to the “four freedoms” of software development. Johan Söderberg makes much the same case in his book /{Hacking Capitalism}/.~{ Johan Söderberg, /{Hacking Capitalism: The Free and Open Source Software Movement}/ (New York: Routledge, 2007). }~ ={Coleman, E. Gabriella;Wales, Jimmy;Söderberg, Johan} As projects like GNU/Linux, Wikipedia, open courseware, open-access journals, open databases, municipal Wi-Fi, collections of CC-licensed content, and other commons begin to cross-link and coalesce, the commons paradigm is migrating from the margins of culture to the center. The viral spiral, after years of building its infrastructure and social networks, may be approaching a Cambrian explosion, an evolutionary leap. @@ -2898,7 +2898,7 @@ The GPL and the CC licenses are ingenious hacks because they navigate this indet The beauty of this “ideological straddle” is that it enables a diverse array of players into the same tent without inciting sectarian acrimony. (There is some, of course, but mostly at the margins.) Ecumenical tolerance is the norm because orthodoxies cannot take root at the periphery where innovation is constantly being incubated. In any case, there is a widespread realization in the networked world that shared goals are likely to require variable implementations, depending on specific needs and contexts. -It may appear that the free software hacker, blogger, tech entrepreneur, celebrity musician, college professor, and biological researcher have nothing in common. In truth, each is participating in social practices that are incrementally and collectively bringing into being a new sort of democratic polity. French sociologist Bruno Latour calls it the “pixellation of politics,”~{ Bruno Latour, “We Are All Reactionaries Today,” Re-public, at http://www .republic.gr/en/?p=129. 
}~ which conjures up a pointillist painting slowly materializing. The new polity is more open, participatory, dynamically responsive, and morally respected by “the governed” than the nominal democracies of nation-states. The bureaucratic state tends to be too large and remote to be responsive to local circumstances and complex issues; it is ridiculed and endured. But who dares to aspire to transcend it? +It may appear that the free software hacker, blogger, tech entrepreneur, celebrity musician, college professor, and biological researcher have nothing in common. In truth, each is participating in social practices that are incrementally and collectively bringing into being a new sort of democratic polity. French sociologist Bruno Latour calls it the “pixellation of politics,”~{ Bruno Latour, “We Are All Reactionaries Today,” Re-public, at http://www.republic.gr/en/?p=129. }~ which conjures up a pointillist painting slowly materializing. The new polity is more open, participatory, dynamically responsive, and morally respected by “the governed” than the nominal democracies of nation-states. The bureaucratic state tends to be too large and remote to be responsive to local circumstances and complex issues; it is ridiculed and endured. But who dares to aspire to transcend it? ={Latour, Bruno} Sooner or later, history-making citizenship is likely to take up such a challenge. It already has. What is the digital republic, after all, but a federation of self-organized communities, each seeking to fulfill its members’ dreams by developing its own indigenous set of tools, rules, and ethics? The power of the commons stems from its role as an organizing template, and not an ideology. Because it is able to host a diverse and robust ecosystem of talent without squeezing it into an ideological straitjacket, the commons is flexible and resilient. It is based on people’s sincerest passions, not on remote institutional imperatives or ideological shibboleths. 
It therefore has a foundational support and energy that can outperform “mainstream” institutions. -- cgit v1.2.3 From c9f8bc67faa18a583124aab0ea84828f9aaa4f07 Mon Sep 17 00:00:00 2001 From: Ralph Amissah Date: Sun, 22 Aug 2010 17:58:53 -0400 Subject: markup samples, corrections to book indexes --- ...lman_crusade_for_free_software.sam_williams.sst | 2 +- .../the_wealth_of_networks.yochai_benkler.sst | 68 ++++++++++++++-------- data/v1/samples/two_bits.christopher_kelty.sst | 36 ++++++------ .../democratizing_innovation.eric_von_hippel.sst | 20 +++---- ...lman_crusade_for_free_software.sam_williams.sst | 2 +- .../the_wealth_of_networks.yochai_benkler.sst | 32 +++++----- data/v2/samples/two_bits.christopher_kelty.sst | 36 ++++++------ 7 files changed, 107 insertions(+), 89 deletions(-) diff --git a/data/v1/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst b/data/v1/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst index e94a4cc..8ef920b 100644 --- a/data/v1/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst +++ b/data/v1/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst @@ -2028,7 +2028,7 @@ Although not the first person to view software as public property, Stallman is g Predicting the future is risky sport, but most people, when presented with the question, seemed eager to bite. "One hundred years from now, Richard and a couple of other people are going to deserve more than a footnote," says Moglen. "They're going to be viewed as the main line of the story." The "couple other people" Moglen nominates for future textbook chapters include John Gilmore, Stallman's GPL advisor and future founder of the Electronic Frontier Foundation, and Theodor Holm Nelson, a.k.a. Ted Nelson, author of the 1982 book, Literary Machines. Moglen says Stallman, Nelson, and Gilmore each stand out in historically significant, nonoverlapping ways. 
He credits Nelson, commonly considered to have coined the term "hypertext," for identifying the predicament of information ownership in the digital age. Gilmore and Stallman, meanwhile, earn notable credit for identifying the negative political effects of information control and building organizations-the Electronic Frontier Foundation in the case of Gilmore and the Free Software Foundation in the case of Stallman-to counteract those effects. Of the two, however, Moglen sees Stallman's activities as more personal and less political in nature. -={Electronic Frontier Foundation;Gilmore, John;Nelson, Theodor Holm+2;Nelson Ted+2} +={Electronic Frontier Foundation;Gilmore, John;Nelson, Theodor Holm+2;Nelson, Ted+2} "Richard was unique in that the ethical implications of unfree software were particularly clear to him at an early moment," says Moglen. "This has a lot to do with Richard's personality, which lots of people will, when writing about him, try to depict as epiphenomenal or even a drawback in Richard Stallman's own life work." diff --git a/data/v1/samples/the_wealth_of_networks.yochai_benkler.sst b/data/v1/samples/the_wealth_of_networks.yochai_benkler.sst index a7ae407..e9e087a 100644 --- a/data/v1/samples/the_wealth_of_networks.yochai_benkler.sst +++ b/data/v1/samples/the_wealth_of_networks.yochai_benkler.sst @@ -80,7 +80,7 @@ Much of the early work in this project was done at New York University, whose la Since 2001, first as a visitor and now as a member, I have had the remarkable pleasure of being part of the intellectual community that is Yale Law School. The book in its present form, structure, and emphasis is a direct reflection of my immersion in this wonderful community. Practically every single one of my colleagues has read articles I have written over this period, attended workshops where I presented my work, provided comments that helped to improve the articles--and through them, this book, as well. 
I owe each and every one of them thanks, not least to Tony Kronman, who made me see that it would be so. To list them all would be redundant. To list some would inevitably underrepresent the various contributions they have made. Still, I will try to say a few of the special thanks, owing much yet to ,{[pg xii]}, those I will not name. Working out the economics was a precondition of being able to make the core political claims. Bob Ellickson, Dan Kahan, and Carol Rose all engaged deeply with questions of reciprocity and commonsbased production, while Jim Whitman kept my feet to the fire on the relationship to the anthropology of the gift. Ian Ayres, Ron Daniels during his visit, Al Klevorick, George Priest, Susan Rose-Ackerman, and Alan Schwartz provided much-needed mixtures of skepticism and help in constructing the arguments that would allay it. Akhil Amar, Owen Fiss, Jerry Mashaw, Robert Post, Jed Rubenfeld, Reva Siegal, and Kenji Yoshino helped me work on the normative and constitutional questions. The turn I took to focusing on global development as the core aspect of the implications for justice, as it is in chapter 9, resulted from an invitation from Harold Koh and Oona Hathaway to speak at their seminar on globalization, and their thoughtful comments to my paper. The greatest influence on that turn has been Amy Kapczynski's work as a fellow at Yale, and with her, the students who invited me to work with them on university licensing policy, in particular, Sam Chaifetz. -Oddly enough, I have never had the proper context in which to give two more basic thanks. My father, who was swept up in the resistance to British colonialism and later in Israel's War of Independence, dropped out of high school. He was left with a passionate intellectual hunger and a voracious appetite for reading. He died too young to even imagine sitting, as I do today with my own sons, with the greatest library in human history right there, at the dinner table, with us. 
But he would have loved it. Another great debt is to David Grais, who spent many hours mentoring me in my first law job, bought me my first copy of Strunk and White, and, for all practical purposes, taught me how to write in English; as he reads these words, he will be mortified, I fear, to be associated with a work of authorship as undisciplined as this, with so many excessively long sentences, replete with dependent clauses and unnecessarily complex formulations of quite simple ideas. +Oddly enough, I have *{never had the proper context}* in which to give two more basic thanks. My father, who was swept up in the resistance to British colonialism and later in Israel's War of Independence, dropped out of high school. He was left with a passionate intellectual hunger and a voracious appetite for reading. He died too young to even imagine sitting, as I do today with my own sons, with the greatest library in human history right there, at the dinner table, with us. But he would have loved it. Another great debt is to David Grais, who spent many hours mentoring me in my first law job, bought me my first copy of Strunk and White, and, for all practical purposes, taught me how to write in English; as he reads these words, he will be mortified, I fear, to be associated with a work of authorship as undisciplined as this, with so many excessively long sentences, replete with dependent clauses and unnecessarily complex formulations of quite simple ideas. Finally, to my best friend and tag-team partner in this tussle we call life, Deborah Schrag, with whom I have shared nicely more or less everything since we were barely adults. ,{[pg 1]}, @@ -94,7 +94,7 @@ A series of changes in the technologies, economic organization, and social pract The rise of greater scope for individual and cooperative nonmarket production of information and culture, however, threatens the incumbents of the industrial information economy. 
At the beginning of the twenty-first century, we find ourselves in the midst of a battle over the institutional ecology of the digital environment. A wide range of laws and institutions-- from broad areas like telecommunications, copyright, or international trade regulation, to minutiae like the rules for registering domain names or whether digital television receivers will be required by law to recognize a particular code--are being tugged and warped in efforts to tilt the playing field toward one way of doing things or the other. How these battles turn out over the next decade or so will likely have a significant effect on how we come to know what is going on in the world we occupy, and to what extent and in what forms we will be able--as autonomous individuals, as citizens, and as participants in cultures and communities--to affect how we and others see the world as it is and as it might be. 2~ THE EMERGENCE OF THE NETWORKED INFORMATION ECONOMY -={information economy:emergence of+9;networked environment policy+52;networked environment policy:emergence of+9} +={information economy:emergence of+9;networked information economy+52|emergence of+9} The most advanced economies in the world today have made two parallel shifts that, paradoxically, make possible a significant attenuation of the limitations that market-based production places on the pursuit of the political ,{[pg 3]}, values central to liberal societies. The first move, in the making for more than a century, is to an economy centered on information (financial services, accounting, software, science) and cultural (films, music) production, and the manipulation of symbols (from making sneakers to branding them and manufacturing the cultural significance of the Swoosh). The second is the move to a communications environment built on cheap processors with high computation capabilities, interconnected in a pervasive network--the phenomenon we associate with the Internet. 
It is this second shift that allows for an increasing role for nonmarket production in the information and cultural production sector, organized in a radically more decentralized pattern than was true of this sector in the twentieth century. The first shift means that these new patterns of production--nonmarket and radically decentralized--will emerge, if permitted, at the core, rather than the periphery of the most advanced economies. It promises to enable social production and exchange to play a much larger role, alongside property- and marketbased production, than they ever have in modern democracies. ={nonmarket information producers+4;physical constraints on information production+2;production of information:physical constraints on+2} @@ -121,7 +121,7 @@ In the networked information economy, the physical capital required for producti Because the presence and importance of nonmarket production has become so counterintuitive to people living in market-based economies at the end of the twentieth century, part I of this volume is fairly detailed and technical; overcoming what we intuitively "know" requires disciplined analysis. Readers who are not inclined toward economic analysis should at least read the introduction to part I, the segments entitled "When Information Production Meets the Computer Network" and "Diversity of Strategies in our Current Production System" in chapter 2, and the case studies in chapter 3. These should provide enough of an intuitive feel for what I mean by the diversity of production strategies for information and the emergence of nonmarket individual and cooperative production, to serve as the basis for the more normatively oriented parts of the book. Readers who are genuinely skeptical of the possibility that nonmarket production is sustainable and effective, and in many cases is an efficient strategy for information, knowledge, and cultural production, should take the time to read part I in its entirety. 
The emergence of precisely this possibility and practice lies at the very heart of my claims about the ways in which liberal commitments are translated into lived experiences in the networked environment, and forms the factual foundation of the political-theoretical and the institutional-legal discussion that occupies the remainder of the book. 2~ NETWORKED INFORMATION ECONOMY AND LIBERAL, DEMOCRATIC SOCIETIES -={democratic societies+15;information economy:democracy and liberalism+15;liberal societies+15;networked environment policy:democracy and liberalism+15} +={democratic societies+15;information economy:democracy and liberalism+15;liberal societies+15;networked information economy:democracy and liberalism+15} How we make information, how we get it, how we speak to others, and how others speak to us are core components of the shape of freedom in any society. Part II of this book provides a detailed look at how the changes in the technological, economic, and social affordances of the networked information environment affect a series of core commitments of a wide range of liberal democracies. The basic claim is that the diversity of ways of organizing information production and use opens a range of possibilities for pursuing % ,{[pg 8]}, the core political values of liberal societies--individual freedom, a more genuinely participatory political system, a critical culture, and social justice. These values provide the vectors of political morality along which the shape and dimensions of any liberal society can be plotted. Because their practical policy implications are often contradictory, rather than complementary, the pursuit of each places certain limits on how we pursue the others, leading different liberal societies to respect them in different patterns. 
How much a society constrains the democratic decision-making powers of the majority in favor of individual freedom, or to what extent it pursues social justice, have always been attributes that define the political contours and nature of that society. But the economics of industrial production, and our pursuit of productivity and growth, have imposed a limit on how we can pursue any mix of arrangements to implement our commitments to freedom and justice. Singapore is commonly trotted out as an extreme example of the trade-off of freedom for welfare, but all democracies with advanced capitalist economies have made some such trade-off. Predictions of how well we will be able to feed ourselves are always an important consideration in thinking about whether, for example, to democratize wheat production or make it more egalitarian. Efforts to push workplace democracy have also often foundered on the shoals--real or imagined--of these limits, as have many plans for redistribution in the name of social justice. Market-based, proprietary production has often seemed simply too productive to tinker with. The emergence of the networked information economy promises to expand the horizons of the feasible in political imagination. Different liberal polities can pursue different mixtures of respect for different liberal commitments. However, the overarching constraint represented by the seeming necessity of the industrial model of information and cultural production has significantly shifted as an effective constraint on the pursuit of liberal commitments. 
@@ -167,10 +167,10 @@ The networked information economy also allows for the emergence of a more critic ={Balkin, Jack;communities:critical culture and self-reflection+1;critical culture and self-reflection+1;culture:criticality of (self-reflection)+1;democratic societies:critical culture and social relations+1;Fisher, William (Terry);Koren, Niva Elkin;Lessig, Lawrence (Larry);self-organization: See clusters in network topology self-reflection+1;liberal societies:critical culture and social relations} Throughout much of this book, I underscore the increased capabilities of individuals as the core driving social force behind the networked information economy. This heightened individual capacity has raised concerns by many that the Internet further fragments community, continuing the long trend of industrialization. A substantial body of empirical literature suggests, however, that we are in fact using the Internet largely at the expense of television, and that this exchange is a good one from the perspective of social ties. We use the Internet to keep in touch with family and intimate friends, both geographically proximate and distant. To the extent we do see a shift in social ties, it is because, in addition to strengthening our strong bonds, we are also increasing the range and diversity of weaker connections. Following ,{[pg 16]}, Manuel Castells and Barry Wellman, I suggest that we have become more adept at filling some of the same emotional and context-generating functions that have traditionally been associated with the importance of community with a network of overlapping social ties that are limited in duration or intensity. 
-={attention fragmentation;Castells, Manuel;fragmentation of communication;norms (social): fragments of communication;regulation by social norms: fragmentation of communication;social relations and norms:fragmentation of communication;communities: fragmentation of;diversity:fragmentation of communication;Castells, Manuel} +={attention fragmentation;Castells, Manuel;fragmentation of communication;norms (social): fragmentation of communication;regulation by social norms: fragmentation of communication;social relations and norms:fragmentation of communication;communities: fragmentation of;diversity:fragmentation of communication;Castells, Manuel} 2~ FOUR METHODOLOGICAL COMMENTS -={information economy:methodological choices+14;networked environmental policy. See policy networked information economy:methodological choices+14} +={information economy:methodological choices+14;networked environmental policy:See policy;networked information economy:methodological choices+14} There are four methodological choices represented by the thesis that I have outlined up to this point, and therefore in this book as a whole, which require explication and defense. The first is that I assign a very significant role to technology. The second is that I offer an explanation centered on social relations, but operating in the domain of economics, rather than sociology. The third and fourth are more internal to liberal political theory. The third is that I am offering a liberal political theory, but taking a path that has usually been resisted in that literature--considering economic structure and the limits of the market and its supporting institutions from the perspective of freedom, rather than accepting the market as it is, and defending or criticizing adjustments through the lens of distributive justice. Fourth, my approach heavily emphasizes individual action in nonmarket relations. Much of the discussion revolves around the choice between markets and nonmarket social behavior. 
In much of it, the state plays no role, or is perceived as playing a primarily negative role, in a way that is alien to the progressive branches of liberal political thought. In this, it seems more of a libertarian or an anarchistic thesis than a liberal one. I do not completely discount the state, as I will explain. But I do suggest that what is special about our moment is the rising efficacy of individuals and loose, nonmarket affiliations as agents of political economy. Just like the market, the state will have to adjust to this new emerging modality of human action. Liberal political theory must first recognize and understand it before it can begin to renegotiate its agenda for the liberal state, progressive or otherwise. ={capabilities of individuals:technology and human affairs+5;human affairs, technology and+5;individual capabilities and action: technology and human affairs+5} @@ -212,7 +212,7 @@ The important new fact about the networked environment, however, is the efficacy ={collaborative authorship: See also peer production collective social action} 2~ THE STAKES OF IT ALL: THE BATTLE OVER THE INSTITUTIONAL ECOLOGY OF THE DIGITAL ENVIRONMENT -={commercial model of communication+9;industrial model of communication+9;information economy:institutional ecology+9;institutional ecology of digital environment+9;networked environment policy:institutional ecology+9;proprietary rights+9;traditional model of communication+9} +={commercial model of communication+9;industrial model of communication+9;information economy:institutional ecology+9;institutional ecology of digital environment+9;networked information economy:institutional ecology+9;proprietary rights+9;traditional model of communication+9} No benevolent historical force will inexorably lead this technological-economic moment to develop toward an open, diverse, liberal equilibrium.
,{[pg 23]}, If the transformation I describe as possible occurs, it will lead to substantial redistribution of power and money from the twentieth-century industrial producers of information, culture, and communications--like Hollywood, the recording industry, and perhaps the broadcasters and some of the telecommunications services giants--to a combination of widely diffuse populations around the globe, and the market actors that will build the tools that make this population better able to produce its own information environment rather than buying it ready-made. None of the industrial giants of yore are taking this reallocation lying down. The technology will not overcome their resistance through an insurmountable progressive impulse. The reorganization of production and the advances it can bring in freedom and justice will emerge, therefore, only as a result of social and political action aimed at protecting the new social patterns from the incumbents' assaults. It is precisely to develop an understanding of what is at stake and why it is worth fighting for that I write this book. I offer no reassurances, however, that any of this will in fact come to pass. @@ -220,7 +220,7 @@ The battle over the relative salience of the proprietary, industrial models of i ={property ownership+5;commons} This is not to say that property is in some sense inherently bad. Property, together with contract, is the core institutional component of markets, and ,{[pg 24]}, a core institutional element of liberal societies. It is what enables sellers to extract prices from buyers, and buyers to know that when they pay, they will be secure in their ability to use what they bought. It underlies our capacity to plan actions that require use of resources that, without exclusivity, would be unavailable for us to use. But property also constrains action. 
The rules of property are circumscribed and intended to elicit a particular datum--willingness and ability to pay for exclusive control over a resource. They constrain what one person or another can do with regard to a resource; that is, use it in some ways but not others, reveal or hide information with regard to it, and so forth. These constraints are necessary so that people must transact with each other through markets, rather than through force or social networks, but they do so at the expense of constraining action outside of the market to the extent that it depends on access to these resources. -={constrains of information production:physical+2;physical constraints on information production+2} +={constrains of information production, physical+2;physical constraints on information production+2} Commons are another core institutional component of freedom of action in free societies, but they are structured to enable action that is not based on exclusive control over the resources necessary for action. For example, I can plan an outdoor party with some degree of certainty by renting a private garden or beach, through the property system. Alternatively, I can plan to meet my friends on a public beach or at Sheep's Meadow in Central Park. I can buy an easement from my neighbor to reach a nearby river, or I can walk around her property using the public road that makes up our transportation commons. Each institutional framework--property and commons--allows for a certain freedom of action and a certain degree of predictability of access to resources. Their complementary coexistence and relative salience as institutional frameworks for action determine the relative reach of the market and the domain of nonmarket action, both individual and social, in the resources they govern and the activities that depend on access to those resources. 
Now that material conditions have enabled the emergence of greater scope for nonmarket action, the scope and existence of a core common infrastructure that includes the basic resources necessary to produce and exchange information will shape the degree to which individuals will be able to act in all the ways that I describe as central to the emergence of a networked information economy and the freedoms it makes possible. ={commons} @@ -236,7 +236,7 @@ Social and economic organization is not infinitely malleable. Neither is it alwa This book is offered, then, as a challenge to contemporary liberal democracies. We are in the midst of a technological, economic, and organizational transformation that allows us to renegotiate the terms of freedom, justice, and productivity in the information society. How we shall live in this new environment will in some significant measure depend on policy choices that we make over the next decade or so. To be able to understand these choices, to be able to make them well, we must recognize that they are part of what is fundamentally a social and political choice--a choice about how to be free, equal, productive human beings under a new set of technological and ,{[pg 28]}, economic conditions. As economic policy, allowing yesterday's winners to dictate the terms of tomorrow's economic competition would be disastrous. As social policy, missing an opportunity to enrich democracy, freedom, and justice in our society while maintaining or even enhancing our productivity would be unforgivable. 
,{[pg 29]}, -:C~ Part One - The Networked Information Economy +:B~ Part One - The Networked Information Economy 1~p1 Introduction ={communities:technology-defined social structure+9;norms (social):technology-defined structure+9;regulation by social norms: technology-defined structure+9;social relations and norms: technology-defined structure+9;social structure, defined by technology+9;technology:social structure defined by+9} @@ -299,6 +299,8 @@ The actual universe of information production in the economy then, is not as dep The ideal-type strategy that underlies patents and copyrights can be thought of as the "Romantic Maximizer." It conceives of the information producer as a single author or inventor laboring creatively--hence romantic--but in expectation of royalties, rather than immortality, beauty, or truth. An individual or small start-up firm that sells software it developed to a larger firm, or an author selling rights to a book or a film typify this model. The second ideal type that arises within exclusive-rights based industries, "Mickey," is a larger firm that already owns an inventory of exclusive rights, some through in-house development, some by buying from Romantic Maximizers. ,{[pg 43]}, ={Mickey model+3;Romantic Maximizer model+2} +<:pb> + !_ Table 2.1: Ideal-Type Information Production Strategies ={demand-side effects of information production;Joe Einstein model+1;learning networks+1;limited sharing networks+1;Los Alamos model+1;nonmarket information producers:strategies for information production+1;RCA strategy+1;Scholarly Lawyers model+1;sharing:limited sharing networks} @@ -491,7 +493,7 @@ How are we to know that the content produced by widely dispersed individuals is ={accreditation:Amazon+1;Amazon+1;filtering:Amazon+1;relevance filtering:Amazon+1} Amazon uses a mix of mechanisms to get in front of their buyers of books and other products that the users are likely to purchase. 
A number of these mechanisms produce relevance and accreditation by harnessing the users themselves. At the simplest level, the recommendation "customers who bought items you recently viewed also bought these items" is a mechanical means of extracting judgments of relevance and accreditation from the actions of many individuals, who produce the datum of relevance as byproduct of making their own purchasing decisions. Amazon also allows users to create topical lists and track other users as their "friends and favorites." Amazon, like many consumer sites today, also provides users with the ability ,{[pg 76]}, to rate books they buy, generating a peer-produced rating by averaging the ratings. More fundamentally, the core innovation of Google, widely recognized as the most efficient general search engine during the first half of the 2000s, was to introduce peer-based judgments of relevance. Like other search engines at the time, Google used a text-based algorithm to retrieve a given universe of Web pages initially. Its major innovation was its PageRank algorithm, which harnesses peer production of ranking in the following way. The engine treats links from other Web sites pointing to a given Web site as votes of confidence. Whenever someone who authors a Web site links to someone else's page, that person has stated quite explicitly that the linked page is worth a visit. Google's search engine counts these links as distributed votes of confidence in the quality of the page pointed to. Pages that are heavily linked-to count as more important votes of confidence. If a highly linked-to site links to a given page, that vote counts for more than the vote of a site that no one else thinks is worth visiting. The point to take home from looking at Google and Amazon is that corporations that have done immensely well at acquiring and retaining users have harnessed peer production to enable users to find things they want quickly and efficiently. 
-={accreditation:Google;communities:critical culture and self-reflection+1;culture:critically of (self-reflection)+1;filtering:Google;Google;relevance filtering:Google} +={accreditation:Google;communities:critical culture and self-reflection+1;culture:criticality of (self-reflection)+1;filtering:Google;Google;relevance filtering:Google} The most prominent example of a distributed project self-consciously devoted to peer production of relevance is the Open Directory Project. The site relies on more than sixty thousand volunteer editors to determine which links should be included in the directory. Acceptance as a volunteer requires application. Quality relies on a peer-review process based substantially on seniority as a volunteer and level of engagement with the site. The site is hosted and administered by Netscape, which pays for server space and a small number of employees to administer the site and set up the initial guidelines. Licensing is free and presumably adds value partly to America Online's (AOL's) and Netscape's commercial search engine/portal and partly through goodwill. Volunteers are not affiliated with Netscape and receive no compensation. They spend time selecting sites for inclusion in the directory (in small increments of perhaps fifteen minutes per site reviewed), producing the most comprehensive, highest-quality human-edited directory of the Web--at this point outshining the directory produced by the company that pioneered human edited directories of the Web: Yahoo!. 
={accreditation:Open Directory Project (ODP);critical culture and self-reflection:Open Directory Project;filtering:Open Directory Project (ODP);ODP (Open Directory Project);Open Directory Project (ODP);relevance filtering:Open Directory Project (ODP);self-organization:Open Directory Project} @@ -629,7 +631,7 @@ The independence of Web sites is what marks their major difference from more org ={Slashdot+1;accreditation:Slashdot+1;filtering:Slashdot+1;relevance filtering:Slashdot+1;peer production:maintenance of cooperation+1;structured production:maintenance of cooperation+1} Cooperation in peer-production processes is usually maintained by some combination of technical architecture, social norms, legal rules, and a technically backed hierarchy that is validated by social norms. /{Wikipedia}/ is the strongest example of a discourse-centric model of cooperation based on social norms. However, even /{Wikipedia}/ includes, ultimately, a small number of people with system administrator privileges who can eliminate accounts or block users in the event that someone is being genuinely obstructionist. This technical fallback, however, appears only after substantial play has been given to self-policing by participants, and to informal and quasi-formal community-based dispute resolution mechanisms. Slashdot, by contrast, provides a strong model of a sophisticated technical system intended to assure that no one can "defect" from the cooperative enterprise of commenting and moderating comments. It limits behavior enabled by the system to avoid destructive behavior before it happens, rather than policing it after the fact.
The Slash code does this by technically limiting the power any given person has to moderate anyone else up or down, and by making every moderator the subject of a peer review system whose judgments are enforced technically-- that is, when any given user is described by a sufficiently large number of other users as unfair, that user automatically loses the technical ability to moderate the comments of others. The system itself is a free software project, licensed under the GPL (General Public License)--which is itself the quintessential example of how law is used to prevent some types of defection from the common enterprise of peer production of software. The particular type of defection that the GPL protects against is appropriation of the joint product by any single individual or firm, the risk of which would make it less attractive for anyone to contribute to the project to begin with. The GPL assures that, as a legal matter, no one who contributes to a free software project need worry that some other contributor will take the project and make it exclusively their own. The ultimate quality judgments regarding what is incorporated into the "formal" releases of free software projects provide the clearest example of the extent to which a meritocratic hierarchy can be used to integrate diverse contributions into a finished single product. In the case of the Linux kernel development project (see chapter 3), it was always within the power of Linus Torvalds, who initiated the project, to decide which contributions should be included in a new release, and which should not. But it is a funny sort of hierarchy, whose quirkiness Steve Weber ,{[pg 105]}, well explicates.~{ Steve Weber, The Success of Open Source (Cambridge, MA: Harvard University Press, 2004). }~ Torvalds's authority is persuasive, not legal or technical, and certainly not determinative. 
He can do nothing except persuade others to prevent them from developing anything they want and add it to their kernel, or to distribute that alternative version of the kernel. There is nothing he can do to prevent the entire community of users, or some subsection of it, from rejecting his judgment about what ought to be included in the kernel. Anyone is legally free to do as they please. So these projects are based on a hierarchy of meritocratic respect, on social norms, and, to a great extent, on the mutual recognition by most players in this game that it is to everybody's advantage to have someone overlay a peer review system with some leadership. -={Wikipedia project;Torvalds, Linus;Weber, Steve;General Public License (GPL):See also fre software;GPL (General Public License):See Also free software;licensing:GPL (General Public License)} +={Wikipedia project;Torvalds, Linus;Weber, Steve;General Public License (GPL):See also free software;GPL (General Public License):See Also free software;licensing:GPL (General Public License)} In combination then, three characteristics make possible the emergence of information production that is not based on exclusive proprietary claims, not aimed toward sales in a market for either motivation or information, and not organized around property and contract claims to form firms or market exchanges. First, the physical machinery necessary to participate in information and cultural production is almost universally distributed in the population of the advanced economies. Certainly, personal computers as capital goods are under the control of numbers of individuals that are orders of magnitude larger than the number of parties controlling the use of massproduction-capable printing presses, broadcast transmitters, satellites, or cable systems, record manufacturing and distribution chains, and film studios and distribution systems. 
This means that the physical machinery can be put in service and deployed in response to any one of the diverse motivations individual human beings experience. They need not be deployed in order to maximize returns on the financial capital, because financial capital need not be mobilized to acquire and put in service any of the large capital goods typical of the industrial information economy. Second, the primary raw materials in the information economy, unlike the industrial economy, are public goods--existing information, knowledge, and culture. Their actual marginal social cost is zero. Unless regulatory policy makes them purposefully expensive in order to sustain the proprietary business models, acquiring raw materials also requires no financial capital outlay. Again, this means that these raw materials can be deployed for any human motivation. They need not maximize financial returns. Third, the technical architectures, organizational models, and social dynamics of information production and exchange on the Internet have developed so that they allow us to structure the solution to problems--in particular to information production problems--in ways ,{[pg 106]}, that are highly modular. This allows many diversely motivated people to act for a wide range of reasons that, in combination, cohere into new useful information, knowledge, and cultural goods. These architectures and organizational models allow both independent creation that coexists and coheres into usable patterns, and interdependent cooperative enterprises in the form of peer-production processes. ={computers;hardware;personal computers;physical machinery and computers} @@ -803,7 +805,7 @@ The other quite basic change wrought by the emergence of social production, from The overarching point is that social production is reshaping the market conditions under which businesses operate. 
To some of the incumbents of the industrial information economy, the pressure from social production is experienced as pure threat. It is the clash between these incumbents and the new practices that was most widely reported in the media in the first five years of the twenty-first century, and that has driven much of policy making, legislation, and litigation in this area. But the much more fundamental effect on the business environment is that social production is changing the relationship of firms to individuals outside of them, and through this changing the strategies that firms internally are exploring. It is creating new sources of inputs, and new tastes and opportunities for outputs. Consumers are changing into users--more active and productive than the consumers of the ,{[pg 127]}, industrial information economy. The change is reshaping the relationships necessary for business success, requiring closer integration of users into the process of production, both in inputs and outputs. It requires different leadership talents and foci. By the time of this writing, in 2005, these new opportunities and adaptations have begun to be seized upon as strategic advantages by some of the most successful companies working around the Internet and information technology, and increasingly now around information and cultural production more generally. Eric von Hippel's work has shown how the model of user innovation has been integrated into the business model of innovative firms even in sectors far removed from either the network or from information production--like designing kite-surfing equipment or mountain bikes. As businesses begin to do this, the platforms and tools for collaboration improve, the opportunities and salience of social production increases, and the political economy begins to shift. 
And as these firms and social processes coevolve, the dynamic accommodation they are developing provides us with an image of what the future stable interface between market-based businesses and the newly salient social production is likely to look like. ,{[pg 128]}, ,{[pg 129]}, ={von Hippel, Eric} -:C~ Part Two - The Political Economy of Property and Commons +:B~ Part Two - The Political Economy of Property and Commons 1~p2 Introduction ={commons+5;property ownership+5} @@ -1232,7 +1234,7 @@ Another dimension that is less well developed in the United States than it is in ={Gilmore, Dan;Pantic, Drazen;Rheingold, Howard;mobile phones;text messaging} 2~ NETWORKED INFORMATION ECONOMY MEETS THE PUBLIC SPHERE -={information economy:effects on public sphere+21;networked environment policy:effects on public sphere+21} +={information economy:effects on public sphere+21;networked information economy:effects on public sphere+21} The networked public sphere is not made of tools, but of social production practices that these tools enable. The primary effect of the Internet on the ,{[pg 220]}, public sphere in liberal societies relies on the information and cultural production activity of emerging nonmarket actors: individuals working alone and cooperatively with others, more formal associations like NGOs, and their feedback effect on the mainstream media itself. These enable the networked public sphere to moderate the two major concerns with commercial mass media as a platform for the public sphere: (1) the excessive power it gives its owners, and (2) its tendency, when owners do not dedicate their media to exert power, to foster an inert polity. More fundamentally, the social practices of information and discourse allow a very large number of actors to see themselves as potential contributors to public discourse and as potential actors in political arenas, rather than mostly passive recipients of mediated information who occasionally can vote their preferences. 
In this section, I offer two detailed stories that highlight different aspects of the effects of the networked information economy on the construction of the public sphere. The first story focuses on how the networked public sphere allows individuals to monitor and disrupt the use of mass-media power, as well as organize for political action. The second emphasizes in particular how the networked public sphere allows individuals and groups of intense political engagement to report, comment, and generally play the role traditionally assigned to the press in observing, analyzing, and creating political salience for matters of public interest. The case studies provide a context both for seeing how the networked public sphere responds to the core failings of the commercial, mass-media-dominated public sphere and for considering the critiques of the Internet as a platform for a liberal public sphere. @@ -1573,6 +1575,8 @@ If culture is indeed part of how we form a shared sense of unexamined common kno If you run a search for "Barbie" on three separate search engines--Google, Overture, and Yahoo!--you will get quite different results. Table 8.1 lists these results in the order in which they appear on each search engine. Overture is a search engine that sells placement to the parties who are being searched. Hits on this search engine are therefore ranked based on whoever paid Overture the most in order to be placed highly in response to a query. On this list, none of the top ten results represent anything other than sales-related Barbie sites. Critical sites begin to appear only around the twenty-fifth result, presumably after all paying clients have been served. Google, as we already know, uses a radically decentralized mechanism for assigning relevance.
It counts how many sites on the Web have linked to a particular site that has the search term in it, and ranks the search results by placing a site with a high number of incoming links above a site with a low number of incoming links. In effect, each Web site publisher "votes" for a site's ,{[pg 286]}, ,{[pg 287]}, relevance by linking to it, and Google aggregates these votes and renders them on their results page as higher ranking. The little girl who searches for Barbie on Google will encounter a culturally contested figure. The same girl, searching on Overture, will encounter a commodity toy. In each case, the underlying efforts of Mattel, the producer of Barbie, have not changed. What is different is that in an environment where relevance is measured in nonmarket action--placing a link to a Web site because you deem it relevant to whatever you are doing with your Web site--as opposed to in dollars, Barbie has become a more transparent cultural object. It is easier for the little girl to see that the doll is not only a toy, not only a symbol of beauty and glamour, but also a symbol of how norms of female beauty in our society can be oppressive to women and girls. The transparency does not force the girl to choose one meaning of Barbie or another. It does, however, render transparent that Barbie can have multiple meanings and that choosing meanings is a matter of political concern for some set of people who coinhabit this culture. Yahoo! occupies something of a middle ground--its algorithm does link to two of the critical sites among the top ten, and within the top twenty, identifies most of the sites that appear on Google's top ten that are not related to sales or promotion. ={Barbie (doll), culture of+4} +<:pb> + % table moved after paragraph !_ Table 8.1: Results for "Barbie" - Google versus Overture and Yahoo! 
@@ -1629,7 +1633,7 @@ Only two encyclopedias focus explicitly on Barbie's cultural meaning: Britannica The relative emphasis of Google and /{Wikipedia}/, on the one hand, and Overture, Yahoo!, and the commercial encyclopedias other than Britannica, on the other hand, is emblematic of a basic difference between markets and social conversations with regard to culture. If we focus on the role of culture as "common knowledge" or background knowledge, its relationship to the market--at least for theoretical economists--is exogenous. It can be taken as given and treated as "taste." In more practical business environments, culture is indeed a source of taste and demand, but it is not taken as exogenous. Culture, symbolism, and meaning, as they are tied with marketbased goods, become a major focus of advertising and of demand management. No one who has been exposed to the advertising campaigns of Coca-Cola, Nike, or Apple Computers, as well as practically to any one of a broad range of advertising campaigns over the past few decades, can fail to see that these are not primarily a communication about the material characteristics or qualities of the products or services sold by the advertisers. ,{[pg 290]}, They are about meaning. These campaigns try to invest the act of buying their products or services with a cultural meaning that they cultivate, manipulate, and try to generalize in the practices of the society in which they are advertising, precisely in order to shape taste. They offer an opportunity to generate rents, because the consumer has to have this company's shoe rather than that one, because that particular shoe makes the customer this kind of person rather than that kind--cool rather than stuffy, sophisticated rather than common. Neither the theoretical economists nor the marketing executives have any interest in rendering culture transparent or writable. 
Whether one treats culture as exogenous or as a domain for limiting the elasticity of demand for one's particular product, there is no impetus to make it easier for consumers to see through the cultural symbols, debate their significance, or make them their own. If there is business reason to do anything about culture, it is to try to shape the cultural meaning of an object or practice, in order to shape the demand for it, while keeping the role of culture hidden and assuring control over the careful cultural choreography of the symbols attached to the company. Indeed, in 1995, the U.S. Congress enacted a new kind of trademark law, the Federal Antidilution Act, which for the first time disconnects trademark protection from protecting consumers from confusion by knockoffs. The Antidilution Act of 1995 gives the owner of any famous mark--and only famous marks--protection from any use that dilutes the meaning that the brand owner has attached to its own mark. It can be entirely clear to consumers that a particular use does not come from the owner of the brand, and still, the owner has a right to prevent this use. While there is some constitutional free-speech protection for criticism, there is also a basic change in the understanding of trademark law-- from a consumer protection law intended to assure that consumers can rely on the consistency of goods marked in a certain way, to a property right in controlling the meaning of symbols a company has successfully cultivated so that they are, in fact, famous. This legal change marks a major shift in the understanding of the role of law in assigning control for cultural meaning generated by market actors. 
-={Antidilutation Act of 1995;branding:trademark dilutation;dilutation of trademaks;logical layer of institutional ecology:trademark dilutation;proprietary rights:trademark dilutation;trademark dilutation;information production, market-based:cultural change, transparency of+4;market-based information producers: cultural change, transparency of+4;nonmarket information producers:cultural change, transparency of+4} +={Antidilutation Act of 1995;branding:trademark dilutation;dilutation of trademarks;logical layer of institutional ecology:trademark dilutation;proprietary rights:trademark dilutation;trademark dilutation;information production, market-based:cultural change, transparency of+4;market-based information producers: cultural change, transparency of+4;nonmarket information producers:cultural change, transparency of+4} Unlike market production of culture, meaning making as a social, nonmarket practice has no similar systematic reason to accept meaning as it comes. Certainly, some social relations do. When girls play with dolls, collect them, or exhibit them, they are rarely engaged in reflection on the meaning of the dolls, just as fans of Scarlett O'Hara, of which a brief Internet search suggests there are many, are not usually engaged in critique of Gone with the ,{[pg 291]}, Wind as much as in replication and adoption of its romantic themes. Plainly, however, some conversations we have with each other are about who we are, how we came to be who we are, and whether we view the answers we find to these questions as attractive or not. In other words, some social interactions do have room for examining culture as well as inhabiting it, for considering background knowledge for what it is, rather than taking it as a given input into the shape of demand or using it as a medium for managing meaning and demand. 
People often engage in conversations with each other precisely to understand themselves in the world, their relationship to others, and what makes them like and unlike those others. One major domain in which this formation of self- and group identity occurs is the adoption or rejection of, and inquiry into, cultural symbols and sources of meaning that will make a group cohere or splinter; that will make people like or unlike each other. @@ -1681,10 +1685,10 @@ We can analyze the implications of the emergence of the networked information ec The opportunities that the network information economy offers, however, often run counter to the central policy drive of both the United States and the European Union in the international trade and intellectual property systems. These two major powers have systematically pushed for ever-stronger proprietary protection and increasing reliance on strong patents, copyrights, and similar exclusive rights as the core information policy for growth and development. Chapter 2 explains why such a policy is suspect from a purely economic perspective concerned with optimizing innovation. ,{[pg 303]}, A system that relies too heavily on proprietary approaches to information production is not, however, merely inefficient. It is unjust. Proprietary rights are designed to elicit signals of people's willingness and ability to pay. In the presence of extreme distribution differences like those that characterize the global economy, the market is a poor measure of comparative welfare. A system that signals what innovations are most desirable and rations access to these innovations based on ability, as well as willingness, to pay, overrepresents welfare gains of the wealthy and underrepresents welfare gains of the poor. Twenty thousand American teenagers can simply afford, and will be willing to pay, much more for acne medication than the more than a million Africans who die of malaria every year can afford to pay for a vaccine. 
A system that relies too heavily on proprietary models for managing information production and exchange is unjust because it is geared toward serving small welfare increases for people who can pay a lot for incremental improvements in welfare, and against providing large welfare increases for people who cannot pay for what they need. 2~ LIBERAL THEORIES OF JUSTICE AND THE NETWORKED INFORMATION ECONOMY -={human development and justice:liberal theories of+7;human welfare:liberal theories of justice+7;information economy:justice, liberal theories of+7;justice and human development:liberal theories of+7;liberal societies:theories of justice+7;networked environment policy:justice, liberal theories of+7;welfare:liberal theories of justice+7|see also justice and human development} +={human development and justice:liberal theories of+7;human welfare:liberal theories of justice+7;information economy:justice, liberal theories of+7;justice and human development:liberal theories of+7;liberal societies:theories of justice+7;welfare:liberal theories of justice+7|see also justice and human development} Liberal theories of justice can be categorized according to how they characterize the sources of inequality in terms of luck, responsibility, and structure. By luck, I mean reasons for the poverty of an individual that are beyond his or her control, and that are part of that individual's lot in life unaffected by his or her choices or actions. By responsibility, I mean causes for the poverty of an individual that can be traced back to his or her actions or choices. By structure, I mean causes for the inequality of an individual that are beyond his or her control, but are traceable to institutions, economic organizations, or social relations that form a society's transactional framework and constrain the behavior of the individual or undermine the efficacy of his or her efforts at self-help. 
-={background knowledge:see culture bad luck, justice and+2;DSL:see broadband networks dumb luck, justice and+2;luck, justice and+2;misfortune, justice and+2;organizational structure:justice and+2;structure of organizations:justice and+2} +={background knowledge:see culture bad luck, justice and+2;DSL:see broadband networks dumb luck, justice and+2;luck, justice and+2;misfortune, justice and+2;organization structure:justice and+2;structure of organizations:justice and+2} We can think of John Rawls's /{Theory of Justice}/ as based on a notion that the poorest people are the poorest because of dumb luck. His proposal for a systematic way of defending and limiting redistribution is the "difference principle." A society should organize its redistribution efforts in order to make those who are least well-off as well-off as they can be. The theory of desert is that, because any of us could in principle be the victim of this dumb luck, we would all have agreed, if none of us had known where we ,{[pg 304]}, would be on the distribution of bad luck, to minimize our exposure to really horrendous conditions. The practical implication is that while we might be bound to sacrifice some productivity to achieve redistribution, we cannot sacrifice too much. If we did that, we would most likely be hurting, rather than helping, the weakest and poorest. Libertarian theories of justice, most prominently represented by Robert Nozick's entitlement theory, on the other hand, tend to ignore bad luck or impoverishing structure. They focus solely on whether the particular holdings of a particular person at any given moment are unjustly obtained. If they are not, they may not justly be taken from the person who holds them. Explicitly, these theories ignore the poor. As a practical matter and by implication, they treat responsibility as the source of the success of the wealthy, and by negation, the plight of the poorest--leading them to be highly resistant to claims of redistribution. 
={Rawls, John+1;Nozick, Robert;redistribution theory+1} @@ -1953,17 +1957,31 @@ group{ Notes: -a. Large ambiguity results because technology transfer office reports increased revenues for yearend 2003 as $178M without reporting expenses; University Annual Report reports licensing revenue with all "revenue from other educational and research activities," and reports a 10 percent decline in this category, "reflecting an anticipated decline in royalty and license income" from the $133M for the previous year-end, 2002. The table reflects an assumed net contribution to university revenues between $100-120M (the entire decline in the category due to royalty/royalties decreased proportionately with the category). - -b. University of California Annual Report of the Office of Technology Transfer is more transparent than most in providing expenses--both net legal expenses and tech transfer direct operating expenses, which allows a clear separation of net revenues from technology transfer activities. +a. Large ambiguity results because technology transfer office reports increased +revenues for yearend 2003 as $178M without reporting expenses; University +Annual Report reports licensing revenue with all "revenue from other +educational and research activities," and reports a 10 percent decline in this +category, "reflecting an anticipated decline in royalty and license income" +from the $133M for the previous year-end, 2002. The table reflects an assumed +net contribution to university revenues between $100-120M (the entire decline +in the category due to royalty/royalties decreased proportionately with the +category). + +b. University of California Annual Report of the Office of Technology Transfer +is more transparent than most in providing expenses--both net legal expenses +and tech transfer direct operating expenses, which allows a clear separation of +net revenues from technology transfer activities. c. 
Minus direct expenses, not including expenses for unlicensed inventions. d. Federal- and nonfederal-sponsored research. -e. Almost half of this amount is in income from a single Initial Public Offering, and therefore does not represent a recurring source of licensing revenue. +e. Almost half of this amount is in income from a single Initial Public +Offering, and therefore does not represent a recurring source of licensing +revenue. -f. Technology transfer gross revenue minus the one-time event of an initial public offering of LiquidMetal Technologies. +f. Technology transfer gross revenue minus the one-time event of an initial +public offering of LiquidMetal Technologies. }group @@ -2039,7 +2057,7 @@ Increased practical individual autonomy has been central to my claims throughout ={communities:virtual+9;virtual communities+9:see also social relations and norms} We are seeing two effects: first, and most robustly, we see a thickening of preexisting relations with friends, family, and neighbors, particularly with those who were not easily reachable in the pre-Internet-mediated environment. Parents, for example, use instant messages to communicate with their children who are in college. Friends who have moved away from each other are keeping in touch more than they did before they had e-mail, because email does not require them to coordinate a time to talk or to pay longdistance rates. However, this thickening of contacts seems to occur alongside a loosening of the hierarchical aspects of these relationships, as individuals weave their own web of supporting peer relations into the fabric of what might otherwise be stifling familial relationships. Second, we are beginning to see the emergence of greater scope for limited-purpose, loose relationships. These may not fit the ideal model of "virtual communities." They certainly do not fit a deep conception of "community" as a person's primary source of emotional context and support. 
They are nonetheless effective and meaningful to their participants. It appears that, as the digitally networked environment begins to displace mass media and telephones, its salient communications characteristics provide new dimensions to thicken existing social relations, while also providing new capabilities for looser and more fluid, but still meaningful social networks. A central aspect of this positive improvement in loose ties has been the technical-organizational shift from an information environment dominated by commercial mass media on a oneto-many model, which does not foster group interaction among viewers, to an information environment that both technically and as a matter of social practice enables user-centric, group-based active cooperation platforms of the kind that typify the networked information economy. This is not to say that the Internet necessarily effects all people, all social groups, and networks identically. The effects on different people in different settings and networks will likely vary, certainly in their magnitude. My purpose here, however, is ,{[pg 358]}, to respond to the concern that enhanced individual capabilities entail social fragmentation and alienation. The available data do not support that claim as a description of a broad social effect. 
-={communication:thickening of preexisting relations;displacement of real-world interactions;family relations, strengthening of;loose affiliations;neighborhood relations, strengthening of;networked public sphere:loose affiliations;norms (social):loose affiliations|thickening of preexisting relations;peer production:loose affiliations;preexisting relations, thickening of;public sphere:loose affiliations;regulation by social norms:loose affiliations|thickening of preexisting relations;scope of loose relationships;social relations and norms:loose affiliations|thickening of preexisting relations;supplantation of real-world interaction;thickening of preexisting relations} +={communication:thickening of preexisting relations;displacement of real-world interaction;family relations, strengthening of;loose affiliations;neighborhood relations, strengthening of;networked public sphere:loose affiliations;norms (social):loose affiliations|thickening of preexisting relations;peer production:loose affiliations;preexisting relations, thickening of;public sphere:loose affiliations;regulation by social norms:loose affiliations|thickening of preexisting relations;scope of loose relationships;social relations and norms:loose affiliations|thickening of preexisting relations;supplantation of real-world interaction;thickening of preexisting relations} 2~ FROM "VIRTUAL COMMUNITIES" TO FEAR OF DISINTEGRATION @@ -2068,7 +2086,7 @@ The concerns represented by these early studies of the effects of Internet use o ={Coleman, James;Granovetter, Mark;Putnum, Robert} There are, roughly speaking, two types of responses to these concerns. The first is empirical. In order for these concerns to be valid as applied to increasing use of Internet communications, it must be the case that Internet communications, with all of their inadequacies, come to supplant real-world human interactions, rather than simply to supplement them. 
Unless Internet connections actually displace direct, unmediated, human contact, there is no basis to think that using the Internet will lead to a decline in those nourishing connections we need psychologically, or in the useful connections we make socially, that are based on direct human contact with friends, family, and neighbors. The second response is theoretical. It challenges the notion that the socially embedded individual is a fixed entity with unchanging needs that are, or are not, fulfilled by changing social conditions and relations. Instead, it suggests that the "nature" of individuals changes over time, based on actual social practices and expectations. In this case, we are seeing a shift from individuals who depend on social relations that are dominated by locally embedded, thick, unmediated, given, and stable relations, into networked individuals--who are more dependent on their own combination of strong and weak ties, who switch networks, cross boundaries, and weave their own web of more or less instrumental, relatively fluid relationships. Manuel Castells calls this the "networked society,"~{ Manuel Castells, The Rise of Networked Society 2d ed. (Malden, MA: Blackwell Publishers, Inc., 2000). }~ Barry Wellman, "networked individualism."~{ Barry Wellman et al., "The Social Affordances of the Internet for Networked Individualism," Journal of Computer Mediated Communication 8, no. 3 (April 2003). }~ To simplify vastly, it is not that people cease to depend on others and their context for both psychological and social wellbeing and efficacy. It is that the kinds of connections that we come to rely on for these basic human needs change over time. Comparisons of current practices to the old ways of achieving the desiderata of community, and fears regarding the loss of community, are more a form of nostalgia than a diagnosis of present social malaise. 
,{[pg 363]}, -={Castells, Manuel;Wellman, Barry;displacement of real-world interaction+5;family relations, strengthening of+5;loose affiliations;neighborhood relations, strengthening of+5;networked public sphere:loose affiliations;norms (social):loose affiliations;peer production:loose affiliations;public sphere:loose affiliations;regulations by social norms:loose affiliations;social relations and norms:loose affiliations;supplantation of real-world interaction+5;thickening of preexisting relations+5} +={Castells, Manuel;Wellman, Barry;displacement of real-world interaction+5;family relations, strengthening of+5;loose affiliations;neighborhood relations, strengthening of+5;networked public sphere:loose affiliations;norms (social):loose affiliations;peer production:loose affiliations;public sphere:loose affiliations;regulation by social norms:loose affiliations;social relations and norms:loose affiliations;supplantation of real-world interaction+5;thickening of preexisting relations+5} 3~ Users Increase Their Connections with Preexisting Relations ={e-mail:thickening of preexisting relations+4;social capital:thickening of preexisting relations+4} @@ -2139,7 +2157,7 @@ Empirically, it seems that the Internet is allowing us to eat our cake and have The conceptual answer has been that the image of "community" that seeks a facsimile of a distant pastoral village is simply the wrong image of how we interact as social beings. We are a networked society now--networked individuals connected with each other in a mesh of loosely knit, overlapping, flat connections. This does not leave us in a state of anomie. We are welladjusted, networked individuals; well-adjusted socially in ways that those who seek community would value, but in new and different ways. In a substantial departure from the range of feasible communications channels available in the twentieth century, the Internet has begun to offer us new ways of connecting to each other in groups small and large. 
As we have come to take advantage of these new capabilities, we see social norms and software coevolving to offer new, more stable, and richer contexts for forging new relationships beyond those that in the past have been the focus of our social lives. These do not displace the older relations. They do not mark a fundamental shift in human nature into selfless, community-conscious characters. We continue to be complex beings, radically individual and self-interested ,{[pg 377]}, at the same time that we are entwined with others who form the context out of which we take meaning, and in which we live our lives. However, we now have new scope for interaction with others. We have new opportunities for building sustained limited-purpose relations, weak and intermediate-strength ties that have significant roles in providing us with context, with a source of defining part of our identity, with potential sources for support, and with human companionship. That does not mean that these new relationships will come to displace the centrality of our more immediate relationships. They will, however, offer increasingly attractive supplements as we seek new and diverse ways to embed ourselves in relation to others, to gain efficacy in weaker ties, and to interpolate different social networks in combinations that provide us both stability of context and a greater degree of freedom from the hierarchical and constraining aspects of some of our social relations. 
,{[pg 378]}, ,{[pg 379]}, -:C~ Part Three - Policies of Freedom at a Moment of Transformation +:B~ Part Three - Policies of Freedom at a Moment of Transformation 1~p3 Introduction @@ -2185,7 +2203,7 @@ The first two parts of this book explained why the introduction of digital compu ={commercial model of communication:mapping, framework for+13;industrial model of communication:mapping, framework for+13;institutional ecology of digital environment:mapping, framework for+13;layers of institutional ecology+13;policy:mapping institutional ecology+13;policy layers+13;traditional model of communication:mapping, framework for+13} Two specific examples will illustrate the various levels at which law can operate to shape the use of information and its production and exchange. The first example builds on the story from chapter 7 of how embarrassing internal e-mails from Diebold, the electronic voting machine maker, were exposed by investigative journalism conducted on a nonmarket and peerproduction model. After students at Swarthmore College posted the files, Diebold made a demand under the DMCA that the college remove the materials or face suit for contributory copyright infringement. The students were therefore forced to remove the materials. However, in order keep the materials available, the students asked students at other institutions to mirror the files, and injected them into the eDonkey, BitTorrent, and FreeNet filesharing and publication networks. Ultimately, a court held that the unauthorized publication of files that were not intended for sale and carried such high public value was a fair use. This meant that the underlying publication of the files was not itself a violation, and therefore the Internet service provider was not liable for providing a conduit. However, the case was decided on September 30, 2004--long after the information would have been relevant ,{[pg 390]}, to the voting equipment certification process in California. 
What kept the information available for public review was not the ultimate vindication of the students' publication. It was the fact that the materials were kept in the public sphere even under threat of litigation. Recall also that at least some of the earlier set of Diebold files that were uncovered by the activist who had started the whole process in early 2003 were zipped, or perhaps encrypted in some form. Scoop, the Web site that published the revelation of the initial files, published--along with its challenge to the Internet community to scour the files and find holes in the system--links to locations in which utilities necessary for reading the files could be found. -={Diebold Elections Systems+3;electronic voting machines (case study)+3;networked public sphere:Diebold Election Systems case study+3;policy:Diebold Election Systems case study+3;public sphere:Diebold Election Systems case study+3;voting, electronic+3} +={Diebold Election Systems+3;electronic voting machines (case study)+3;networked public sphere:Diebold Election Systems case study+3;policy:Diebold Election Systems case study+3;public sphere:Diebold Election Systems case study+3;voting, electronic+3} There are four primary potential points of failure in this story that could have conspired to prevent the revelation of the Diebold files, or at least to suppress the peer-produced journalistic mode that made them available. First, if the service provider--the college, in this case--had been a sole provider with no alternative physical transmission systems, its decision to block the materials under threat of suit would have prevented publication of the materials throughout the relevant period. Second, the existence of peer-to-peer networks that overlay the physical networks and were used to distribute the materials made expunging them from the Internet practically impossible. There was no single point of storage that could be locked down. 
This made the prospect of threatening other universities futile. Third, those of the original files that were not in plain text were readable with software utilities that were freely available on the Internet, and to which Scoop pointed its readers. This made the files readable to many more critical eyes than they otherwise would have been. Fourth, and finally, the fact that access to the raw materials--the e-mails--was ultimately found to be privileged under the fair-use doctrine in copyright law allowed all the acts that had been performed in the preceding period under a shadow of legal liability to proceed in the light of legality. @@ -2205,7 +2223,7 @@ The remainder of this chapter provides a more or less detailed presentation of t A quick look at table 11.1 reveals that there is a diverse set of sources of openness. A few of these are legal. Mostly, they are based on technological and social practices, including resistance to legal and regulatory drives toward enclosure. Examples of policy interventions that support an open core common infrastructure are the FCC's increased permission to deploy open wireless networks and the various municipal broadband initiatives. The former is a regulatory intervention, but its form is largely removal of past prohibitions on an entire engineering approach to building wireless systems. Municipal efforts to produce open broadband networks are being resisted at the state legislation level, with statutes that remove the power to provision broadband from the home rule powers of municipalities. For the most part, the drive for openness is based on individual and voluntary cooperative action, not law. The social practices of openness take on a quasi-normative face when practiced in standard-setting bodies like the Internet Engineering Task Force (IETF) or the World Wide Web Consortium (W3C). However, none of these have the force of law. 
Legal devices also support openness when used in voluntaristic models like free software licensing and Creative Commons?type licensing. However, most often when law has intervened in its regulatory force, as opposed to its contractual-enablement force, it has done so almost entirely on the side of proprietary enclosure. Another characteristic of the social-economic-institutional struggle is an alliance between a large number of commercial actors and the social sharing culture. We see this in the way that wireless equipment manufacturers are selling into a market of users of WiFi and similar unlicensed wireless devices. We see this in the way that personal computer manufacturers are competing ,{[pg 395]}, over decreasing margins by producing the most general-purpose machines that would be most flexible for their users, rather than machines that would most effectively implement the interests of Hollywood and the recording industry. We see this in the way that service and equipment-based firms, like IBM and Hewlett-Packard (HP), support open-source and free software. The alliance between the diffuse users and the companies that are adapting their business models to serve them as users, instead of as passive consumers, affects the political economy of this institutional battle in favor of openness. On the other hand, security consciousness in the United States has led to some efforts to tip the balance in favor of closed proprietary systems, apparently because these are currently perceived as more secure, or at least more amenable to government control. While orthogonal in its political origins to the battle between proprietary and commons-based strategies for information production, this drive does tilt the field in favor of enclosure, at least at the time of this writing in 2005. 
-={commercial model of communication:security related policy;industrial model of communication:security-related policy;institutional ecology of digital environment:security-related policy;policy:security-related;security-related policy;traditional model of communication:security-related policy} +={commercial model of communication:security-related policy;industrial model of communication:security-related policy;institutional ecology of digital environment:security-related policy;policy:security-related;security-related policy;traditional model of communication:security-related policy} % paragraph end moved above table diff --git a/data/v1/samples/two_bits.christopher_kelty.sst b/data/v1/samples/two_bits.christopher_kelty.sst index 39e34b6..1c833c8 100644 --- a/data/v1/samples/two_bits.christopher_kelty.sst +++ b/data/v1/samples/two_bits.christopher_kelty.sst @@ -108,7 +108,7 @@ At first glance, the thread tying these projects together seems to be the Intern ={Internet+12:relation to Free Software;Free Software:relation to Internet;public sphere:theories of} Both the Internet and Free Software are historically specific, that is, not just any old new media or information technology. But the Internet is many, many specific things to many, many specific people. As one reviewer of an early manuscript version of this book noted, "For most people, the Internet is porn, stock quotes, Al Jazeera clips of executions, Skype, seeing pictures of the grandkids, porn, never having to buy another encyclopedia, MySpace, e-mail, online housing listings, Amazon, Googling potential romantic interests, etc. etc." It is impossible to explain all of these things; the meaning and significance of the proliferation of digital pornography is a very different concern than that of the fall of the print encyclopedia ,{[pg 5]}, and the rise of Wikipedia. 
Yet certain underlying practices relate these diverse phenomena to one another and help explain why they have occurred at this time and in this technical, legal, and social context. By looking carefully at Free Software and its modulations, I suggest, one can come to a better understanding of the changes affecting pornography, Wikipedia, stock quotes, and many other wonderful and terrifying things.~{ Wikipedia is perhaps the most widely known and generally familiar example of what this book is about. Even though it is not identified as such, it is in fact a Free Software project and a "modulation" of Free Software as I describe it here. The non-technically inclined reader might keep Wikipedia in mind as an example with which to follow the argument of this book. I will return to it explicitly in part 3. However, for better or for worse, there will be no discussion of pornography. }~ -={Wikipedia} +={Wikipedia (collaborative encyclopedia)} Two Bits has three parts. Part I of this book introduces the reader to the concept of recursive publics by exploring the lives, works, and discussions of an international community of geeks brought together by their shared interest in the Internet. Chapter 1 asks, in an ethnographic voice, "Why do geeks associate with one another?" The answer—told via the story of Napster in 2000 and the standards process at the heart of the Internet—is that they are making a recursive public. Chapter 2 explores the words and attitudes of geeks more closely, focusing on the strange stories they tell (about the Protestant Reformation, about their practical everyday polymathy, about progress and enlightenment), stories that make sense of contemporary political economy in sometimes surprising ways. 
Central to part I is an explication of the ways in which geeks argue about technology but also argue with and through it, by building, modifying, and maintaining the very software, networks, and legal tools within which and by which they associate with one another. It is meant to give the reader a kind of visceral sense of why certain arrangements of technology, organization, and law—specifically that of the Internet and Free Software—are so vitally important to these geeks. ={geeks;Napster;technology:as argument} @@ -223,7 +223,7 @@ The study of distributed phenomena does not necessarily imply the detailed, loca ={Weber, Max} It is in this sense that the ethnographic object of this study is not geeks and not any particular project or place or set of people, but Free Software and the Internet. Even more precisely, the ethnographic object of this study is "recursive publics"—except that this concept is also the work of the ethnography, not its preliminary object. I could not have identified "recursive publics" as the object of the ethnography at the outset, and this is nice proof that ethnographic work is a particular kind of epistemological encounter, an encounter that requires considerable conceptual work during and after the material labor of fieldwork, and throughout the material labor of writing and rewriting, in order to make sense of and reorient it into a question that will have looked deliberate and ,{[pg 21]}, answerable in hindsight. Ethnography of this sort requires a long-term commitment and an ability to see past the obvious surface of rapid transformation to a more obscure and slower temporality of cultural significance, yet still pose questions and refine debates about the near future.~{ Despite what might sound like a "shoot first, ask questions later" approach, the design of this project was in fact conducted according to specific methodologies. 
The most salient is actor-network theory: Latour, Science in Action; Law, "Technology and Heterogeneous Engineering"; Callon, "Some Elements of a Sociology of Translation"; Latour, Pandora’s Hope; Latour, Re-assembling the Social; Callon, Laws of the Markets; Law and Hassard, Actor Network Theory and After. Ironically, there have been no actor-network studies of networks, which is to say, of particular information and communication technologies such as the Internet. The confusion of the word network (as an analytical and methodological term) with that of network (as a particular configuration of wires, waves, software, and chips, or of people, roads, and buses, or of databases, names, and diseases) means that it is necessary to always distinguish this-network-here from any-network-whatsoever. My approach shares much with the ontological questions raised in works such as Law, Aircraft Stories; Mol, The Body Multiple; Cussins, "Ontological Choreography"; Charis Thompson, Making Parents; and Dumit, Picturing Personhood. }~ Historically speaking, the chapters of part II can be understood as a contribution to a history of scientific infrastructure—or perhaps to an understanding of large-scale, collective experimentation.~{ I understand a concern with scientific infrastructure to begin with Steve Shapin and Simon Schaffer in Leviathan and the Air Pump, but the genealogy is no doubt more complex. It includes Shapin, The Social History of Truth; Biagioli, Galileo, Courtier; Galison, How Experiments End and Image and Logic; Daston, Biographies of Scientific Objects; Johns, The Nature of the Book. A whole range of works explore the issue of scientific tools and infrastructure: Kohler, Lords of the Fly; Rheinberger, Towards a History of Epistemic Things; Landecker, Culturing Life; Keating and Cambrosio, Biomedical Platforms. 
Bruno Latour’s "What Rules of Method for the New Socio-scientific Experiments" provides one example of where science studies might go with these questions. Important texts on the subject of technical infrastructures include Walsh and Bayma, "Computer Networks and Scientific Work"; Bowker and Star, Sorting Things Out; Edwards, The ,{[pg 316]}, Closed World; Misa, Brey, and Feenberg, Modernity and Technology; Star and Ruhleder, "Steps Towards an Ecology of Infrastructure." }~ The Internet and Free Software are each an important practical transformation that will have effects on the practice of science and a kind of complex technical practice for which there are few existing models of study. -={actor network theory;Internet+1} +={Actor Network Theory;Internet+1} A methodological note about the peculiarity of my subject is also in order. The Attentive Reader will note that there are very few fragments of conventional ethnographic material (i.e., interviews or notes) transcribed herein. Where they do appear, they tend to be "publicly available"—which is to say, accessible via the Internet—and are cited as such, with as much detail as necessary to allow the reader to recover them. Conventional wisdom in both anthropology and history has it that what makes a study interesting, in part, is the work a researcher has put into gathering that which is not already available, that is, primary sources as opposed to secondary sources. In some cases I provide that primary access (specifically in chapters 2, 8, and 9), but in many others it is now literally impossible: nearly everything is archived. Discussions, fights, collaborations, talks, papers, software, articles, news stories, history, old software, old software manuals, reminiscences, notes, and drawings—it is all saved by someone, somewhere, and, more important, often made instantly available by those who collect it. 
The range of conversations and interactions that count as private (either in the sense of disappearing from written memory or of being accessible only to the parties involved) has shrunk demonstrably since about 1981. ={ethnographic data:availability of+5} @@ -307,7 +307,7 @@ _1 2. Boyle, "The Second Enclosure Movement and the Construction of the Public D 2~ From the Facts of Human Activity Boston, May 2003. Starbucks. Sean and Adrian are on their way to pick me up for dinner. I’ve already had too much coffee, so I sit at the window reading the paper. Eventually Adrian calls to find out where I am, I tell him, and he promises to show up in fifteen minutes. I get bored and go outside to wait, watch the traffic go by. More or less right on time (only post-dotcom is Adrian ever on time), Sean’s new blue VW Beetle rolls into view. Adrian jumps out of the passenger seat and into the back, and I get in. Sean has been driving for a little over a year. He seems confident, cautious, but meanders through the streets of Cambridge. We are destined for Winchester, a township on the Charles River, in order to go to an Indian restaurant that one of Sean’s friends has recommended. When I ask how they are doing, they say, "Good, good." Adrian offers, "Well, Sean’s better than he has been in two years." "Really?" I say, impressed. -={Doyle, Sean+6;Groper Adrian+6} +={Doyle, Sean+6;Gropper, Adrian+6} Sean says, "Well, happier than at least the last year. I, well, let me put it this way: forgive me father for I have sinned, I still have unclean thoughts about some of the upper management in the company, I occasionally think they are not doing things in the best interest of the company, and I see them as self-serving and sometimes wish them ill." In this rolling blue confessional Sean describes some of the people who I am familiar with whom he now tries very hard not to think about. I look at him and say, "Ten Hail Marys and ten Our Fathers, and you will be absolved, my child." 
Turning to Adrian, I ask, "And what about you?" Adrian continues the joke: "I, too, have sinned. I have reached the point where I can see absolutely nothing good coming of this company but that I can keep my investments in it long enough to pay for my children’s college tuition." I say, "You, my son, I cannot help." Sean says, "Well, funny thing about tainted money . . . there just taint enough of it." @@ -1120,7 +1120,7 @@ The absence of an economic or corporate mandate for Thompson’s and Ritchie’s ={AT&T+14;McIlroy, Douglas} UNIX was unique for many technical reasons, but also for a specific economic reason: it was never quite academic and never quite commercial. Martin Campbell-Kelly notes that UNIX was a "non-proprietary operating system of major significance."~{ Campbell-Kelly, From Airline Reservations to Sonic the Hedgehog, 143. }~ Kelly’s use of "non-proprietary" is not surprising, but it is incorrect. Although business-speak regularly opposed open to proprietary throughout the 1980s and early 1990s (and UNIX was definitely the former), Kelly’s slip marks clearly the confusion between software ownership and software distribution that permeates both popular and academic understandings. UNIX was indeed proprietary—it was copyrighted and wholly owned by Bell Labs and in turn by Western Electric ,{[pg 127]}, and AT&T—but it was not exactly commercialized or marketed by them. Instead, AT&T allowed individuals and corporations to install UNIX and to create UNIX-like derivatives for very low licensing fees. Until about 1982, UNIX was licensed to academics very widely for a very small sum: usually royalty-free with a minimal service charge (from about $150 to $800).~{ Ritchie’s Web site contains a copy of a 1974 license (http://cm.bell-labs.com/cm/cs/who/dmr/licenses.html) and a series of ads that exemplify the uneasy positioning of UNIX as a commercial product (http://cm.bell-labs.com/cm/cs/who/dmr/unixad.html). 
According to Don Libes and Sandy Ressler, "The original licenses were source licenses. . . . [C]ommercial institutions paid fees on the order of $20,000. If you owned more than one machine, you had to buy binary licenses for every additional machine [i.e., you were not allowed to copy the source and install it] you wanted to install UNIX on. They were fairly pricey at $8000, considering you couldn’t resell them. On the other hand, educational institutions could buy source licenses for several hundred dollars—just enough to cover Bell Labs’ administrative overhead and the cost of the tapes" (Life with UNIX, 20-21). }~ The conditions of this license allowed researchers to do what they liked with the software so long as they kept it secret: they could not distribute or use it outside of their university labs (or use it to create any commercial product or process), nor publish any part of it. As a result, throughout the 1970s UNIX was developed both by Thompson and Ritchie inside Bell Labs and by users around the world in a relatively informal manner. Bell Labs followed such a liberal policy both because it was one of a small handful of industry-academic research and development centers and because AT&T was a government monopoly that provided phone service to the country and was therefore forbidden to directly enter the computer software market.~{ According to Salus, this licensing practice was also a direct result of Judge Thomas Meaney’s 1956 antitrust consent decree which required AT&T to reveal and to license its patents for nominal fees (A Quarter Century of UNIX, 56); see also Brock, The Second Information Revolution, 116-20. 
}~ -={AT&T:Bell Labratories+13;licensing, of UNIX+6;proprietary systems: open vs.;monopoly} +={AT&T:Bell Laboratories+13;licensing, of UNIX+6;proprietary systems: open vs.;monopoly} Being on the border of business and academia meant that UNIX was, on the one hand, shielded from the demands of management and markets, allowing it to achieve the conceptual integrity that made it so appealing to designers and academics. On the other, it also meant that AT&T treated it as a potential product in the emerging software industry, which included new legal questions from a changing intellectual-property regime, novel forms of marketing and distribution, and new methods of developing, supporting, and distributing software. @@ -1174,7 +1174,7 @@ Unfortunately, Commentary was also legally restricted in its distribution. AT&T ={trade secret law+1} Thus, these generations of computer-science students and academics shared a secret—a trade secret become open secret. Every student who learned the essentials of the UNIX operating system from a photocopy of Lions’s commentary, also learned about AT&T’s attempt to control its legal distribution on the front cover of their textbook. The parallel development of photocopying has a nice resonance here; together with home cassette taping of music and the introduction of the video-cassette recorder, photocopying helped drive the changes to copyright law adopted in 1976. -={copyright:changes in} +={copyright:changes in 1976} Thirty years later, and long after the source code in it had been completely replaced, Lions’s Commentary is still widely admired by geeks. 
Even though Free Software has come full circle in providing students with an actual operating system that can be legally studied, taught, copied, and implemented, the kind of "literary criticism" that Lions’s work represents is still extremely rare; even reading obsolete code with clear commentary is one of the few ways to truly understand the design elements and clever implementations that made the UNIX operating system so different from its predecessors and even many of its successors, few, if any of which have been so successfully ported to the minds of so many students. ={design+2} @@ -1255,7 +1255,7 @@ The open-systems story is also a story of the blind spot of open systems—in th ={intellectual property;interoperability+21;openness (component of Free Software):intellectual property and} Standardization was at the heart of the contest, but by whom and by what means was never resolved. The dream of open systems, pursued in an entirely unregulated industry, resulted in a complicated experiment in novel forms of standardization and cooperation. The creation of a "standard" operating system based on UNIX is the story of a failure, a kind of "figuring out" gone haywire, which resulted in huge consortia of computer manufacturers attempting to work together and compete with each other at the same time. Meanwhile, the successful creation of a "standard" networking protocol—known as the Open Systems Interconnection Reference Model (OSI)—is a story of failure that hides a larger success; OSI was eclipsed in the same period by the rapid and ad hoc adoption of the Transmission Control Protocol/Internet Protocol (TCP/IP), which used a radically different standardization process and which succeeded for a number of surprising reasons, allowing the Internet ,{[pg 145]}, to take the form it did in the 1990s and ultimately exemplifying the moral-technical imaginary of a recursive public—and one at the heart of the practices of Free Software. 
-={figuring out;Open Systems Interconnection (OSI), as reference model;Openness (component of Free Software):standardization and;protocols:Open Systems Interconnection (OSI)|TCP/IP;standards organizations;TCP/IP (Transmission Control Protocol/Internet Protocol)} +={figuring out;Open Systems Interconnection (OSI):as reference model;Openness (component of Free Software):standardization and;protocols:Open Systems Interconnection (OSI)|TCP/IP;standards organizations;TCP/IP (Transmission Control Protocol/Internet Protocol)} The conceiving of openness, which is the central plot of these two stories, has become an essential component of the contemporary practice and power of Free Software. These early battles created a kind of widespread readiness for Free Software in the 1990s, a recognition of Free Software as a removal of open systems’ blind spot, as much as an exploitation of its power. The geek ideal of openness and a moral-technical order (the one that made Napster so significant an event) was forged in the era of open systems; without this concrete historical conception of how to maintain openness in technical and moral terms, the recursive public of geeks would be just another hierarchical closed organization—a corporation manqué—and not an independent public serving as a check on the kinds of destructive power that dominated the open-systems contest. ={Napster} @@ -1441,7 +1441,7 @@ The growth of Free Software in the 1980s and 1990s depended on openness as a con ={Open Systems:networks and+28} The struggle to standardize UNIX as a platform for open systems was not the only open-systems struggle; alongside the UNIX wars, another "religious war" was raging. 
The attempt to standardize networks—in particular, protocols for the inter-networking of multiple, diverse, and autonomous networks of computers—was also a key aspect of the open-systems story of the 1980s.~{ The distinction between a protocol, an implementation and a standard is important: Protocols are descriptions of the precise terms by which two computers can communicate (i.e., a dictionary and a handbook for communicating). An implementation is the creation of software that uses a protocol (i.e., actually does the communicating; thus two implementations using the same protocol should be able to share data. A standard defines which protocol should be used by which computers, for what purposes. It may or may not define the protocol, but will set limits on changes to that protocol. }~ The war ,{[pg 167]}, between the TCP/IP and OSI was also a story of failure and surprising success: the story of a successful standard with international approval (the OSI protocols) eclipsed by the experimental, military-funded TCP/IP, which exemplified an alternative and unusual standards process. The moral-technical orders expressed by OSI and TCP/IP are, like that of UNIX, on the border between government, university, and industry; they represent conflicting social imaginaries in which power and legitimacy are organized differently and, as a result, expressed differently in the technology. 
-={moral and technical order;Networks:protools for+3;Open Systems Interconnection (OSI), as reference model+27;protocols:Open Systems Interconnection (OSI)+27|TCP/IP;TCP/IP (Transmission Control Protocol/Internet Protocol)+27;religious wars+3;social imaginary;standards process+3} +={moral and technical order;Networks:protocols for+3;Open Systems Interconnection (OSI):as reference model+27;protocols:Open Systems Interconnection (OSI)+27|TCP/IP;TCP/IP (Transmission Control Protocol/Internet Protocol)+27;religious wars+3;social imaginary;standards processes+3} OSI and TCP/IP started with different goals: OSI was intended to satisfy everyone, to be the complete and comprehensive model against which all competing implementations would be validated; TCP/IP, by contrast, emphasized the easy and robust interconnection of diverse networks. TCP/IP is a protocol developed by bootstrapping between standard and implementation, a mode exemplified by the Requests for Comments system that developed alongside them as part of the Arpanet project. OSI was a "model" or reference standard developed by internationally respected standards organizations. ={Arpanet (network)+18;Request for Comments (RFC)} @@ -1467,7 +1467,7 @@ One important feature united almost all of these experiments: the networks of th ={antitrust} TCP/IP and OSI have become emblematic of the split between the worlds of telecommunications and computing; the metaphors of religious wars or of blood feuds and cold wars were common.~{ Drake, "The Internet Religious War." }~ A particularly arch account from this period is Carl Malamud’s Exploring the Internet: A Technical Travelogue, which documents Malamud’s (physical) visits to Internet sites around the globe, discussions (and beer) with networking researchers on technical details of the networks they have created, and his own typically geeky, occasionally offensive takes on cultural difference.~{ Malamud, Exploring the Internet; see also Michael M. J.
Fischer, "Worlding Cyberspace." }~ A subtheme of the story is the religious war between Geneva (in particular the ITU) and the Internet: Malamud tells the story of asking the ITU to release its 19,000-page "blue book" of standards on the Internet, to facilitate its adoption and spread. -={Malmud, Carl+1;standards process+4} +={Malamud, Carl+1;standards processes+4} The resistance of the ITU and Malamud’s heroic if quixotic attempts are a parable of the moral-technical imaginaries of openness—and indeed, his story draws specifically on the usable past of Giordano Bruno.~{ The usable past of Giordano Bruno is invoked by Malamud to signal the heretical nature of his own commitment to openly publishing standards that ISO was opposed to releasing. Bruno’s fate at the hands of the Roman Inquisition hinged in some part on his acceptance of the Copernican cosmology, so he has been, like Galileo, a natural figure for revolutionary claims during the 1990s. }~ The "bruno" project demonstrates the gulf that exists between two models of legitimacy—those of ISO and the ITU—in which standards represent the legal and legitimate consensus of a regulated industry, approved by member nations, paid for and enforced by governments, and implemented and adhered to by corporations. ={Bruno, Giordano;Usable pasts;International Organization for Standardization (ISO)+3} @@ -1486,10 +1486,10 @@ Until the mid-1980s, the TCP/IP protocols were resolutely research-oriented, and ={Cerf, Vinton+2;Kahn, Robert;TCP/IP (Transmission Control Protocol/Internet Protocol):goals of+2} The explicit goal of TCP/IP was thus to share computer resources, not necessarily to connect two individuals or firms together, or to create a competitive market in networks or networking software. Sharing between different kinds of networks implied allowing the different networks to develop autonomously (as their creators and maintainers saw best), but without sacrificing the ability to continue sharing.
Years later, David Clark, chief Internet engineer for several years in the 1980s, gave a much more explicit explanation of the goals that led to the TCP/IP protocols. In particular, he suggested that the main overarching goal was not just to share resources but "to develop an effective technique for multiplexed utilization of existing interconnected networks," and he more explicitly stated the issue of control that faced the designers: "Networks represent administrative boundaries of control, and it was an ambition of this project to come to grips with the problem of integrating a number ,{[pg 173]}, of separately administrated entities into a common utility."~{ Clark, "The Design Philosophy of the DARPA Internet Protocols," 54-55. }~ By placing the goal of expandability first, the TCP/IP protocols were designed with a specific kind of simplicity in mind: the test of the protocols’ success was simply the ability to connect. -={Clark,David} +={Clark, David} By setting different goals, TCP/IP and OSI thus differed in terms of technical details; but they also differed in terms of their context and legitimacy, one being a product of international-standards bodies, the other of military-funded research experiments. The technical and organizational differences imply different processes for standardization, and it is the peculiar nature of the so-called Requests for Comments (RFC) process that gave TCP/IP one of its most distinctive features. The RFC system is widely recognized as a unique and serendipitous outcome of the research process of Arpanet.~{ RFCs are archived in many places, but the official site is RFC Editor, http://www.rfc-editor.org/. }~ In a thirty-year retrospective (published, naturally, as an RFC: RFC 2555), Vint Cerf says, "Hiding in the history of the RFCs is the history of human institutions for achieving cooperative work." 
He goes on to describe their evolution over the years: "When the RFCs were first produced, they had an almost 19th century character to them—letters exchanged in public debating the merits of various design choices for protocols in the ARPANET. As email and bulletin boards emerged from the fertile fabric of the network, the far-flung participants in this historic dialog began to make increasing use of the online medium to carry out the discussion—reducing the need for documenting the debate in the RFCs and, in some respects, leaving historians somewhat impoverished in the process. RFCs slowly became conclusions rather than debates."~{ RFC Editor, RFC 2555, 6. }~ -={standards process;Request for Comments (RFC)+2} +={standards processes;Request for Comments (RFC)+2} Increasingly, they also became part of a system of discussion and implementation in which participants created working software as part of an experiment in developing the standard, after which there was more discussion, then perhaps more implementation, and finally, a standard. The RFC process was a way to condense the process of standardization and validation into implementation; which is to say, the proof of open systems was in the successful connection of diverse networks, and the creation of a standard became a kind of ex post facto rubber-stamping of this demonstration. Any further improvement of the standard hinged on an improvement on the standard implementation because the standards that resulted were freely and widely available: "A user could request an RFC by email from his host computer and have it automatically delivered to his mailbox. . . . RFCs were also shared freely with official standards ,{[pg 174]}, bodies, manufacturers and vendors, other working groups, and universities. None of the RFCs were ever restricted or classified. This was no mean feat when you consider that they were being funded by DoD during the height of the Cold War."~{ Ibid., 11. 
}~ ={Software:implementation of;standards:implementation+9|validation of;Secrecy+1} @@ -1568,7 +1568,7 @@ Stallman’s GNU General Public License "hacks" the federal copyright law, as is ={Copyleft licenses (component of Free Software):as hack of copyright law+1;Copyright+1} Like all software since the 1980 copyright amendments, Free Software is copyrightable—and what’s more, automatically copyrighted as it is written (there is no longer any requirement to register). Copyright law grants the author (or the employer of the author) a number of strong rights over the dispensation of what has been written: rights to copy, distribute, and change the work.~{ Copyright Act of 1976, Pub. L. No. 94-553, 90 Stat. 2541, enacted 19 October 1976; and Copyright Amendments, Pub. L. No. 96-517, 94 Stat. 3015, 3028 (amending §101 and §117, title 17, United States Code, regarding computer programs), enacted 12 December 1980. All amendments since 1976 are listed at http://www.copyright.gov/title17/92preface.html. }~ Free Software’s hack is to immediately make use of these rights in order to abrogate the rights the programmer has been given, thus granting all subsequent licensees rights to copy, distribute, modify, and use the copyrighted software. Some licenses, like the GPL, add the further restriction that every licensee must offer the same terms to any subsequent licensee, others make no such restriction on subsequent uses. Thus, while statutory law suggests that individuals need strong rights and grants them, Free Software licenses effectively annul them in favor of other activities, such as sharing, porting, and forking software. It is for this reason that they have earned the name "copyleft."~{ The history of the copyright and software is discussed in Litman, Digital Copyright; Cohen et al., Copyright in a Global Information Economy; and Merges, Menell, and Lemley, Intellectual Property in the New Technological Age. 
}~ -={Copyright:changes in|rights granted by} +={Copyright:changes in 1976|rights granted by} This is a convenient ex post facto description, however. Neither Stallman nor anyone else started out with the intention of hacking copyright law. The hack of the Free Software licenses was a response to a complicated controversy over a very important invention, a tool that in turn enabled an invention called EMACS. The story of the controversy is well-known among hackers and geeks, but not often told, and not in any rich detail, outside of these small circles.~{ See Wayner, Free for All; Moody, Rebel Code; and Williams, Free as in Freedom. Although this story could be told simply by interviewing Stallman and James Gosling, both of whom are still alive and active in the software world, I have chosen to tell it through a detailed analysis of the Usenet and Arpanet archives of the controversy. The trade-off is between a kind of incomplete, fly-on-the-wall access to a moment in history and the likely revisionist retellings of those who lived through it. All of the messages referenced here are cited by their "Message-ID," which should allow anyone interested to access the original messages through Google Groups (http://groups.google.com). }~ @@ -1854,10 +1854,10 @@ The final component of Free Software is coordination. For many participants and ={Free Software:open source vs.;Open Source:Free Software vs.;peer production;practices:five components of Free Software+2;Source Code Management tools (SCMs)} Coordination is important because it collapses and resolves the distinction between technical and social forms into a meaningful ,{[pg 211]}, whole for participants. 
On the one hand, there is the coordination and management of people; on the other, there is the coordination of source code, patches, fixes, bug reports, versions, and distributions—but together there is a meaningful technosocial practice of managing, decision-making, and accounting that leads to the collaborative production of complex software and networks. Such coordination would be unexceptional, essentially mimicking long-familiar corporate practices of engineering, except for one key fact: it has no goals. Coordination in Free Software privileges adaptability over planning. This involves more than simply allowing any kind of modification; the structure of Free Software coordination actually gives precedence to a generalized openness to change, rather than to the following of shared plans, goals, or ideals dictated or controlled by a hierarchy of individuals.~{ On the distinction between adaptability and adaptation, see Federico Iannacci, "The Linux Managing Model," http://opensource.mit.edu/papers/iannacci2.pdf. Matt Ratto characterizes the activity of Linux-kernel developers as a "culture of re-working" and a "design for re-design," and captures the exquisite details of such a practice both in coding and in the discussion between developers, an activity he dubs the "pressure of openness" that "results as a contradiction between the need to maintain productive collaborative activity and the simultaneous need to remain open to new development directions" ("The Pressure of Openness," 112-38). }~ -={adaptability:planning vs.+1|as a form of critique+1|adaptation vs.;coordination (component of Free Software):individual virtuosity vs. hierarchical planning+2;critique, Free Software+1;goals, lack of in Free Software+1;hackers:curiosity and virtuosity of+1;hierarchy, in coordination+5;planning+1} +={adaptability:planning vs.+1|as a form of critique+1|adaptation vs.;coordination (component of Free Software):individual virtuosity vs. 
hierarchical planning+2;critique, Free Software as+1;goals, lack of in Free Software+1;hackers:curiosity and virtuosity of+1;hierarchy, in coordination+5;planning+1} Adaptability does not mean randomness or anarchy, however; it is a very specific way of resolving the tension between the individual curiosity and virtuosity of hackers, and the collective coordination necessary to create and use complex software and networks. No man is an island, but no archipelago is a nation, so to speak. Adaptability preserves the "joy" and "fun" of programming without sacrificing the careful engineering of a stable product. Linux and Apache should be understood as the results of this kind of coordination: experiments with adaptability that have worked, to the surprise of many who have insisted that complexity requires planning and hierarchy. Goals and planning are the province of governance—the practice of goal-setting, orientation, and definition of control—but adaptability is the province of critique, and this is why Free Software is a recursive public: it stands outside power and offers powerful criticism in the form of working alternatives. It is not the domain of the new—after all Linux is just a rewrite of UNIX—but the domain of critical and responsive public direction of a collective undertaking. -={Linux (Free Software project)+8;novelty, of free software;recursive public+1} +={Linux (Free Software project)+8;novelty, of Free Software;recursive public+1} Linux and Apache are more than pieces of software; they are organizations of an unfamiliar kind. My claim that they are "recursive publics" is useful insofar as it gives a name to a practice that is neither corporate nor academic, neither profit nor nonprofit, neither governmental nor nongovernmental. The concept of recursive public includes, within the spectrum of political activity, the creation, modification, and maintenance of software, networks, and legal documents. 
While a "public" in most theories is a body of ,{[pg 212]}, people and a discourse that give expressive form to some concern, "recursive public" is meant to suggest that geeks not only give expressive form to some set of concerns (e.g., that software should be free or that intellectual property rights are too expansive) but also give concrete infrastructural form to the means of expression itself. Linux and Apache are tools for creating networks by which expression of new kinds can be guaranteed and by which further infrastructural experimentation can be pursued. For geeks, hacking and programming are variants of free speech and freedom of assembly. ={public sphere:theories of;Apache (Free Software project)+4;experimentation;infrastructure} @@ -2083,7 +2083,7 @@ Both the Apache project and the Linux kernel project use SCMs. In the case of Ap While SCMs are in general good for managing conflicting changes, they can do so only up to a point. To allow anyone to commit a change, however, could result in a chaotic mess, just as difficult to disentangle as it would be without an SCM. In practice, therefore, most projects designate a handful of people as having the right to "commit" changes. The Apache project retained its voting scheme, for instance, but it became a way of voting for "committers" instead for patches themselves. Trusted committers—those with the mysterious "good taste," or technical intuition—became the core members of the group. The Linux kernel has also struggled with various issues surrounding SCMs and the management of responsibility they imply. The story of the so-called VGER tree and the creation of a new SCM called Bitkeeper is exemplary in this respect.~{ See Steven Weber, The Success of Open Source, 117-19; Moody, Rebel Code, 172-78. See also Shaikh and Cornford, "Version Management Tools." }~ By 1997, Linux developers had begun to use cvs to manage changes to the source code, though not without resistance. 
Torvalds was still in charge of the changes to the official stable tree, but as other "lieutenants" came on board, the complexity of the changes to the kernel grew. One such lieutenant was Dave Miller, who maintained a "mirror" of the stable Linux kernel tree, the VGER tree, on a server at Rutgers. In September 1998 a fight broke out among Linux kernel developers over two related issues: one, the fact that Torvalds was failing to incorporate (patch) contributions that had been forwarded to him by various people, including his lieutenants; and two, as a result, the VGER cvs repository was no longer in synch with the stable tree maintained by Torvalds. Two different versions of Linux threatened to emerge. -={Miller, Dave;Source Code Management tools (SCMs):see also Bitkeeper;Concurrent Versioning System (cvs):Linux and;Linux (Free Software project):VGER tree and+2;Bitkeeper (Source Code Management software)+12;Torvalds, Linux:in bitkeeper controversy+12} +={Miller, Dave;Source Code Management tools (SCMs):see also Bitkeeper;Concurrent Versioning System (cvs):Linux and;Linux (Free Software project):VGER tree and+2;Bitkeeper (Source Code Management software)+12;Torvalds, Linus:in bitkeeper controversy+12} A great deal of yelling ensued, as nicely captured in Moody’s Rebel Code, culminating in the famous phrase, uttered by Larry McVoy: "Linus does not scale." The meaning of this phrase is that the ability of Linux to grow into an ever larger project with increasing complexity, one which can handle myriad uses and functions (to "scale" up), is constrained by the fact that there is only one Linus Torvalds. By all accounts, Linus was and is excellent at what he does—but there is only one Linus. The danger of this situation is the danger of a fork. A fork would mean one or more new versions would proliferate under new leadership, a situation much like ,{[pg 233]}, the spread of UNIX. Both the licenses and the SCMs are designed to facilitate this, but only as a last resort. 
Forking also implies dilution and confusion—competing versions of the same thing and potentially unmanageable incompatibilities. ={McVoy, Larry+11;Moody, Glyn;forking:in Linux+1} @@ -2186,7 +2186,7 @@ In part III I confront this question directly. Indeed, it was this question that ={cultural significance;recursive public+3;Free Software:components of+1} Connexions modulates all of the components except that of the movement (there is, as of yet, no real "Free Textbook" movement, but the "Open Access" movement is a close second cousin).~{ In January 2005, when I first wrote this analysis, this was true. By April 2006, the Hewlett Foundation had convened the Open Educational Resources "movement" as something that would transform the production and circulation of textbooks like those created by Connexions. Indeed, in Rich Baraniuk’s report for Hewlett, the first paragraph reads: "A grassroots movement is on the verge of sweeping through the academic world. The open education movement is based on a set of intuitions that are shared by a remarkably wide range of academics: that knowledge should be free and open to use and re-use; that collaboration should be easier, not harder; that people should receive credit and kudos for contributing to education and research; and that concepts and ideas are linked in unusual and surprising ways and not the simple linear forms that textbooks present. Open education promises to fundamentally change the way authors, instructors, and students interact worldwide" (Baraniuk and King, "Connexions"). (In a nice confirmation of just how embedded participation can become in anthropology, Baraniuk cribbed the second sentence from something I had written two years earlier as part of a description of what I thought Connexions hoped to achieve.) The "movement" as such still does not quite exist, but the momentum for it is clearly part of the actions that Hewlett hopes to achieve. 
}~ Perhaps the most complex modulation concerns coordination—changes to the practice of coordination and collaboration in academic-textbook creation in particular, and more generally to the nature of collaboration and coordination of knowledge in science and scholarship generally. -={coordination (components of Free Software);movement (component of Free Software)+2} +={coordination (component of Free Software);movement (component of Free Software)+2} Connexions emerged out of Free Software, and not, as one might expect, out of education, textbook writing, distance education, or any of those areas that are topically connected to pedagogy. That is to say, the people involved did not come to their project by attempting to deal with a problem salient to education and teaching as much as they did so through the problems raised by Free Software and the question of how those problems apply to university textbooks. Similarly, a second project, Creative Commons, also emerged out of a direct engagement with and exploration of Free Software, and not out of any legal movement or scholarly commitment to the critique of intellectual-property law or, more important, out of any desire to transform the entertainment industry. Both projects are resolutely committed to experimenting with the given practices of Free Software—to testing their limits and changing them where they can—and this is what makes them vibrant, risky, and potentially illuminating as cases of a recursive public. ={affinity (of geeks);commons+1;Creative Commons+1;pedagogy;recursive public:examples of+1} @@ -2208,7 +2208,7 @@ Around 1998 or 1999, Rich decided that it was time for him to write a textbook o ={Burris, C. Sidney;Connexions project:textbooks and+4;Rice University} At about the same time as his idea for a textbook, Rich’s research group was switching over to Linux, and Rich was first learning about Open Source and the emergence of a fully free operating system created entirely by volunteers. 
It isn’t clear what Rich’s aha! moment was, other than simply when he came to an understanding that such a thing as Linux was actually possible. Nonetheless, at some point, Rich had the idea that his textbook could be an Open Source textbook, that is, a textbook created not just by him, but by DSP researchers all over the world, and made available to everyone to make use of and modify and improve as they saw fit, just like Linux. Together with Brent Hendricks, Yan David Erlich, ,{[pg 249]}, and Ross Reedstrom, all of whom, as geeks, had a deep familiarity with the history and practices of Free and Open Source Software, Rich started to conceptualize a system; they started to think about modulations of different components of Free and Open Source Software. The idea of a Free Software textbook repository slowly took shape. -={Linux (Free Software project);Open Source:inspiration for Connexions+27;Reedstorm, Ross} +={Linux (Free Software project);Open Source:inspiration for Connexions+27;Reedstrom, Ross} Thus, Connexions: an "open content repository of high-quality educational materials." These "textbooks" very quickly evolved into something else: "modules" of content, something that has never been sharply defined, but which corresponds more or less to a small chunk of teachable information, like two or three pages in a textbook. Such modules are much easier to conceive of in sciences like mathematics or biology, in which textbooks are often multiauthored collections, finely divided into short chapters with diagrams, exercises, theorems, or programs. Modules lend themselves much less well to a model of humanities or social-science scholarship based in reading texts, discussion, critique, and comparison—and this bias is a clear reflection of what Brent, Ross, and Rich knew best in terms of teaching and writing. 
Indeed, the project’s frequent recourse to the image of an assembly-line model of knowledge production often confirms the worst fears of humanists and educators when they first encounter Connexions. The image suggests that knowledge comes in prepackaged and colorfully branded tidbits for the delectation of undergrads, rather than characterizing knowledge as a state of being or as a process. ={Connexions project:model of learning in|modules in+1} @@ -2224,7 +2224,7 @@ Free Software—and, in particular, Open Source in the guise of "self-organizing ={Connexions project:relationship to education+2;distance learning+2} Thus, Rich styled Connexions as more than just a factory of knowledge—it would be a community or culture developing richly associative and novel kinds of textbooks—and as much more than just distance education. Indeed, Connexions was not the only such project busy differentiating itself from the perceived dangers of distance education. In April 2001 MIT had announced that it would make the content of all of its courses available for free online in a project strategically called OpenCourseWare (OCW). Such news could only bring attention to MIT, which explicitly positioned the announcement as a kind of final death blow to the idea of distance education, by saying that what students pay $35,000 and up for per year is not "knowledge"—which is free—but the experience of being at MIT. The announcement created pure profit from the perspective of MIT’s reputation as a generator and disseminator of scientific knowledge, but the project did not emerge directly out of an interest in mimicking the success of Open Source. That angle was ,{[pg 252]}, provided ultimately by the computer-science professor Hal Abelson, whose deep understanding of the history and growth of Free Software came from his direct involvement in it as a long-standing member of the computer-science community at MIT. 
OCW emerged most proximately from the strange result of a committee report, commissioned by the provost, on how MIT should position itself in the "distance/e-learning" field. The surprising response: don’t do it, give the content away and add value to the campus teaching and research experience instead.~{ "Provost Announces Formation of Council on Educational Technology," MIT Tech Talk, 29 September 1999, http://web.mit.edu/newsoffice/1999/council-0929.html. }~ -={Abelson, Hal;Massachusetts Institute of Technology (MIT):open courseware and+2;Open CourseWare (OCW)+2;Connexions poject:Open CourseWare+2} +={Abelson, Hal;Massachusetts Institute of Technology (MIT):open courseware and+2;Open CourseWare (OCW)+2;Connexions project:Open CourseWare+2} OCW, Connexions, and distance learning, therefore, while all ostensibly interested in combining education with the networks and software, emerged out of different demands and different places. While the profit-driven demand of distance learning fueled many attempts around the country, it stalled in the case of OCW, largely because the final MIT Council on Educational Technology report that recommended OCW was issued at the same time as the first plunge in the stock market (April 2000). Such issues were not a core factor in the development of Connexions, which is not to say that the problems of funding and sustainability have not always been important concerns, only that genesis of the project was not at the administrative level or due to concerns about distance education. For Rich, Brent, and Ross the core commitment was to openness and to the success of Open Source as an experiment with massive, distributed, Internet-based, collaborative production of software—their commitment to this has been, from the beginning, completely and adamantly unwavering. Nevertheless, the project has involved modulations of the core features of Free Software.
Such modulations depend, to a certain extent, on being a project that emerges out of the ideas and practices of Free Software, rather than, as in the case of OCW, one founded as a result of conflicting goals (profit and academic freedom) and resulting in a strategic use of public relations to increase the symbolic power of the university over its fiscal growth. ={Reedstrom, Ross} @@ -2292,7 +2292,7 @@ Creative Commons provided more than licenses, though. It was part of a social im ={moral and technical order;social imaginary} Creative Commons was thus a back-door approach: if the laws could not be changed, then people should be given the tools they needed to work around those laws. Understanding how Creative Commons was conceived requires seeing it as a modulation of both the notion of "source code" and the modulation of "copyright licenses." But the modulations take place in that context of a changing legal system that was so unfamiliar to Stallman and his EMACS users, a legal system responding to new forms of software, networks, and devices. For instance, the changes to the Copyright Act of 1976 created an unintended effect that Creative Commons would ultimately seize on. By eliminating the requirement to register copyrighted works (essentially granting copyright as soon as the ,{[pg 261]}, work is "fixed in a tangible medium"), the copyright law created a situation wherein there was no explicit way in which a work could be intentionally placed in the public domain. Practically speaking an author could declare that a work was in the public domain, but legally speaking the risk would be borne entirely by the person who sought to make use of that work: to copy it, transform it, sell it, and so on. 
With the explosion of interest in the Internet, the problem ramified exponentially; it became impossible to know whether someone who had placed a text, an image, a song, or a video online intended for others to make use of it—even if the author explicitly declared it "in the public domain." Creative Commons licenses were thus conceived and rhetorically positioned as tools for making explicit exactly what uses could be made of a specific work. They protected the rights of people who sought to make use of "culture" (i.e., materials and ideas and works they had not authored), an approach that Lessig often summed up by saying, "Culture always builds on the past." -={copyright:requirement to register;sharing source code (component of Free Software):modulations of;creative commons:activism of+1;public domain+4} +={copyright:requirement to register;sharing source code (component of Free Software):modulations of;Creative Commons:activism of+1;public domain+4} The background to and context of the emergence of Creative Commons was of course much more complicated and fraught. Concerns ranged from the plights of university libraries with regard to high-priced journals, to the problem of documentary filmmakers unable to afford, or even find the owners of, rights to use images or snippets in films, to the high-profile fights over online music trading, Napster, and the RIAA. Over the course of four years, Lessig and the other founders of Creative Commons would address all of these issues in books, in countless talks and presentations and conferences around the world, online and off, among audiences ranging from software developers to entrepreneurs to musicians to bloggers to scientists. 
={Napster;Recording Industry Association of America (RIAA)} diff --git a/data/v2/samples/democratizing_innovation.eric_von_hippel.sst b/data/v2/samples/democratizing_innovation.eric_von_hippel.sst index ee567f0..f47afc5 100644 --- a/data/v2/samples/democratizing_innovation.eric_von_hippel.sst +++ b/data/v2/samples/democratizing_innovation.eric_von_hippel.sst @@ -105,7 +105,7 @@ The whole sport of high-performance windsurfing really started from that. As soo By 1998, more than a million people were engaged in windsurfing, and a large fraction of the boards sold incorporated the user-developed innovations for the high-performance sport. The user-centered innovation process just illustrated is in sharp contrast to the traditional model, in which products and services are developed by manufacturers in a closed way, the manufacturers using patents, copyrights, and other protections to prevent imitators from free riding on their innovation investments. In this traditional model, a user's only role is to have needs, which manufacturers then identify and fill by designing and producing new products. The manufacturer-centric model does fit some fields and conditions. However, a growing body of empirical work shows that users are the first to develop many and perhaps most new industrial and consumer products. Further, the contribution of users is growing steadily larger as a result of continuing advances in computer and communications capabilities. 
-={Intellectual property rights:See also Private-collective innovation|copyrights and|innovation and+2;Copyrights:See Intellectual property rights;Manufacturers:government policy and+2;Product development+2;Users:See also Lead Users|government policy and;Economic benefit, expectations of by lead users:by manufacturers+;Economic benefit, expectations of by lead users:by manufacturers+12;Government policy:manufacturer innovation and+2;Manufacturers:expectations of economic benefit by+26} +={Intellectual property rights:See also Private-collective innovation|copyrights and|innovation and+2;Copyrights:See Intellectual property rights;Manufacturers:government policy and+2;Product development+2;Users:government policy and;Economic benefit, expectations of by lead users:by manufacturers+5;Economic benefit, expectations of by lead users:by manufacturers+12;Government policy:manufacturer innovation and+2;Manufacturers:expectations of economic benefit by+26} In this book I explain in detail how the emerging process of user-centric, democratized innovation works. I also explain how innovation by users provides a very necessary complement to and feedstock for manufacturer innovation. @@ -115,7 +115,7 @@ The ongoing shift of innovation to users has some very attractive qualities. It % check government policy Users, as the term will be used in this book, are firms or individual consumers that expect to benefit from /{using}/ a product or a service. In contrast, manufacturers expect to benefit from /{selling}/ a product or a service. A firm or an individual can have different relationships to different products or innovations. For example, Boeing is a manufacturer of airplanes, but it is also a user of machine tools. If we were examining innovations developed by Boeing for the airplanes it sells, we would consider Boeing a manufacturer-innovator in those cases. 
But if we were considering innovations in metal-forming machinery developed by Boeing for in-house use in building airplanes, we would categorize those as user-developed innovations and would categorize Boeing as a user-innovator in those cases. -={Users:See also Lead users|characteristics of+2;Manufacturers:characteristics of+2} +={Users:characteristics of+2;Manufacturers:characteristics of+2} Innovation user and innovation manufacturer are the two general "functional" relationships between innovator and innovation. Users are unique in that they alone benefit /{directly}/ from innovations. All others (here lumped under the term "manufacturers") must sell innovation-related products or services to users, indirectly or directly, in order to profit from innovations. Thus, in order to profit, inventors must sell or license knowledge related to innovations, and manufacturers must sell products or services incorporating innovations. Similarly, suppliers of innovation-related materials or services---unless they have direct use for the innovations---must sell the materials or services in order to profit from the innovations. ={Innovation:See also Innovation communities|functional sources of;Suppliers} @@ -156,7 +156,7 @@ Research provides a firm grounding for these empirical findings. The two definin User-innovators with stronger "lead user" characteristics develop innovations having higher appeal in the general marketplace. Estimated OLS function: Y = 2.06 + 0.57x, where Y represents attractiveness of innovation and x represents lead-user-ness of respondent. Adjusted R^{2}^ = 0.281; p = 0.002; n = 30. Source of data: Franke and von Hippel 2003. 
!_ Why Many Users Want Custom Products (Chapter 3) -={Custom products:heterogeneity of user needs and+2;User need+2;Users:See also Lead users|innovate-or-buy decisions by+8|needs of+2} +={Custom products:heterogeneity of user needs and+2;User need+2;Users:innovate-or-buy decisions by+8|needs of+2} Why do so many users develop or modify products for their own use? Users may innovate if and as they want something that is not available on the market and are able and willing to pay for its development. It is likely that many users do not find what they want on the market. Meta-analysis of market-segmentation studies suggests that users' needs for products are highly heterogeneous in many fields (Franke and Reisinger 2003). ={Reisinger, H.} @@ -165,7 +165,7 @@ Mass manufacturers tend to follow a strategy of developing products that are des ={Apache web server software;Manufacturers:lead users and} !_ Users' Innovate-or-Buy Decisions (Chapter 4) -={Custom products:heterogeneity of user needs and+3|manufacturers and+3|agency costs and+2;User need+3;Users:needs of+3;Manufacturers:innovation and+9|innovate-or-buy decisions and+4;Users:See also Lead Users|agency costs and+2} +={Custom products:heterogeneity of user needs and+3|manufacturers and+3|agency costs and+2;User need+3;Users:needs of+3;Manufacturers:innovation and+9|innovate-or-buy decisions and+4;Users:agency costs and+2} Even if many users want "exactly right products" and are willing and able to pay for their development, why do users often do this for themselves rather than hire a custom manufacturer to develop a special just-right product for them? After all, custom manufacturers specialize in developing products for one or a few users. Since these firms are specialists, it is possible that they could design and build custom products for individual users or user firms faster, better, or cheaper than users could do this for themselves. 
Despite this possibility, several factors can drive users to innovate rather than buy. Both in the case of user firms and in the case of individual user-innovators, agency costs play a major role. In the case of individual user-innovators, enjoyment of the innovation process can also be important. ={Agency costs+1;Manufacturers:custom products and+2;Custom products:users and+3;Economic benefit, expectations of by lead users:by manufacturers+13} @@ -180,7 +180,7 @@ A small model of the innovate-or-buy decision follows. This model shows in a qua ={Innovation communities:social welfare, and;Manufacturers:social welfare and+21;Social welfare:manufacturer innovation and+21|user innovation and+21} Chapter 4 concludes by pointing out that an additional incentive can drive individual user-innovators to innovate rather than buy: they may value the /{process}/ of innovating because of the enjoyment or learning that it brings them. It might seem strange that user-innovators can enjoy product development enough to want to do it themselves---after all, manufacturers pay their product developers to do such work! On the other hand, it is also clear that enjoyment of problem solving is a motivator for many individual problem solvers in at least some fields. Consider for example the millions of crossword-puzzle aficionados. Clearly, for these individuals enjoyment of the problem-solving process rather than the solution is the goal. One can easily test this by attempting to offer a puzzle solver a completed puzzle---the very output he or she is working so hard to create. One will very likely be rejected with the rebuke that one should not spoil the fun! Pleasure as a motivator can apply to the development of commercially useful innovations as well. Studies of the motivations of volunteer contributors of code to widely used software products have shown that these individuals too are often strongly motivated to innovate by the joy and learning they find in this work (Hertel et al. 
2003; Lakhani and Wolf 2005). -={Hertel, G.;Lakhani, K.;Wolf, B.;Innovation process;User:See also Lead users|innovation process and, 7;Free software:See also Open source software;Hackers;Herrmann, S.} +={Hertel, G.;Lakhani, K.;Wolf, B.;Innovation process;Users:innovation process and+7;Free software:See also Open source software;Hackers;Herrmann, S.} !_ Users' Low-Cost Innovation Niches (Chapter 5) ={Users:low-cost innovation niches of+3} @@ -214,7 +214,7 @@ Active efforts by innovators to freely reveal---as opposed to sullen acceptance- ={Innovation communities+3} Innovation by users tends to be widely distributed rather than concentrated among just a very few very innovative users. As a result, it is important for user-innovators to find ways to combine and leverage their efforts. Users achieve this by engaging in many forms of cooperation. Direct, informal user-to-user cooperation (assisting others to innovate, answering questions, and so on) is common. Organized cooperation is also common, with users joining together in networks and communities that provide useful structures and tools for their interactions and for the distribution of innovations. Innovation communities can increase the speed and effectiveness with which users and also manufacturers can develop and test and diffuse their innovations. They also can greatly increase the ease with which innovators can build larger systems from interlinkable modules created by community participants. -={Users:innovation communities+2} +={Users:innovation communities and+2} Free and open source software projects are a relatively well-developed and very successful form of Internet-based innovation community. However, innovation communities are by no means restricted to software or even to information products, and they can play a major role in the development of physical products. 
Franke and Shah (2003) have documented the value that user innovation communities can provide to user-innovators developing physical products in the field of sporting equipment. The analogy to open source innovation communities is clear. ={Franke, N.;Shah, S.;Free software;Innovation communities:open source software and|physical products and|sporting equipment and;Open source software:innovation communities and} @@ -304,7 +304,7 @@ The studies cited in table 2.1 clearly show that a lot of product development an !_ Table 2.1 Many respondents reported developing or modifying products for their own use in the eight product areas listed here. -={Lüthje, C.+1;Urban, G.+1;Franke, N.+1;Herstatt, C.+1;Morrison, Pamela+1;von Hippel, E.+1;Lead users:Apache web server software and+1r|library information search system and+1|mountain biking and+1|outdoor consumer products and+1|pipe hanger hardware and+1|printed circuit CAD software and+1|surgical equipment and+;Library information search system+1;Mountain biking+1;Outdoor products+1;Pipe hanger hardware+1;Printed circuit CAD software+1;Surgical equipment+1} +={Lüthje, C.+1;Urban, G.+1;Franke, N.+1;Herstatt, C.+1;Morrison, Pamela+1;von Hippel, E.+1;Lead users:Apache web server software and+1r|library information search system and+1|mountain biking and+1|outdoor consumer products and+1|pipe hanger hardware and+1|printed circuit CAD software and+1|surgical equipment and+3;Library information search system+1;Mountain biking+1;Outdoor products+1;Pipe hanger hardware+1;Printed circuit CAD software+1;Surgical equipment+1} table{~h c4; 20; 45; 15; 20; @@ -844,7 +844,7 @@ Those interested can easily enhance their intuitions about heterogenity of user ={Users:innovation and+4|innovate-or-buy decisions by+74} Why does a user wanting a custom product sometimes innovate for itself rather than buying from a manufacturer of custom products? There is, after all, a choice---at least it would seem so. 
However, if a user with the resources and willingness to pay does decide to buy, it may be surprised to discover that it is not so easy to find a manufacturer willing to make exactly what an individual user wants. Of course, we all know that mass manufacturers with businesses built around providing standard products in large numbers will be reluctant to accommodate special requests. Consumers know this too, and few will be so foolish as to contact a major soup producer like Campbell's with a request for a special, "just-right" can of soup. But what about manufacturers that specialize in custom products? Isn't it their business to respond to special requests? To understand which way the innovate-or-buy choice will go, one must consider both transaction costs and information asymmetries specific to users and manufacturers. I will talk mainly about transaction costs in this chapter and mainly about information asymmetries in chapter 5. -={Custom products:users and+3;Innovation process+3;Manufacturers:innovation and+3;Transaction costs+3;Users:innovation process+3|and paying for innovations} +={Custom products:users and+3;Innovation process+3;Manufacturers:innovation and+3;Transaction costs+3;Users:innovation process and+3|and paying for innovations} I begin this chapter by discussing four specific and significant transaction costs that affect users' innovate-or-buy decisions. Next I review a case study that illustrates these. Then, I use a simple quantitative model to further explore when user firms will find it more cost-effective to develop a solution---a new product or service---for themselves rather than hiring a manufacturer to solve the problem for them. Finally, I point out that /{individual}/ users can sometimes be more inclined to innovate than one might expect because they sometimes value the /{process}/ of innovating as well as the novel product or service that is created. 
@@ -1841,7 +1841,7 @@ Users that innovate and wish to freely diffuse innovation-related information ar ={Lessig, L.} !_ R&D Subsidies and Tax Credits -={Government policy:&D subsidies and+3} +={Government policy:R&D subsidies and+3} In many countries, manufacturing firms are rewarded for their innovative activity by R&D subsidies and tax credits. Such measures can make economic sense if average social returns to innovation are significantly higher than average private returns, as has been found by Mansfield et al. (1977) and others. However, important innovative activities carried out by users are often not similarly rewarded, because they tend to not be documentable as formal R&D activities. As we have seen, users tend to develop innovations in the course of "doing" in their normal use environments. Bresnahan and Greenstein (1996a) make a similar point. They investigate the role of "co-invention" in the move by users from mainframe to client-server architecture.~{ See also Bresnahan and Greenstein 1996b; Bresnahan and Saloner 1997; Saloner and Steinmueller 1996. }~ By "co-invention" Bresnahan and Greenstein mean organizational changes and innovations developed and implemented by users that are required to take full advantage of a new invention. They point out the high importance that co-invention has for realizing social returns from innovation. They consider the federal government's support for creating "national information infrastructures" insufficient or misallocated, since they view co-invention is the bottleneck for social returns and likely the highest value locus for invention. 
={Bresnahan, T.;Greenstein, S.;Mansfield, E.;Users:co-invention and} @@ -2270,7 +2270,7 @@ _* Recall that Urban and von Hippel (1988) tested the relative commercial attrac ={Urban, G.;Printed circuit CAD software} _* Herstatt and von Hippel (1992) documented a lead user project seeking to develop a new line of pipe hangers---hardware used to attach pipes to the ceilings of commercial buildings. Hilti, a major manufacturer of construction-related equipment and products, conducted the project. The firm introduced a new line of pipe hanger products based on the lead user concept and a post-study evaluation has shown that this line has become a major commercial success for Hilti. -={Herstatt;Pipe hanger hardware} +={Herstatt, C.;Pipe hanger hardware} _* Olson and Bakke (2001) report on two lead user studies carried out by Cinet, a leading IT systems integrator in Norway, for the firm's two major product areas, desktop personal computers, and Symfoni application GroupWare. These projects were very successful, with most of the ideas incorporated into next-generation products having been collected from lead users. ={Bakke, G.;Olson, E.} diff --git a/data/v2/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst b/data/v2/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst index 563dd23..213c76e 100644 --- a/data/v2/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst +++ b/data/v2/samples/free_as_in_freedom.richard_stallman_crusade_for_free_software.sam_williams.sst @@ -2026,7 +2026,7 @@ Although not the first person to view software as public property, Stallman is g Predicting the future is risky sport, but most people, when presented with the question, seemed eager to bite. "One hundred years from now, Richard and a couple of other people are going to deserve more than a footnote," says Moglen. "They're going to be viewed as the main line of the story." 
The "couple other people" Moglen nominates for future textbook chapters include John Gilmore, Stallman's GPL advisor and future founder of the Electronic Frontier Foundation, and Theodor Holm Nelson, a.k.a. Ted Nelson, author of the 1982 book, Literary Machines. Moglen says Stallman, Nelson, and Gilmore each stand out in historically significant, nonoverlapping ways. He credits Nelson, commonly considered to have coined the term "hypertext," for identifying the predicament of information ownership in the digital age. Gilmore and Stallman, meanwhile, earn notable credit for identifying the negative political effects of information control and building organizations-the Electronic Frontier Foundation in the case of Gilmore and the Free Software Foundation in the case of Stallman-to counteract those effects. Of the two, however, Moglen sees Stallman's activities as more personal and less political in nature. -={Electronic Frontier Foundation;Gilmore, John;Nelson, Theodor Holm+2;Nelson Ted+2} +={Electronic Frontier Foundation;Gilmore, John;Nelson, Theodor Holm+2;Nelson, Ted+2} "Richard was unique in that the ethical implications of unfree software were particularly clear to him at an early moment," says Moglen. "This has a lot to do with Richard's personality, which lots of people will, when writing about him, try to depict as epiphenomenal or even a drawback in Richard Stallman's own life work." diff --git a/data/v2/samples/the_wealth_of_networks.yochai_benkler.sst b/data/v2/samples/the_wealth_of_networks.yochai_benkler.sst index d45d6be..67a8d62 100644 --- a/data/v2/samples/the_wealth_of_networks.yochai_benkler.sst +++ b/data/v2/samples/the_wealth_of_networks.yochai_benkler.sst @@ -75,7 +75,7 @@ Much of the early work in this project was done at New York University, whose la Since 2001, first as a visitor and now as a member, I have had the remarkable pleasure of being part of the intellectual community that is Yale Law School. 
The book in its present form, structure, and emphasis is a direct reflection of my immersion in this wonderful community. Practically every single one of my colleagues has read articles I have written over this period, attended workshops where I presented my work, provided comments that helped to improve the articles--and through them, this book, as well. I owe each and every one of them thanks, not least to Tony Kronman, who made me see that it would be so. To list them all would be redundant. To list some would inevitably underrepresent the various contributions they have made. Still, I will try to say a few of the special thanks, owing much yet to ,{[pg xii]}, those I will not name. Working out the economics was a precondition of being able to make the core political claims. Bob Ellickson, Dan Kahan, and Carol Rose all engaged deeply with questions of reciprocity and commons-based production, while Jim Whitman kept my feet to the fire on the relationship to the anthropology of the gift. Ian Ayres, Ron Daniels during his visit, Al Klevorick, George Priest, Susan Rose-Ackerman, and Alan Schwartz provided much-needed mixtures of skepticism and help in constructing the arguments that would allay it. Akhil Amar, Owen Fiss, Jerry Mashaw, Robert Post, Jed Rubenfeld, Reva Siegal, and Kenji Yoshino helped me work on the normative and constitutional questions. The turn I took to focusing on global development as the core aspect of the implications for justice, as it is in chapter 9, resulted from an invitation from Harold Koh and Oona Hathaway to speak at their seminar on globalization, and their thoughtful comments to my paper. The greatest influence on that turn has been Amy Kapczynski's work as a fellow at Yale, and with her, the students who invited me to work with them on university licensing policy, in particular, Sam Chaifetz. -Oddly enough, I have never had the proper context in which to give two more basic thanks.
My father, who was swept up in the resistance to British colonialism and later in Israel's War of Independence, dropped out of high school. He was left with a passionate intellectual hunger and a voracious appetite for reading. He died too young to even imagine sitting, as I do today with my own sons, with the greatest library in human history right there, at the dinner table, with us. But he would have loved it. Another great debt is to David Grais, who spent many hours mentoring me in my first law job, bought me my first copy of Strunk and White, and, for all practical purposes, taught me how to write in English; as he reads these words, he will be mortified, I fear, to be associated with a work of authorship as undisciplined as this, with so many excessively long sentences, replete with dependent clauses and unnecessarily complex formulations of quite simple ideas. +Oddly enough, I have *{never had the proper context}* in which to give two more basic thanks. My father, who was swept up in the resistance to British colonialism and later in Israel's War of Independence, dropped out of high school. He was left with a passionate intellectual hunger and a voracious appetite for reading. He died too young to even imagine sitting, as I do today with my own sons, with the greatest library in human history right there, at the dinner table, with us. But he would have loved it. Another great debt is to David Grais, who spent many hours mentoring me in my first law job, bought me my first copy of Strunk and White, and, for all practical purposes, taught me how to write in English; as he reads these words, he will be mortified, I fear, to be associated with a work of authorship as undisciplined as this, with so many excessively long sentences, replete with dependent clauses and unnecessarily complex formulations of quite simple ideas. 
Finally, to my best friend and tag-team partner in this tussle we call life, Deborah Schrag, with whom I have shared nicely more or less everything since we were barely adults. ,{[pg 1]}, @@ -89,7 +89,7 @@ A series of changes in the technologies, economic organization, and social pract The rise of greater scope for individual and cooperative nonmarket production of information and culture, however, threatens the incumbents of the industrial information economy. At the beginning of the twenty-first century, we find ourselves in the midst of a battle over the institutional ecology of the digital environment. A wide range of laws and institutions-- from broad areas like telecommunications, copyright, or international trade regulation, to minutiae like the rules for registering domain names or whether digital television receivers will be required by law to recognize a particular code--are being tugged and warped in efforts to tilt the playing field toward one way of doing things or the other. How these battles turn out over the next decade or so will likely have a significant effect on how we come to know what is going on in the world we occupy, and to what extent and in what forms we will be able--as autonomous individuals, as citizens, and as participants in cultures and communities--to affect how we and others see the world as it is and as it might be. 2~ THE EMERGENCE OF THE NETWORKED INFORMATION ECONOMY -={information economy:emergence of+9;networked environment policy+52;networked environment policy:emergence of+9} +={information economy:emergence of+9;networked information economy+52|emergence of+9} The most advanced economies in the world today have made two parallel shifts that, paradoxically, make possible a significant attenuation of the limitations that market-based production places on the pursuit of the political ,{[pg 3]}, values central to liberal societies. 
The first move, in the making for more than a century, is to an economy centered on information (financial services, accounting, software, science) and cultural (films, music) production, and the manipulation of symbols (from making sneakers to branding them and manufacturing the cultural significance of the Swoosh). The second is the move to a communications environment built on cheap processors with high computation capabilities, interconnected in a pervasive network--the phenomenon we associate with the Internet. It is this second shift that allows for an increasing role for nonmarket production in the information and cultural production sector, organized in a radically more decentralized pattern than was true of this sector in the twentieth century. The first shift means that these new patterns of production--nonmarket and radically decentralized--will emerge, if permitted, at the core, rather than the periphery of the most advanced economies. It promises to enable social production and exchange to play a much larger role, alongside property- and market-based production, than they ever have in modern democracies. ={nonmarket information producers+4;physical constraints on information production+2;production of information:physical constraints on+2} @@ -116,7 +116,7 @@ In the networked information economy, the physical capital required for producti Because the presence and importance of nonmarket production has become so counterintuitive to people living in market-based economies at the end of the twentieth century, part I of this volume is fairly detailed and technical; overcoming what we intuitively "know" requires disciplined analysis. Readers who are not inclined toward economic analysis should at least read the introduction to part I, the segments entitled "When Information Production Meets the Computer Network" and "Diversity of Strategies in our Current Production System" in chapter 2, and the case studies in chapter 3.
These should provide enough of an intuitive feel for what I mean by the diversity of production strategies for information and the emergence of nonmarket individual and cooperative production, to serve as the basis for the more normatively oriented parts of the book. Readers who are genuinely skeptical of the possibility that nonmarket production is sustainable and effective, and in many cases is an efficient strategy for information, knowledge, and cultural production, should take the time to read part I in its entirety. The emergence of precisely this possibility and practice lies at the very heart of my claims about the ways in which liberal commitments are translated into lived experiences in the networked environment, and forms the factual foundation of the political-theoretical and the institutional-legal discussion that occupies the remainder of the book. 2~ NETWORKED INFORMATION ECONOMY AND LIBERAL, DEMOCRATIC SOCIETIES -={democratic societies+15;information economy:democracy and liberalism+15;liberal societies+15;networked environment policy:democracy and liberalism+15} +={democratic societies+15;information economy:democracy and liberalism+15;liberal societies+15;networked information economy:democracy and liberalism+15} How we make information, how we get it, how we speak to others, and how others speak to us are core components of the shape of freedom in any society. Part II of this book provides a detailed look at how the changes in the technological, economic, and social affordances of the networked information environment affect a series of core commitments of a wide range of liberal democracies. The basic claim is that the diversity of ways of organizing information production and use opens a range of possibilities for pursuing % ,{[pg 8]}, the core political values of liberal societies--individual freedom, a more genuinely participatory political system, a critical culture, and social justice. 
These values provide the vectors of political morality along which the shape and dimensions of any liberal society can be plotted. Because their practical policy implications are often contradictory, rather than complementary, the pursuit of each places certain limits on how we pursue the others, leading different liberal societies to respect them in different patterns. How much a society constrains the democratic decision-making powers of the majority in favor of individual freedom, or to what extent it pursues social justice, have always been attributes that define the political contours and nature of that society. But the economics of industrial production, and our pursuit of productivity and growth, have imposed a limit on how we can pursue any mix of arrangements to implement our commitments to freedom and justice. Singapore is commonly trotted out as an extreme example of the trade-off of freedom for welfare, but all democracies with advanced capitalist economies have made some such trade-off. Predictions of how well we will be able to feed ourselves are always an important consideration in thinking about whether, for example, to democratize wheat production or make it more egalitarian. Efforts to push workplace democracy have also often foundered on the shoals--real or imagined--of these limits, as have many plans for redistribution in the name of social justice. Market-based, proprietary production has often seemed simply too productive to tinker with. The emergence of the networked information economy promises to expand the horizons of the feasible in political imagination. Different liberal polities can pursue different mixtures of respect for different liberal commitments. However, the overarching constraint represented by the seeming necessity of the industrial model of information and cultural production has significantly shifted as an effective constraint on the pursuit of liberal commitments. 
@@ -162,10 +162,10 @@ The networked information economy also allows for the emergence of a more critic ={Balkin, Jack;communities:critical culture and self-reflection+1;critical culture and self-reflection+1;culture:criticality of (self-reflection)+1;democratic societies:critical culture and social relations+1;Fisher, William (Terry);Koren, Niva Elkin;Lessig, Lawrence (Larry);self-organization: See clusters in network topology self-reflection+1;liberal societies:critical culture and social relations} Throughout much of this book, I underscore the increased capabilities of individuals as the core driving social force behind the networked information economy. This heightened individual capacity has raised concerns by many that the Internet further fragments community, continuing the long trend of industrialization. A substantial body of empirical literature suggests, however, that we are in fact using the Internet largely at the expense of television, and that this exchange is a good one from the perspective of social ties. We use the Internet to keep in touch with family and intimate friends, both geographically proximate and distant. To the extent we do see a shift in social ties, it is because, in addition to strengthening our strong bonds, we are also increasing the range and diversity of weaker connections. Following ,{[pg 16]}, Manuel Castells and Barry Wellman, I suggest that we have become more adept at filling some of the same emotional and context-generating functions that have traditionally been associated with the importance of community with a network of overlapping social ties that are limited in duration or intensity. 
-={attention fragmentation;Castells, Manuel;fragmentation of communication;norms (social): fragments of communication;regulation by social norms: fragmentation of communication;social relations and norms:fragmentation of communication;communities: fragmentation of;diversity:fragmentation of communication;Castells, Manuel} +={attention fragmentation;Castells, Manuel;fragmentation of communication;norms (social): fragmentation of communication;regulation by social norms: fragmentation of communication;social relations and norms:fragmentation of communication;communities: fragmentation of;diversity:fragmentation of communication;Castells, Manuel} 2~ FOUR METHODOLOGICAL COMMENTS -={information economy:methodological choices+14;networked environmental policy. See policy networked information economy:methodological choices+14} +={information economy:methodological choices+14;networked environmental policy:See policy;networked information economy:methodological choices+14} There are four methodological choices represented by the thesis that I have outlined up to this point, and therefore in this book as a whole, which require explication and defense. The first is that I assign a very significant role to technology. The second is that I offer an explanation centered on social relations, but operating in the domain of economics, rather than sociology. The third and fourth are more internal to liberal political theory. The third is that I am offering a liberal political theory, but taking a path that has usually been resisted in that literature--considering economic structure and the limits of the market and its supporting institutions from the perspective of freedom, rather than accepting the market as it is, and defending or criticizing adjustments through the lens of distributive justice. Fourth, my approach heavily emphasizes individual action in nonmarket relations. Much of the discussion revolves around the choice between markets and nonmarket social behavior. 
In much of it, the state plays no role, or is perceived as playing a primarily negative role, in a way that is alien to the progressive branches of liberal political thought. In this, it seems more of a libertarian or an anarchistic thesis than a liberal one. I do not completely discount the state, as I will explain. But I do suggest that what is special about our moment is the rising efficacy of individuals and loose, nonmarket affiliations as agents of political economy. Just like the market, the state will have to adjust to this new emerging modality of human action. Liberal political theory must first recognize and understand it before it can begin to renegotiate its agenda for the liberal state, progressive or otherwise. ={capabilities of individuals:technology and human affairs+5;human affairs, technology and+5;individual capabilities and action: technology and human affairs+5} @@ -207,7 +207,7 @@ The important new fact about the networked environment, however, is the efficacy ={collaborative authorship: See also peer production collective social action} 2~ THE STAKES OF IT ALL: THE BATTLE OVER THE INSTITUTIONAL ECOLOGY OF THE DIGITAL ENVIRONMENT -={commercial model of communication+9;industrial model of communication+9;information economy:institutional ecology+9;institutional ecology of digital environment+9;networked environment policy:institutional ecology+9;proprietary rights+9;traditional model of communication+9} +={commercial model of communication+9;industrial model of communication+9;information economy:institutional ecology+9;institutional ecology of digital environment+9;networked information economy:institutional ecology+9;proprietary rights+9;traditional model of communication+9} No benevolent historical force will inexorably lead this technologicaleconomic moment to develop toward an open, diverse, liberal equilibrium. 
,{[pg 23]}, If the transformation I describe as possible occurs, it will lead to substantial redistribution of power and money from the twentieth-century industrial producers of information, culture, and communications--like Hollywood, the recording industry, and perhaps the broadcasters and some of the telecommunications services giants--to a combination of widely diffuse populations around the globe, and the market actors that will build the tools that make this population better able to produce its own information environment rather than buying it ready-made. None of the industrial giants of yore are taking this reallocation lying down. The technology will not overcome their resistance through an insurmountable progressive impulse. The reorganization of production and the advances it can bring in freedom and justice will emerge, therefore, only as a result of social and political action aimed at protecting the new social patterns from the incumbents' assaults. It is precisely to develop an understanding of what is at stake and why it is worth fighting for that I write this book. I offer no reassurances, however, that any of this will in fact come to pass. @@ -215,7 +215,7 @@ The battle over the relative salience of the proprietary, industrial models of i ={property ownership+5;commons} This is not to say that property is in some sense inherently bad. Property, together with contract, is the core institutional component of markets, and ,{[pg 24]}, a core institutional element of liberal societies. It is what enables sellers to extract prices from buyers, and buyers to know that when they pay, they will be secure in their ability to use what they bought. It underlies our capacity to plan actions that require use of resources that, without exclusivity, would be unavailable for us to use. But property also constrains action. 
The rules of property are circumscribed and intended to elicit a particular datum--willingness and ability to pay for exclusive control over a resource. They constrain what one person or another can do with regard to a resource; that is, use it in some ways but not others, reveal or hide information with regard to it, and so forth. These constraints are necessary so that people must transact with each other through markets, rather than through force or social networks, but they do so at the expense of constraining action outside of the market to the extent that it depends on access to these resources. -={constrains of information production:physical+2;physical constraints on information production+2} +={constrains of information production, physical+2;physical constraints on information production+2} Commons are another core institutional component of freedom of action in free societies, but they are structured to enable action that is not based on exclusive control over the resources necessary for action. For example, I can plan an outdoor party with some degree of certainty by renting a private garden or beach, through the property system. Alternatively, I can plan to meet my friends on a public beach or at Sheep's Meadow in Central Park. I can buy an easement from my neighbor to reach a nearby river, or I can walk around her property using the public road that makes up our transportation commons. Each institutional framework--property and commons--allows for a certain freedom of action and a certain degree of predictability of access to resources. Their complementary coexistence and relative salience as institutional frameworks for action determine the relative reach of the market and the domain of nonmarket action, both individual and social, in the resources they govern and the activities that depend on access to those resources. 
Now that material conditions have enabled the emergence of greater scope for nonmarket action, the scope and existence of a core common infrastructure that includes the basic resources necessary to produce and exchange information will shape the degree to which individuals will be able to act in all the ways that I describe as central to the emergence of a networked information economy and the freedoms it makes possible. ={commons} @@ -488,7 +488,7 @@ How are we to know that the content produced by widely dispersed individuals is ={accreditation:Amazon+1;Amazon+1;filtering:Amazon+1;relevance filtering:Amazon+1} Amazon uses a mix of mechanisms to get in front of their buyers of books and other products that the users are likely to purchase. A number of these mechanisms produce relevance and accreditation by harnessing the users themselves. At the simplest level, the recommendation "customers who bought items you recently viewed also bought these items" is a mechanical means of extracting judgments of relevance and accreditation from the actions of many individuals, who produce the datum of relevance as byproduct of making their own purchasing decisions. Amazon also allows users to create topical lists and track other users as their "friends and favorites." Amazon, like many consumer sites today, also provides users with the ability ,{[pg 76]}, to rate books they buy, generating a peer-produced rating by averaging the ratings. More fundamentally, the core innovation of Google, widely recognized as the most efficient general search engine during the first half of the 2000s, was to introduce peer-based judgments of relevance. Like other search engines at the time, Google used a text-based algorithm to retrieve a given universe of Web pages initially. Its major innovation was its PageRank algorithm, which harnesses peer production of ranking in the following way. The engine treats links from other Web sites pointing to a given Web site as votes of confidence. 
Whenever someone who authors a Web site links to someone else's page, that person has stated quite explicitly that the linked page is worth a visit. Google's search engine counts these links as distributed votes of confidence in the quality of the page pointed to. Pages that are heavily linked-to count as more important votes of confidence. If a highly linked-to site links to a given page, that vote counts for more than the vote of a site that no one else thinks is worth visiting. The point to take home from looking at Google and Amazon is that corporations that have done immensely well at acquiring and retaining users have harnessed peer production to enable users to find things they want quickly and efficiently. -={accreditation:Google;communities:critical culture and self-reflection+1;culture:critically of (self-reflection)+1;filtering:Google;Google;relevance filtering:Google} +={accreditation:Google;communities:critical culture and self-reflection+1;culture:criticality of (self-reflection)+1;filtering:Google;Google;relevance filtering:Google} The most prominent example of a distributed project self-consciously devoted to peer production of relevance is the Open Directory Project. The site relies on more than sixty thousand volunteer editors to determine which links should be included in the directory. Acceptance as a volunteer requires application. Quality relies on a peer-review process based substantially on seniority as a volunteer and level of engagement with the site. The site is hosted and administered by Netscape, which pays for server space and a small number of employees to administer the site and set up the initial guidelines. Licensing is free and presumably adds value partly to America Online's (AOL's) and Netscape's commercial search engine/portal and partly through goodwill. Volunteers are not affiliated with Netscape and receive no compensation. 
They spend time selecting sites for inclusion in the directory (in small increments of perhaps fifteen minutes per site reviewed), producing the most comprehensive, highest-quality human-edited directory of the Web--at this point outshining the directory produced by the company that pioneered human edited directories of the Web: Yahoo!. ={accreditation:Open Directory Project (ODP);critical culture and self-reflection:Open Directory Project;filtering:Open Directory Project (ODP);ODP (Open Directory Project);Open Directory Project (ODP);relevance filtering:Open Directory Project (ODP);self-organization:Open Directory Project} @@ -1229,7 +1229,7 @@ Another dimension that is less well developed in the United States than it is in ={Gilmore, Dan;Pantic, Drazen;Rheingold, Howard;mobile phones;text messaging} 2~ NETWORKED INFORMATION ECONOMY MEETS THE PUBLIC SPHERE -={information economy:effects on public sphere+21;networked environment policy:effects on public sphere+21} +={information economy:effects on public sphere+21;networked information economy:effects on public sphere+21} The networked public sphere is not made of tools, but of social production practices that these tools enable. The primary effect of the Internet on the ,{[pg 220]}, public sphere in liberal societies relies on the information and cultural production activity of emerging nonmarket actors: individuals working alone and cooperatively with others, more formal associations like NGOs, and their feedback effect on the mainstream media itself. These enable the networked public sphere to moderate the two major concerns with commercial mass media as a platform for the public sphere: (1) the excessive power it gives its owners, and (2) its tendency, when owners do not dedicate their media to exert power, to foster an inert polity. 
More fundamentally, the social practices of information and discourse allow a very large number of actors to see themselves as potential contributors to public discourse and as potential actors in political arenas, rather than mostly passive recipients of mediated information who occasionally can vote their preferences. In this section, I offer two detailed stories that highlight different aspects of the effects of the networked information economy on the construction of the public sphere. The first story focuses on how the networked public sphere allows individuals to monitor and disrupt the use of mass-media power, as well as organize for political action. The second emphasizes in particular how the networked public sphere allows individuals and groups of intense political engagement to report, comment, and generally play the role traditionally assigned to the press in observing, analyzing, and creating political salience for matters of public interest. The case studies provide a context both for seeing how the networked public sphere responds to the core failings of the commercial, mass-media-dominated public sphere and for considering the critiques of the Internet as a platform for a liberal public sphere. @@ -1628,7 +1628,7 @@ Only two encyclopedias focus explicitly on Barbie's cultural meaning: Britannica The relative emphasis of Google and /{Wikipedia}/, on the one hand, and Overture, Yahoo!, and the commercial encyclopedias other than Britannica, on the other hand, is emblematic of a basic difference between markets and social conversations with regard to culture. If we focus on the role of culture as "common knowledge" or background knowledge, its relationship to the market--at least for theoretical economists--is exogenous. It can be taken as given and treated as "taste." In more practical business environments, culture is indeed a source of taste and demand, but it is not taken as exogenous. 
Culture, symbolism, and meaning, as they are tied with marketbased goods, become a major focus of advertising and of demand management. No one who has been exposed to the advertising campaigns of Coca-Cola, Nike, or Apple Computers, as well as practically to any one of a broad range of advertising campaigns over the past few decades, can fail to see that these are not primarily a communication about the material characteristics or qualities of the products or services sold by the advertisers. ,{[pg 290]}, They are about meaning. These campaigns try to invest the act of buying their products or services with a cultural meaning that they cultivate, manipulate, and try to generalize in the practices of the society in which they are advertising, precisely in order to shape taste. They offer an opportunity to generate rents, because the consumer has to have this company's shoe rather than that one, because that particular shoe makes the customer this kind of person rather than that kind--cool rather than stuffy, sophisticated rather than common. Neither the theoretical economists nor the marketing executives have any interest in rendering culture transparent or writable. Whether one treats culture as exogenous or as a domain for limiting the elasticity of demand for one's particular product, there is no impetus to make it easier for consumers to see through the cultural symbols, debate their significance, or make them their own. If there is business reason to do anything about culture, it is to try to shape the cultural meaning of an object or practice, in order to shape the demand for it, while keeping the role of culture hidden and assuring control over the careful cultural choreography of the symbols attached to the company. Indeed, in 1995, the U.S. Congress enacted a new kind of trademark law, the Federal Antidilution Act, which for the first time disconnects trademark protection from protecting consumers from confusion by knockoffs. 
The Antidilution Act of 1995 gives the owner of any famous mark--and only famous marks--protection from any use that dilutes the meaning that the brand owner has attached to its own mark. It can be entirely clear to consumers that a particular use does not come from the owner of the brand, and still, the owner has a right to prevent this use. While there is some constitutional free-speech protection for criticism, there is also a basic change in the understanding of trademark law-- from a consumer protection law intended to assure that consumers can rely on the consistency of goods marked in a certain way, to a property right in controlling the meaning of symbols a company has successfully cultivated so that they are, in fact, famous. This legal change marks a major shift in the understanding of the role of law in assigning control for cultural meaning generated by market actors. -={Antidilutation Act of 1995;branding:trademark dilutation;dilutation of trademaks;logical layer of institutional ecology:trademark dilutation;proprietary rights:trademark dilutation;trademark dilutation;information production, market-based:cultural change, transparency of+4;market-based information producers: cultural change, transparency of+4;nonmarket information producers:cultural change, transparency of+4} +={Antidilutation Act of 1995;branding:trademark dilutation;dilutation of trademarks;logical layer of institutional ecology:trademark dilutation;proprietary rights:trademark dilutation;trademark dilutation;information production, market-based:cultural change, transparency of+4;market-based information producers: cultural change, transparency of+4;nonmarket information producers:cultural change, transparency of+4} Unlike market production of culture, meaning making as a social, nonmarket practice has no similar systematic reason to accept meaning as it comes. Certainly, some social relations do. 
When girls play with dolls, collect them, or exhibit them, they are rarely engaged in reflection on the meaning of the dolls, just as fans of Scarlett O'Hara, of which a brief Internet search suggests there are many, are not usually engaged in critique of Gone with the ,{[pg 291]}, Wind as much as in replication and adoption of its romantic themes. Plainly, however, some conversations we have with each other are about who we are, how we came to be who we are, and whether we view the answers we find to these questions as attractive or not. In other words, some social interactions do have room for examining culture as well as inhabiting it, for considering background knowledge for what it is, rather than taking it as a given input into the shape of demand or using it as a medium for managing meaning and demand. People often engage in conversations with each other precisely to understand themselves in the world, their relationship to others, and what makes them like and unlike those others. One major domain in which this formation of self- and group identity occurs is the adoption or rejection of, and inquiry into, cultural symbols and sources of meaning that will make a group cohere or splinter; that will make people like or unlike each other. @@ -1680,10 +1680,10 @@ We can analyze the implications of the emergence of the networked information ec The opportunities that the network information economy offers, however, often run counter to the central policy drive of both the United States and the European Union in the international trade and intellectual property systems. These two major powers have systematically pushed for ever-stronger proprietary protection and increasing reliance on strong patents, copyrights, and similar exclusive rights as the core information policy for growth and development. Chapter 2 explains why such a policy is suspect from a purely economic perspective concerned with optimizing innovation. 
,{[pg 303]}, A system that relies too heavily on proprietary approaches to information production is not, however, merely inefficient. It is unjust. Proprietary rights are designed to elicit signals of people's willingness and ability to pay. In the presence of extreme distribution differences like those that characterize the global economy, the market is a poor measure of comparative welfare. A system that signals what innovations are most desirable and rations access to these innovations based on ability, as well as willingness, to pay, overrepresents welfare gains of the wealthy and underrepresents welfare gains of the poor. Twenty thousand American teenagers can simply afford, and will be willing to pay, much more for acne medication than the more than a million Africans who die of malaria every year can afford to pay for a vaccine. A system that relies too heavily on proprietary models for managing information production and exchange is unjust because it is geared toward serving small welfare increases for people who can pay a lot for incremental improvements in welfare, and against providing large welfare increases for people who cannot pay for what they need. 
2~ LIBERAL THEORIES OF JUSTICE AND THE NETWORKED INFORMATION ECONOMY -={human development and justice:liberal theories of+7;human welfare:liberal theories of justice+7;information economy:justice, liberal theories of+7;justice and human development:liberal theories of+7;liberal societies:theories of justice+7;networked environment policy:justice, liberal theories of+7;welfare:liberal theories of justice+7|see also justice and human development} +={human development and justice:liberal theories of+7;human welfare:liberal theories of justice+7;information economy:justice, liberal theories of+7;justice and human development:liberal theories of+7;liberal societies:theories of justice+7;welfare:liberal theories of justice+7|see also justice and human development} Liberal theories of justice can be categorized according to how they characterize the sources of inequality in terms of luck, responsibility, and structure. By luck, I mean reasons for the poverty of an individual that are beyond his or her control, and that are part of that individual's lot in life unaffected by his or her choices or actions. By responsibility, I mean causes for the poverty of an individual that can be traced back to his or her actions or choices. By structure, I mean causes for the inequality of an individual that are beyond his or her control, but are traceable to institutions, economic organizations, or social relations that form a society's transactional framework and constrain the behavior of the individual or undermine the efficacy of his or her efforts at self-help. 
-={background knowledge:see culture bad luck, justice and+2;DSL:see broadband networks dumb luck, justice and+2;luck, justice and+2;misfortune, justice and+2;organizational structure:justice and+2;structure of organizations:justice and+2} +={background knowledge:see culture bad luck, justice and+2;DSL:see broadband networks dumb luck, justice and+2;luck, justice and+2;misfortune, justice and+2;organization structure:justice and+2;structure of organizations:justice and+2} We can think of John Rawls's /{Theory of Justice}/ as based on a notion that the poorest people are the poorest because of dumb luck. His proposal for a systematic way of defending and limiting redistribution is the "difference principle." A society should organize its redistribution efforts in order to make those who are least well-off as well-off as they can be. The theory of desert is that, because any of us could in principle be the victim of this dumb luck, we would all have agreed, if none of us had known where we ,{[pg 304]}, would be on the distribution of bad luck, to minimize our exposure to really horrendous conditions. The practical implication is that while we might be bound to sacrifice some productivity to achieve redistribution, we cannot sacrifice too much. If we did that, we would most likely be hurting, rather than helping, the weakest and poorest. Libertarian theories of justice, most prominently represented by Robert Nozick's entitlement theory, on the other hand, tend to ignore bad luck or impoverishing structure. They focus solely on whether the particular holdings of a particular person at any given moment are unjustly obtained. If they are not, they may not justly be taken from the person who holds them. Explicitly, these theories ignore the poor. As a practical matter and by implication, they treat responsibility as the source of the success of the wealthy, and by negation, the plight of the poorest--leading them to be highly resistant to claims of redistribution. 
={Rawls, John+1;Nozick, Robert;redistribution theory+1} @@ -2052,7 +2052,7 @@ Increased practical individual autonomy has been central to my claims throughout ={communities:virtual+9;virtual communities+9:see also social relations and norms} We are seeing two effects: first, and most robustly, we see a thickening of preexisting relations with friends, family, and neighbors, particularly with those who were not easily reachable in the pre-Internet-mediated environment. Parents, for example, use instant messages to communicate with their children who are in college. Friends who have moved away from each other are keeping in touch more than they did before they had e-mail, because email does not require them to coordinate a time to talk or to pay longdistance rates. However, this thickening of contacts seems to occur alongside a loosening of the hierarchical aspects of these relationships, as individuals weave their own web of supporting peer relations into the fabric of what might otherwise be stifling familial relationships. Second, we are beginning to see the emergence of greater scope for limited-purpose, loose relationships. These may not fit the ideal model of "virtual communities." They certainly do not fit a deep conception of "community" as a person's primary source of emotional context and support. They are nonetheless effective and meaningful to their participants. It appears that, as the digitally networked environment begins to displace mass media and telephones, its salient communications characteristics provide new dimensions to thicken existing social relations, while also providing new capabilities for looser and more fluid, but still meaningful social networks. 
A central aspect of this positive improvement in loose ties has been the technical-organizational shift from an information environment dominated by commercial mass media on a oneto-many model, which does not foster group interaction among viewers, to an information environment that both technically and as a matter of social practice enables user-centric, group-based active cooperation platforms of the kind that typify the networked information economy. This is not to say that the Internet necessarily effects all people, all social groups, and networks identically. The effects on different people in different settings and networks will likely vary, certainly in their magnitude. My purpose here, however, is ,{[pg 358]}, to respond to the concern that enhanced individual capabilities entail social fragmentation and alienation. The available data do not support that claim as a description of a broad social effect. -={communication:thickening of preexisting relations;displacement of real-world interactions;family relations, strengthening of;loose affiliations;neighborhood relations, strengthening of;networked public sphere:loose affiliations;norms (social):loose affiliations|thickening of preexisting relations;peer production:loose affiliations;preexisting relations, thickening of;public sphere:loose affiliations;regulation by social norms:loose affiliations|thickening of preexisting relations;scope of loose relationships;social relations and norms:loose affiliations|thickening of preexisting relations;supplantation of real-world interaction;thickening of preexisting relations} +={communication:thickening of preexisting relations;displacement of real-world interaction;family relations, strengthening of;loose affiliations;neighborhood relations, strengthening of;networked public sphere:loose affiliations;norms (social):loose affiliations|thickening of preexisting relations;peer production:loose affiliations;preexisting relations, thickening of;public sphere:loose 
affiliations;regulation by social norms:loose affiliations|thickening of preexisting relations;scope of loose relationships;social relations and norms:loose affiliations|thickening of preexisting relations;supplantation of real-world interaction;thickening of preexisting relations} 2~ FROM "VIRTUAL COMMUNITIES" TO FEAR OF DISINTEGRATION @@ -2081,7 +2081,7 @@ The concerns represented by these early studies of the effects of Internet use o ={Coleman, James;Granovetter, Mark;Putnum, Robert} There are, roughly speaking, two types of responses to these concerns. The first is empirical. In order for these concerns to be valid as applied to increasing use of Internet communications, it must be the case that Internet communications, with all of their inadequacies, come to supplant real-world human interactions, rather than simply to supplement them. Unless Internet connections actually displace direct, unmediated, human contact, there is no basis to think that using the Internet will lead to a decline in those nourishing connections we need psychologically, or in the useful connections we make socially, that are based on direct human contact with friends, family, and neighbors. The second response is theoretical. It challenges the notion that the socially embedded individual is a fixed entity with unchanging needs that are, or are not, fulfilled by changing social conditions and relations. Instead, it suggests that the "nature" of individuals changes over time, based on actual social practices and expectations. In this case, we are seeing a shift from individuals who depend on social relations that are dominated by locally embedded, thick, unmediated, given, and stable relations, into networked individuals--who are more dependent on their own combination of strong and weak ties, who switch networks, cross boundaries, and weave their own web of more or less instrumental, relatively fluid relationships. 
Manuel Castells calls this the "networked society,"~{ Manuel Castells, The Rise of Networked Society 2d ed. (Malden, MA: Blackwell Publishers, Inc., 2000). }~ Barry Wellman, "networked individualism."~{ Barry Wellman et al., "The Social Affordances of the Internet for Networked Individualism," Journal of Computer Mediated Communication 8, no. 3 (April 2003). }~ To simplify vastly, it is not that people cease to depend on others and their context for both psychological and social wellbeing and efficacy. It is that the kinds of connections that we come to rely on for these basic human needs change over time. Comparisons of current practices to the old ways of achieving the desiderata of community, and fears regarding the loss of community, are more a form of nostalgia than a diagnosis of present social malaise. ,{[pg 363]}, -={Castells, Manuel;Wellman, Barry;displacement of real-world interaction+5;family relations, strengthening of+5;loose affiliations;neighborhood relations, strengthening of+5;networked public sphere:loose affiliations;norms (social):loose affiliations;peer production:loose affiliations;public sphere:loose affiliations;regulations by social norms:loose affiliations;social relations and norms:loose affiliations;supplantation of real-world interaction+5;thickening of preexisting relations+5} +={Castells, Manuel;Wellman, Barry;displacement of real-world interaction+5;family relations, strengthening of+5;loose affiliations;neighborhood relations, strengthening of+5;networked public sphere:loose affiliations;norms (social):loose affiliations;peer production:loose affiliations;public sphere:loose affiliations;regulation by social norms:loose affiliations;social relations and norms:loose affiliations;supplantation of real-world interaction+5;thickening of preexisting relations+5} 3~ Users Increase Their Connections with Preexisting Relations ={e-mail:thickening of preexisting relations+4;social capital:thickening of preexisting relations+4} @@ -2198,7 
+2198,7 @@ The first two parts of this book explained why the introduction of digital compu ={commercial model of communication:mapping, framework for+13;industrial model of communication:mapping, framework for+13;institutional ecology of digital environment:mapping, framework for+13;layers of institutional ecology+13;policy:mapping institutional ecology+13;policy layers+13;traditional model of communication:mapping, framework for+13} Two specific examples will illustrate the various levels at which law can operate to shape the use of information and its production and exchange. The first example builds on the story from chapter 7 of how embarrassing internal e-mails from Diebold, the electronic voting machine maker, were exposed by investigative journalism conducted on a nonmarket and peerproduction model. After students at Swarthmore College posted the files, Diebold made a demand under the DMCA that the college remove the materials or face suit for contributory copyright infringement. The students were therefore forced to remove the materials. However, in order keep the materials available, the students asked students at other institutions to mirror the files, and injected them into the eDonkey, BitTorrent, and FreeNet filesharing and publication networks. Ultimately, a court held that the unauthorized publication of files that were not intended for sale and carried such high public value was a fair use. This meant that the underlying publication of the files was not itself a violation, and therefore the Internet service provider was not liable for providing a conduit. However, the case was decided on September 30, 2004--long after the information would have been relevant ,{[pg 390]}, to the voting equipment certification process in California. What kept the information available for public review was not the ultimate vindication of the students' publication. It was the fact that the materials were kept in the public sphere even under threat of litigation. 
Recall also that at least some of the earlier set of Diebold files that were uncovered by the activist who had started the whole process in early 2003 were zipped, or perhaps encrypted in some form. Scoop, the Web site that published the revelation of the initial files, published--along with its challenge to the Internet community to scour the files and find holes in the system--links to locations in which utilities necessary for reading the files could be found. -={Diebold Elections Systems+3;electronic voting machines (case study)+3;networked public sphere:Diebold Election Systems case study+3;policy:Diebold Election Systems case study+3;public sphere:Diebold Election Systems case study+3;voting, electronic+3} +={Diebold Election Systems+3;electronic voting machines (case study)+3;networked public sphere:Diebold Election Systems case study+3;policy:Diebold Election Systems case study+3;public sphere:Diebold Election Systems case study+3;voting, electronic+3} There are four primary potential points of failure in this story that could have conspired to prevent the revelation of the Diebold files, or at least to suppress the peer-produced journalistic mode that made them available. First, if the service provider--the college, in this case--had been a sole provider with no alternative physical transmission systems, its decision to block the materials under threat of suit would have prevented publication of the materials throughout the relevant period. Second, the existence of peer-to-peer networks that overlay the physical networks and were used to distribute the materials made expunging them from the Internet practically impossible. There was no single point of storage that could be locked down. This made the prospect of threatening other universities futile. Third, those of the original files that were not in plain text were readable with software utilities that were freely available on the Internet, and to which Scoop pointed its readers. 
This made the files readable to many more critical eyes than they otherwise would have been. Fourth, and finally, the fact that access to the raw materials--the e-mails--was ultimately found to be privileged under the fair-use doctrine in copyright law allowed all the acts that had been performed in the preceding period under a shadow of legal liability to proceed in the light of legality. @@ -2218,7 +2218,7 @@ The remainder of this chapter provides a more or less detailed presentation of t A quick look at table 11.1 reveals that there is a diverse set of sources of openness. A few of these are legal. Mostly, they are based on technological and social practices, including resistance to legal and regulatory drives toward enclosure. Examples of policy interventions that support an open core common infrastructure are the FCC's increased permission to deploy open wireless networks and the various municipal broadband initiatives. The former is a regulatory intervention, but its form is largely removal of past prohibitions on an entire engineering approach to building wireless systems. Municipal efforts to produce open broadband networks are being resisted at the state legislation level, with statutes that remove the power to provision broadband from the home rule powers of municipalities. For the most part, the drive for openness is based on individual and voluntary cooperative action, not law. The social practices of openness take on a quasi-normative face when practiced in standard-setting bodies like the Internet Engineering Task Force (IETF) or the World Wide Web Consortium (W3C). However, none of these have the force of law. Legal devices also support openness when used in voluntaristic models like free software licensing and Creative Commons?type licensing. However, most often when law has intervened in its regulatory force, as opposed to its contractual-enablement force, it has done so almost entirely on the side of proprietary enclosure. 
Another characteristic of the social-economic-institutional struggle is an alliance between a large number of commercial actors and the social sharing culture. We see this in the way that wireless equipment manufacturers are selling into a market of users of WiFi and similar unlicensed wireless devices. We see this in the way that personal computer manufacturers are competing ,{[pg 395]}, over decreasing margins by producing the most general-purpose machines that would be most flexible for their users, rather than machines that would most effectively implement the interests of Hollywood and the recording industry. We see this in the way that service and equipment-based firms, like IBM and Hewlett-Packard (HP), support open-source and free software. The alliance between the diffuse users and the companies that are adapting their business models to serve them as users, instead of as passive consumers, affects the political economy of this institutional battle in favor of openness. On the other hand, security consciousness in the United States has led to some efforts to tip the balance in favor of closed proprietary systems, apparently because these are currently perceived as more secure, or at least more amenable to government control. While orthogonal in its political origins to the battle between proprietary and commons-based strategies for information production, this drive does tilt the field in favor of enclosure, at least at the time of this writing in 2005. 
-={commercial model of communication:security related policy;industrial model of communication:security-related policy;institutional ecology of digital environment:security-related policy;policy:security-related;security-related policy;traditional model of communication:security-related policy} +={commercial model of communication:security-related policy;industrial model of communication:security-related policy;institutional ecology of digital environment:security-related policy;policy:security-related;security-related policy;traditional model of communication:security-related policy} % paragraph end moved above table diff --git a/data/v2/samples/two_bits.christopher_kelty.sst b/data/v2/samples/two_bits.christopher_kelty.sst index 85efb46..1cff4f9 100644 --- a/data/v2/samples/two_bits.christopher_kelty.sst +++ b/data/v2/samples/two_bits.christopher_kelty.sst @@ -94,7 +94,7 @@ At first glance, the thread tying these projects together seems to be the Intern ={Internet+12:relation to Free Software;Free Software:relation to Internet;public sphere:theories of} Both the Internet and Free Software are historically specific, that is, not just any old new media or information technology. But the Internet is many, many specific things to many, many specific people. As one reviewer of an early manuscript version of this book noted, "For most people, the Internet is porn, stock quotes, Al Jazeera clips of executions, Skype, seeing pictures of the grandkids, porn, never having to buy another encyclopedia, MySpace, e-mail, online housing listings, Amazon, Googling potential romantic interests, etc. etc." It is impossible to explain all of these things; the meaning and significance of the proliferation of digital pornography is a very different concern than that of the fall of the print encyclopedia ,{[pg 5]}, and the rise of Wikipedia. 
Yet certain underlying practices relate these diverse phenomena to one another and help explain why they have occurred at this time and in this technical, legal, and social context. By looking carefully at Free Software and its modulations, I suggest, one can come to a better understanding of the changes affecting pornography, Wikipedia, stock quotes, and many other wonderful and terrifying things.~{ Wikipedia is perhaps the most widely known and generally familiar example of what this book is about. Even though it is not identified as such, it is in fact a Free Software project and a "modulation" of Free Software as I describe it here. The non-technically inclined reader might keep Wikipedia in mind as an example with which to follow the argument of this book. I will return to it explicitly in part 3. However, for better or for worse, there will be no discussion of pornography. }~ -={Wikipedia} +={Wikipedia (collaborative encyclopedia)} Two Bits has three parts. Part I of this book introduces the reader to the concept of recursive publics by exploring the lives, works, and discussions of an international community of geeks brought together by their shared interest in the Internet. Chapter 1 asks, in an ethnographic voice, "Why do geeks associate with one another?" The answer—told via the story of Napster in 2000 and the standards process at the heart of the Internet—is that they are making a recursive public. Chapter 2 explores the words and attitudes of geeks more closely, focusing on the strange stories they tell (about the Protestant Reformation, about their practical everyday polymathy, about progress and enlightenment), stories that make sense of contemporary political economy in sometimes surprising ways. 
Central to part I is an explication of the ways in which geeks argue about technology but also argue with and through it, by building, modifying, and maintaining the very software, networks, and legal tools within which and by which they associate with one another. It is meant to give the reader a kind of visceral sense of why certain arrangements of technology, organization, and law—specifically that of the Internet and Free Software—are so vitally important to these geeks. ={geeks;Napster;technology:as argument} @@ -209,7 +209,7 @@ The study of distributed phenomena does not necessarily imply the detailed, loca ={Weber, Max} It is in this sense that the ethnographic object of this study is not geeks and not any particular project or place or set of people, but Free Software and the Internet. Even more precisely, the ethnographic object of this study is "recursive publics"—except that this concept is also the work of the ethnography, not its preliminary object. I could not have identified "recursive publics" as the object of the ethnography at the outset, and this is nice proof that ethnographic work is a particular kind of epistemological encounter, an encounter that requires considerable conceptual work during and after the material labor of fieldwork, and throughout the material labor of writing and rewriting, in order to make sense of and reorient it into a question that will have looked deliberate and ,{[pg 21]}, answerable in hindsight. Ethnography of this sort requires a long-term commitment and an ability to see past the obvious surface of rapid transformation to a more obscure and slower temporality of cultural significance, yet still pose questions and refine debates about the near future.~{ Despite what might sound like a "shoot first, ask questions later" approach, the design of this project was in fact conducted according to specific methodologies. 
The most salient is actor-network theory: Latour, Science in Action; Law, "Technology and Heterogeneous Engineering"; Callon, "Some Elements of a Sociology of Translation"; Latour, Pandora’s Hope; Latour, Re-assembling the Social; Callon, Laws of the Markets; Law and Hassard, Actor Network Theory and After. Ironically, there have been no actor-network studies of networks, which is to say, of particular information and communication technologies such as the Internet. The confusion of the word network (as an analytical and methodological term) with that of network (as a particular configuration of wires, waves, software, and chips, or of people, roads, and buses, or of databases, names, and diseases) means that it is necessary to always distinguish this-network-here from any-network-whatsoever. My approach shares much with the ontological questions raised in works such as Law, Aircraft Stories; Mol, The Body Multiple; Cussins, "Ontological Choreography"; Charis Thompson, Making Parents; and Dumit, Picturing Personhood. }~ Historically speaking, the chapters of part II can be understood as a contribution to a history of scientific infrastructure—or perhaps to an understanding of large-scale, collective experimentation.~{ I understand a concern with scientific infrastructure to begin with Steve Shapin and Simon Schaffer in Leviathan and the Air Pump, but the genealogy is no doubt more complex. It includes Shapin, The Social History of Truth; Biagioli, Galileo, Courtier; Galison, How Experiments End and Image and Logic; Daston, Biographies of Scientific Objects; Johns, The Nature of the Book. A whole range of works explore the issue of scientific tools and infrastructure: Kohler, Lords of the Fly; Rheinberger, Towards a History of Epistemic Things; Landecker, Culturing Life; Keating and Cambrosio, Biomedical Platforms. 
Bruno Latour’s "What Rules of Method for the New Socio-scientific Experiments" provides one example of where science studies might go with these questions. Important texts on the subject of technical infrastructures include Walsh and Bayma, "Computer Networks and Scientific Work"; Bowker and Star, Sorting Things Out; Edwards, The ,{[pg 316]}, Closed World; Misa, Brey, and Feenberg, Modernity and Technology; Star and Ruhleder, "Steps Towards an Ecology of Infrastructure." }~ The Internet and Free Software are each an important practical transformation that will have effects on the practice of science and a kind of complex technical practice for which there are few existing models of study. -={actor network theory;Internet+1} +={Actor Network Theory;Internet+1} A methodological note about the peculiarity of my subject is also in order. The Attentive Reader will note that there are very few fragments of conventional ethnographic material (i.e., interviews or notes) transcribed herein. Where they do appear, they tend to be "publicly available"—which is to say, accessible via the Internet—and are cited as such, with as much detail as necessary to allow the reader to recover them. Conventional wisdom in both anthropology and history has it that what makes a study interesting, in part, is the work a researcher has put into gathering that which is not already available, that is, primary sources as opposed to secondary sources. In some cases I provide that primary access (specifically in chapters 2, 8, and 9), but in many others it is now literally impossible: nearly everything is archived. Discussions, fights, collaborations, talks, papers, software, articles, news stories, history, old software, old software manuals, reminiscences, notes, and drawings—it is all saved by someone, somewhere, and, more important, often made instantly available by those who collect it. 
The range of conversations and interactions that count as private (either in the sense of disappearing from written memory or of being accessible only to the parties involved) has shrunk demonstrably since about 1981. ={ethnographic data:availability of+5} @@ -293,7 +293,7 @@ _1 2. Boyle, "The Second Enclosure Movement and the Construction of the Public D 2~ From the Facts of Human Activity Boston, May 2003. Starbucks. Sean and Adrian are on their way to pick me up for dinner. I’ve already had too much coffee, so I sit at the window reading the paper. Eventually Adrian calls to find out where I am, I tell him, and he promises to show up in fifteen minutes. I get bored and go outside to wait, watch the traffic go by. More or less right on time (only post-dotcom is Adrian ever on time), Sean’s new blue VW Beetle rolls into view. Adrian jumps out of the passenger seat and into the back, and I get in. Sean has been driving for a little over a year. He seems confident, cautious, but meanders through the streets of Cambridge. We are destined for Winchester, a township on the Charles River, in order to go to an Indian restaurant that one of Sean’s friends has recommended. When I ask how they are doing, they say, "Good, good." Adrian offers, "Well, Sean’s better than he has been in two years." "Really?" I say, impressed. -={Doyle, Sean+6;Groper Adrian+6} +={Doyle, Sean+6;Gropper, Adrian+6} Sean says, "Well, happier than at least the last year. I, well, let me put it this way: forgive me father for I have sinned, I still have unclean thoughts about some of the upper management in the company, I occasionally think they are not doing things in the best interest of the company, and I see them as self-serving and sometimes wish them ill." In this rolling blue confessional Sean describes some of the people who I am familiar with whom he now tries very hard not to think about. I look at him and say, "Ten Hail Marys and ten Our Fathers, and you will be absolved, my child." 
Turning to Adrian, I ask, "And what about you?" Adrian continues the joke: "I, too, have sinned. I have reached the point where I can see absolutely nothing good coming of this company but that I can keep my investments in it long enough to pay for my children’s college tuition." I say, "You, my son, I cannot help." Sean says, "Well, funny thing about tainted money . . . there just taint enough of it." @@ -1106,7 +1106,7 @@ The absence of an economic or corporate mandate for Thompson’s and Ritchie’s ={AT&T+14;McIlroy, Douglas} UNIX was unique for many technical reasons, but also for a specific economic reason: it was never quite academic and never quite commercial. Martin Campbell-Kelly notes that UNIX was a "non-proprietary operating system of major significance."~{ Campbell-Kelly, From Airline Reservations to Sonic the Hedgehog, 143. }~ Kelly’s use of "non-proprietary" is not surprising, but it is incorrect. Although business-speak regularly opposed open to proprietary throughout the 1980s and early 1990s (and UNIX was definitely the former), Kelly’s slip marks clearly the confusion between software ownership and software distribution that permeates both popular and academic understandings. UNIX was indeed proprietary—it was copyrighted and wholly owned by Bell Labs and in turn by Western Electric ,{[pg 127]}, and AT&T—but it was not exactly commercialized or marketed by them. Instead, AT&T allowed individuals and corporations to install UNIX and to create UNIX-like derivatives for very low licensing fees. Until about 1982, UNIX was licensed to academics very widely for a very small sum: usually royalty-free with a minimal service charge (from about $150 to $800).~{ Ritchie’s Web site contains a copy of a 1974 license (http://cm.bell-labs.com/cm/cs/who/dmr/licenses.html) and a series of ads that exemplify the uneasy positioning of UNIX as a commercial product (http://cm.bell-labs.com/cm/cs/who/dmr/unixad.html). 
According to Don Libes and Sandy Ressler, "The original licenses were source licenses. . . . [C]ommercial institutions paid fees on the order of $20,000. If you owned more than one machine, you had to buy binary licenses for every additional machine [i.e., you were not allowed to copy the source and install it] you wanted to install UNIX on. They were fairly pricey at $8000, considering you couldn’t resell them. On the other hand, educational institutions could buy source licenses for several hundred dollars—just enough to cover Bell Labs’ administrative overhead and the cost of the tapes" (Life with UNIX, 20-21). }~ The conditions of this license allowed researchers to do what they liked with the software so long as they kept it secret: they could not distribute or use it outside of their university labs (or use it to create any commercial product or process), nor publish any part of it. As a result, throughout the 1970s UNIX was developed both by Thompson and Ritchie inside Bell Labs and by users around the world in a relatively informal manner. Bell Labs followed such a liberal policy both because it was one of a small handful of industry-academic research and development centers and because AT&T was a government monopoly that provided phone service to the country and was therefore forbidden to directly enter the computer software market.~{ According to Salus, this licensing practice was also a direct result of Judge Thomas Meaney’s 1956 antitrust consent decree which required AT&T to reveal and to license its patents for nominal fees (A Quarter Century of UNIX, 56); see also Brock, The Second Information Revolution, 116-20. 
}~ -={AT&T:Bell Labratories+13;licensing, of UNIX+6;proprietary systems: open vs.;monopoly} +={AT&T:Bell Laboratories+13;licensing, of UNIX+6;proprietary systems: open vs.;monopoly} Being on the border of business and academia meant that UNIX was, on the one hand, shielded from the demands of management and markets, allowing it to achieve the conceptual integrity that made it so appealing to designers and academics. On the other, it also meant that AT&T treated it as a potential product in the emerging software industry, which included new legal questions from a changing intellectual-property regime, novel forms of marketing and distribution, and new methods of developing, supporting, and distributing software. @@ -1160,7 +1160,7 @@ Unfortunately, Commentary was also legally restricted in its distribution. AT&T ={trade secret law+1} Thus, these generations of computer-science students and academics shared a secret—a trade secret become open secret. Every student who learned the essentials of the UNIX operating system from a photocopy of Lions’s commentary, also learned about AT&T’s attempt to control its legal distribution on the front cover of their textbook. The parallel development of photocopying has a nice resonance here; together with home cassette taping of music and the introduction of the video-cassette recorder, photocopying helped drive the changes to copyright law adopted in 1976. -={copyright:changes in} +={copyright:changes in 1976} Thirty years later, and long after the source code in it had been completely replaced, Lions’s Commentary is still widely admired by geeks. 
Even though Free Software has come full circle in providing students with an actual operating system that can be legally studied, taught, copied, and implemented, the kind of "literary criticism" that Lions’s work represents is still extremely rare; even reading obsolete code with clear commentary is one of the few ways to truly understand the design elements and clever implementations that made the UNIX operating system so different from its predecessors and even many of its successors, few, if any of which have been so successfully ported to the minds of so many students. ={design+2} @@ -1241,7 +1241,7 @@ The open-systems story is also a story of the blind spot of open systems—in th ={intellectual property;interoperability+21;openness (component of Free Software):intellectual property and} Standardization was at the heart of the contest, but by whom and by what means was never resolved. The dream of open systems, pursued in an entirely unregulated industry, resulted in a complicated experiment in novel forms of standardization and cooperation. The creation of a "standard" operating system based on UNIX is the story of a failure, a kind of "figuring out" gone haywire, which resulted in huge consortia of computer manufacturers attempting to work together and compete with each other at the same time. Meanwhile, the successful creation of a "standard" networking protocol—known as the Open Systems Interconnection Reference Model (OSI)—is a story of failure that hides a larger success; OSI was eclipsed in the same period by the rapid and ad hoc adoption of the Transmission Control Protocol/Internet Protocol (TCP/IP), which used a radically different standardization process and which succeeded for a number of surprising reasons, allowing the Internet ,{[pg 145]}, to take the form it did in the 1990s and ultimately exemplifying the moral-technical imaginary of a recursive public—and one at the heart of the practices of Free Software. 
-={figuring out;Open Systems Interconnection (OSI), as reference model;Openness (component of Free Software):standardization and;protocols:Open Systems Interconnection (OSI)|TCP/IP;standards organizations;TCP/IP (Transmission Control Protocol/Internet Protocol)} +={figuring out;Open Systems Interconnection (OSI):as reference model;Openness (component of Free Software):standardization and;protocols:Open Systems Interconnection (OSI)|TCP/IP;standards organizations;TCP/IP (Transmission Control Protocol/Internet Protocol)} The conceiving of openness, which is the central plot of these two stories, has become an essential component of the contemporary practice and power of Free Software. These early battles created a kind of widespread readiness for Free Software in the 1990s, a recognition of Free Software as a removal of open systems’ blind spot, as much as an exploitation of its power. The geek ideal of openness and a moral-technical order (the one that made Napster so significant an event) was forged in the era of open systems; without this concrete historical conception of how to maintain openness in technical and moral terms, the recursive public of geeks would be just another hierarchical closed organization—a corporation manqué—and not an independent public serving as a check on the kinds of destructive power that dominated the open-systems contest. ={Napster} @@ -1427,7 +1427,7 @@ The growth of Free Software in the 1980s and 1990s depended on openness as a con ={Open Systems:networks and+28} The struggle to standardize UNIX as a platform for open systems was not the only open-systems struggle; alongside the UNIX wars, another "religious war" was raging. 
The attempt to standardize networks—in particular, protocols for the inter-networking of multiple, diverse, and autonomous networks of computers—was also a key aspect of the open-systems story of the 1980s.~{ The distinction between a protocol, an implementation and a standard is important: Protocols are descriptions of the precise terms by which two computers can communicate (i.e., a dictionary and a handbook for communicating). An implementation is the creation of software that uses a protocol (i.e., actually does the communicating; thus two implementations using the same protocol should be able to share data. A standard defines which protocol should be used by which computers, for what purposes. It may or may not define the protocol, but will set limits on changes to that protocol. }~ The war ,{[pg 167]}, between the TCP/IP and OSI was also a story of failure and surprising success: the story of a successful standard with international approval (the OSI protocols) eclipsed by the experimental, military-funded TCP/IP, which exemplified an alternative and unusual standards process. The moral-technical orders expressed by OSI and TCP/IP are, like that of UNIX, on the border between government, university, and industry; they represent conflicting social imaginaries in which power and legitimacy are organized differently and, as a result, expressed differently in the technology. 
-={moral and technical order;Networks:protools for+3;Open Systems Interconnection (OSI), as reference model+27;protocols:Open Systems Interconnection (OSI)+27|TCP/IP;TCP/IP (Transmission Control Protocol/Internet Protocol)+27;religious wars+3;social imaginary;standards process+3} +={moral and technical order;Networks:protocols for+3;Open Systems Interconnection (OSI):as reference model+27;protocols:Open Systems Interconnection (OSI)+27|TCP/IP;TCP/IP (Transmission Control Protocol/Internet Protocol)+27;religious wars+3;social imaginary;standards processes+3} OSI and TCP/IP started with different goals: OSI was intended to satisfy everyone, to be the complete and comprehensive model against which all competing implementations would be validated; TCP/IP, by contrast, emphasized the easy and robust interconnection of diverse networks. TCP/IP is a protocol developed by bootstrapping between standard and implementation, a mode exemplified by the Requests for Comments system that developed alongside them as part of the Arpanet project. OSI was a "model" or reference standard developed by internationally respected standards organizations. ={Arpanet (network)+18;Request for Comments (RFC)} @@ -1453,7 +1453,7 @@ One important feature united almost all of these experiments: the networks of th ={antitrust} TCP/IP and OSI have become emblematic of the split between the worlds of telecommunications and computing; the metaphors of religious wars or of blood feuds and cold wars were common.~{ Drake, "The Internet Religious War." }~ A particularly arch account from this period is Carl Malamud’s Exploring the Internet: A Technical Travelogue, which documents Malamud’s (physical) visits to Internet sites around the globe, discussions (and beer) with networking researchers on technical details of the networks they have created, and his own typically geeky, occasionally offensive takes on cultural difference.~{ Malamud, Exploring the Internet; see also Michael M. J.
Fischer, "Worlding Cyberspace." }~ A subtheme of the story is the religious war between Geneva (in particular the ITU) and the Internet: Malamud tells the story of asking the ITU to release its 19,000-page "blue book" of standards on the Internet, to facilitate its adoption and spread. -={Malmud, Carl+1;standards process+4} +={Malamud, Carl+1;standards processes+4} The resistance of the ITU and Malamud’s heroic if quixotic attempts are a parable of the moral-technical imaginaries of openness—and indeed, his story draws specifically on the usable past of Giordano Bruno.~{ The usable past of Giordano Bruno is invoked by Malamud to signal the heretical nature of his own commitment to openly publishing standards that ISO was opposed to releasing. Bruno’s fate at the hands of the Roman Inquisition hinged in some part on his acceptance of the Copernican cosmology, so he has been, like Galileo, a natural figure for revolutionary claims during the 1990s. }~ The "bruno" project demonstrates the gulf that exists between two models of legitimacy—those of ISO and the ITU—in which standards represent the legal and legitimate consensus of a regulated industry, approved by member nations, paid for and enforced by governments, and implemented and adhered to by corporations. ={Bruno, Giordano;Usable pasts;International Organization for Standardization (ISO)+3} @@ -1472,10 +1472,10 @@ Until the mid-1980s, the TCP/IP protocols were resolutely research-oriented, and ={Cerf, Vinton+2;Kahn, Robert;TCP/IP (Transmission Control Protocol/Internet Protocol):goals of+2} The explicit goal of TCP/IP was thus to share computer resources, not necessarily to connect two individuals or firms together, or to create a competitive market in networks or networking software. Sharing between different kinds of networks implied allowing the different networks to develop autonomously (as their creators and maintainers saw best), but without sacrificing the ability to continue sharing.
Years later, David Clark, chief Internet engineer for several years in the 1980s, gave a much more explicit explanation of the goals that led to the TCP/IP protocols. In particular, he suggested that the main overarching goal was not just to share resources but "to develop an effective technique for multiplexed utilization of existing interconnected networks," and he more explicitly stated the issue of control that faced the designers: "Networks represent administrative boundaries of control, and it was an ambition of this project to come to grips with the problem of integrating a number ,{[pg 173]}, of separately administrated entities into a common utility."~{ Clark, "The Design Philosophy of the DARPA Internet Protocols," 54-55. }~ By placing the goal of expandability first, the TCP/IP protocols were designed with a specific kind of simplicity in mind: the test of the protocols’ success was simply the ability to connect. -={Clark,David} +={Clark, David} By setting different goals, TCP/IP and OSI thus differed in terms of technical details; but they also differed in terms of their context and legitimacy, one being a product of international-standards bodies, the other of military-funded research experiments. The technical and organizational differences imply different processes for standardization, and it is the peculiar nature of the so-called Requests for Comments (RFC) process that gave TCP/IP one of its most distinctive features. The RFC system is widely recognized as a unique and serendipitous outcome of the research process of Arpanet.~{ RFCs are archived in many places, but the official site is RFC Editor, http://www.rfc-editor.org/. }~ In a thirty-year retrospective (published, naturally, as an RFC: RFC 2555), Vint Cerf says, "Hiding in the history of the RFCs is the history of human institutions for achieving cooperative work." 
He goes on to describe their evolution over the years: "When the RFCs were first produced, they had an almost 19th century character to them—letters exchanged in public debating the merits of various design choices for protocols in the ARPANET. As email and bulletin boards emerged from the fertile fabric of the network, the far-flung participants in this historic dialog began to make increasing use of the online medium to carry out the discussion—reducing the need for documenting the debate in the RFCs and, in some respects, leaving historians somewhat impoverished in the process. RFCs slowly became conclusions rather than debates."~{ RFC Editor, RFC 2555, 6. }~ -={standards process;Request for Comments (RFC)+2} +={standards processes;Request for Comments (RFC)+2} Increasingly, they also became part of a system of discussion and implementation in which participants created working software as part of an experiment in developing the standard, after which there was more discussion, then perhaps more implementation, and finally, a standard. The RFC process was a way to condense the process of standardization and validation into implementation; which is to say, the proof of open systems was in the successful connection of diverse networks, and the creation of a standard became a kind of ex post facto rubber-stamping of this demonstration. Any further improvement of the standard hinged on an improvement on the standard implementation because the standards that resulted were freely and widely available: "A user could request an RFC by email from his host computer and have it automatically delivered to his mailbox. . . . RFCs were also shared freely with official standards ,{[pg 174]}, bodies, manufacturers and vendors, other working groups, and universities. None of the RFCs were ever restricted or classified. This was no mean feat when you consider that they were being funded by DoD during the height of the Cold War."~{ Ibid., 11. 
}~ ={Software:implementation of;standards:implementation+9|validation of;Secrecy+1} @@ -1554,7 +1554,7 @@ Stallman’s GNU General Public License "hacks" the federal copyright law, as is ={Copyleft licenses (component of Free Software):as hack of copyright law+1;Copyright+1} Like all software since the 1980 copyright amendments, Free Software is copyrightable—and what’s more, automatically copyrighted as it is written (there is no longer any requirement to register). Copyright law grants the author (or the employer of the author) a number of strong rights over the dispensation of what has been written: rights to copy, distribute, and change the work.~{ Copyright Act of 1976, Pub. L. No. 94-553, 90 Stat. 2541, enacted 19 October 1976; and Copyright Amendments, Pub. L. No. 96-517, 94 Stat. 3015, 3028 (amending §101 and §117, title 17, United States Code, regarding computer programs), enacted 12 December 1980. All amendments since 1976 are listed at http://www.copyright.gov/title17/92preface.html. }~ Free Software’s hack is to immediately make use of these rights in order to abrogate the rights the programmer has been given, thus granting all subsequent licensees rights to copy, distribute, modify, and use the copyrighted software. Some licenses, like the GPL, add the further restriction that every licensee must offer the same terms to any subsequent licensee, others make no such restriction on subsequent uses. Thus, while statutory law suggests that individuals need strong rights and grants them, Free Software licenses effectively annul them in favor of other activities, such as sharing, porting, and forking software. It is for this reason that they have earned the name "copyleft."~{ The history of the copyright and software is discussed in Litman, Digital Copyright; Cohen et al., Copyright in a Global Information Economy; and Merges, Menell, and Lemley, Intellectual Property in the New Technological Age. 
}~ -={Copyright:changes in|rights granted by} +={Copyright:changes in 1976|rights granted by} This is a convenient ex post facto description, however. Neither Stallman nor anyone else started out with the intention of hacking copyright law. The hack of the Free Software licenses was a response to a complicated controversy over a very important invention, a tool that in turn enabled an invention called EMACS. The story of the controversy is well-known among hackers and geeks, but not often told, and not in any rich detail, outside of these small circles.~{ See Wayner, Free for All; Moody, Rebel Code; and Williams, Free as in Freedom. Although this story could be told simply by interviewing Stallman and James Gosling, both of whom are still alive and active in the software world, I have chosen to tell it through a detailed analysis of the Usenet and Arpanet archives of the controversy. The trade-off is between a kind of incomplete, fly-on-the-wall access to a moment in history and the likely revisionist retellings of those who lived through it. All of the messages referenced here are cited by their "Message-ID," which should allow anyone interested to access the original messages through Google Groups (http://groups.google.com). }~ @@ -1840,10 +1840,10 @@ The final component of Free Software is coordination. For many participants and ={Free Software:open source vs.;Open Source:Free Software vs.;peer production;practices:five components of Free Software+2;Source Code Management tools (SCMs)} Coordination is important because it collapses and resolves the distinction between technical and social forms into a meaningful ,{[pg 211]}, whole for participants. 
On the one hand, there is the coordination and management of people; on the other, there is the coordination of source code, patches, fixes, bug reports, versions, and distributions—but together there is a meaningful technosocial practice of managing, decision-making, and accounting that leads to the collaborative production of complex software and networks. Such coordination would be unexceptional, essentially mimicking long-familiar corporate practices of engineering, except for one key fact: it has no goals. Coordination in Free Software privileges adaptability over planning. This involves more than simply allowing any kind of modification; the structure of Free Software coordination actually gives precedence to a generalized openness to change, rather than to the following of shared plans, goals, or ideals dictated or controlled by a hierarchy of individuals.~{ On the distinction between adaptability and adaptation, see Federico Iannacci, "The Linux Managing Model," http://opensource.mit.edu/papers/iannacci2.pdf. Matt Ratto characterizes the activity of Linux-kernel developers as a "culture of re-working" and a "design for re-design," and captures the exquisite details of such a practice both in coding and in the discussion between developers, an activity he dubs the "pressure of openness" that "results as a contradiction between the need to maintain productive collaborative activity and the simultaneous need to remain open to new development directions" ("The Pressure of Openness," 112-38). }~ -={adaptability:planning vs.+1|as a form of critique+1|adaptation vs.;coordination (component of Free Software):individual virtuosity vs. hierarchical planning+2;critique, Free Software+1;goals, lack of in Free Software+1;hackers:curiosity and virtuosity of+1;hierarchy, in coordination+5;planning+1} +={adaptability:planning vs.+1|as a form of critique+1|adaptation vs.;coordination (component of Free Software):individual virtuosity vs. 
hierarchical planning+2;critique, Free Software as+1;goals, lack of in Free Software+1;hackers:curiosity and virtuosity of+1;hierarchy, in coordination+5;planning+1} Adaptability does not mean randomness or anarchy, however; it is a very specific way of resolving the tension between the individual curiosity and virtuosity of hackers, and the collective coordination necessary to create and use complex software and networks. No man is an island, but no archipelago is a nation, so to speak. Adaptability preserves the "joy" and "fun" of programming without sacrificing the careful engineering of a stable product. Linux and Apache should be understood as the results of this kind of coordination: experiments with adaptability that have worked, to the surprise of many who have insisted that complexity requires planning and hierarchy. Goals and planning are the province of governance—the practice of goal-setting, orientation, and definition of control—but adaptability is the province of critique, and this is why Free Software is a recursive public: it stands outside power and offers powerful criticism in the form of working alternatives. It is not the domain of the new—after all Linux is just a rewrite of UNIX—but the domain of critical and responsive public direction of a collective undertaking. -={Linux (Free Software project)+8;novelty, of free software;recursive public+1} +={Linux (Free Software project)+8;novelty, of Free Software;recursive public+1} Linux and Apache are more than pieces of software; they are organizations of an unfamiliar kind. My claim that they are "recursive publics" is useful insofar as it gives a name to a practice that is neither corporate nor academic, neither profit nor nonprofit, neither governmental nor nongovernmental. The concept of recursive public includes, within the spectrum of political activity, the creation, modification, and maintenance of software, networks, and legal documents. 
While a "public" in most theories is a body of ,{[pg 212]}, people and a discourse that give expressive form to some concern, "recursive public" is meant to suggest that geeks not only give expressive form to some set of concerns (e.g., that software should be free or that intellectual property rights are too expansive) but also give concrete infrastructural form to the means of expression itself. Linux and Apache are tools for creating networks by which expression of new kinds can be guaranteed and by which further infrastructural experimentation can be pursued. For geeks, hacking and programming are variants of free speech and freedom of assembly. ={public sphere:theories of;Apache (Free Software project)+4;experimentation;infrastructure} @@ -2069,7 +2069,7 @@ Both the Apache project and the Linux kernel project use SCMs. In the case of Ap While SCMs are in general good for managing conflicting changes, they can do so only up to a point. To allow anyone to commit a change, however, could result in a chaotic mess, just as difficult to disentangle as it would be without an SCM. In practice, therefore, most projects designate a handful of people as having the right to "commit" changes. The Apache project retained its voting scheme, for instance, but it became a way of voting for "committers" instead for patches themselves. Trusted committers—those with the mysterious "good taste," or technical intuition—became the core members of the group. The Linux kernel has also struggled with various issues surrounding SCMs and the management of responsibility they imply. The story of the so-called VGER tree and the creation of a new SCM called Bitkeeper is exemplary in this respect.~{ See Steven Weber, The Success of Open Source, 117-19; Moody, Rebel Code, 172-78. See also Shaikh and Cornford, "Version Management Tools." }~ By 1997, Linux developers had begun to use cvs to manage changes to the source code, though not without resistance. 
Torvalds was still in charge of the changes to the official stable tree, but as other "lieutenants" came on board, the complexity of the changes to the kernel grew. One such lieutenant was Dave Miller, who maintained a "mirror" of the stable Linux kernel tree, the VGER tree, on a server at Rutgers. In September 1998 a fight broke out among Linux kernel developers over two related issues: one, the fact that Torvalds was failing to incorporate (patch) contributions that had been forwarded to him by various people, including his lieutenants; and two, as a result, the VGER cvs repository was no longer in synch with the stable tree maintained by Torvalds. Two different versions of Linux threatened to emerge. -={Miller, Dave;Source Code Management tools (SCMs):see also Bitkeeper;Concurrent Versioning System (cvs):Linux and;Linux (Free Software project):VGER tree and+2;Bitkeeper (Source Code Management software)+12;Torvalds, Linux:in bitkeeper controversy+12} +={Miller, Dave;Source Code Management tools (SCMs):see also Bitkeeper;Concurrent Versioning System (cvs):Linux and;Linux (Free Software project):VGER tree and+2;Bitkeeper (Source Code Management software)+12;Torvalds, Linus:in bitkeeper controversy+12} A great deal of yelling ensued, as nicely captured in Moody’s Rebel Code, culminating in the famous phrase, uttered by Larry McVoy: "Linus does not scale." The meaning of this phrase is that the ability of Linux to grow into an ever larger project with increasing complexity, one which can handle myriad uses and functions (to "scale" up), is constrained by the fact that there is only one Linus Torvalds. By all accounts, Linus was and is excellent at what he does—but there is only one Linus. The danger of this situation is the danger of a fork. A fork would mean one or more new versions would proliferate under new leadership, a situation much like ,{[pg 233]}, the spread of UNIX. Both the licenses and the SCMs are designed to facilitate this, but only as a last resort. 
Forking also implies dilution and confusion—competing versions of the same thing and potentially unmanageable incompatibilities. ={McVoy, Larry+11;Moody, Glyn;forking:in Linux+1} @@ -2172,7 +2172,7 @@ In part III I confront this question directly. Indeed, it was this question that ={cultural significance;recursive public+3;Free Software:components of+1} Connexions modulates all of the components except that of the movement (there is, as of yet, no real "Free Textbook" movement, but the "Open Access" movement is a close second cousin).~{ In January 2005, when I first wrote this analysis, this was true. By April 2006, the Hewlett Foundation had convened the Open Educational Resources "movement" as something that would transform the production and circulation of textbooks like those created by Connexions. Indeed, in Rich Baraniuk’s report for Hewlett, the first paragraph reads: "A grassroots movement is on the verge of sweeping through the academic world. The open education movement is based on a set of intuitions that are shared by a remarkably wide range of academics: that knowledge should be free and open to use and re-use; that collaboration should be easier, not harder; that people should receive credit and kudos for contributing to education and research; and that concepts and ideas are linked in unusual and surprising ways and not the simple linear forms that textbooks present. Open education promises to fundamentally change the way authors, instructors, and students interact worldwide" (Baraniuk and King, "Connexions"). (In a nice confirmation of just how embedded participation can become in anthropology, Baraniuk cribbed the second sentence from something I had written two years earlier as part of a description of what I thought Connexions hoped to achieve.) The "movement" as such still does not quite exist, but the momentum for it is clearly part of the actions that Hewlett hopes to achieve. 
}~ Perhaps the most complex modulation concerns coordination—changes to the practice of coordination and collaboration in academic-textbook creation in particular, and more generally to the nature of collaboration and coordination of knowledge in science and scholarship generally. -={coordination (components of Free Software);movement (component of Free Software)+2} +={coordination (component of Free Software);movement (component of Free Software)+2} Connexions emerged out of Free Software, and not, as one might expect, out of education, textbook writing, distance education, or any of those areas that are topically connected to pedagogy. That is to say, the people involved did not come to their project by attempting to deal with a problem salient to education and teaching as much as they did so through the problems raised by Free Software and the question of how those problems apply to university textbooks. Similarly, a second project, Creative Commons, also emerged out of a direct engagement with and exploration of Free Software, and not out of any legal movement or scholarly commitment to the critique of intellectual-property law or, more important, out of any desire to transform the entertainment industry. Both projects are resolutely committed to experimenting with the given practices of Free Software—to testing their limits and changing them where they can—and this is what makes them vibrant, risky, and potentially illuminating as cases of a recursive public. ={affinity (of geeks);commons+1;Creative Commons+1;pedagogy;recursive public:examples of+1} @@ -2194,7 +2194,7 @@ Around 1998 or 1999, Rich decided that it was time for him to write a textbook o ={Burris, C. Sidney;Connexions project:textbooks and+4;Rice University} At about the same time as his idea for a textbook, Rich’s research group was switching over to Linux, and Rich was first learning about Open Source and the emergence of a fully free operating system created entirely by volunteers. 
It isn’t clear what Rich’s aha! moment was, other than simply when he came to an understanding that such a thing as Linux was actually possible. Nonetheless, at some point, Rich had the idea that his textbook could be an Open Source textbook, that is, a textbook created not just by him, but by DSP researchers all over the world, and made available to everyone to make use of and modify and improve as they saw fit, just like Linux. Together with Brent Hendricks, Yan David Erlich, ,{[pg 249]}, and Ross Reedstrom, all of whom, as geeks, had a deep familiarity with the history and practices of Free and Open Source Software, Rich started to conceptualize a system; they started to think about modulations of different components of Free and Open Source Software. The idea of a Free Software textbook repository slowly took shape. -={Linux (Free Software project);Open Source:inspiration for Connexions+27;Reedstorm, Ross} +={Linux (Free Software project);Open Source:inspiration for Connexions+27;Reedstrom, Ross} Thus, Connexions: an "open content repository of high-quality educational materials." These "textbooks" very quickly evolved into something else: "modules" of content, something that has never been sharply defined, but which corresponds more or less to a small chunk of teachable information, like two or three pages in a textbook. Such modules are much easier to conceive of in sciences like mathematics or biology, in which textbooks are often multiauthored collections, finely divided into short chapters with diagrams, exercises, theorems, or programs. Modules lend themselves much less well to a model of humanities or social-science scholarship based in reading texts, discussion, critique, and comparison—and this bias is a clear reflection of what Brent, Ross, and Rich knew best in terms of teaching and writing. 
Indeed, the project’s frequent recourse to the image of an assembly-line model of knowledge production often confirms the worst fears of humanists and educators when they first encounter Connexions. The image suggests that knowledge comes in prepackaged and colorfully branded tidbits for the delectation of undergrads, rather than characterizing knowledge as a state of being or as a process. ={Connexions project:model of learning in|modules in+1} @@ -2210,7 +2210,7 @@ Free Software—and, in particular, Open Source in the guise of "self-organizing ={Connexions project:relationship to education+2;distance learning+2} Thus, Rich styled Connexions as more than just a factory of knowledge—it would be a community or culture developing richly associative and novel kinds of textbooks—and as much more than just distance education. Indeed, Connexions was not the only such project busy differentiating itself from the perceived dangers of distance education. In April 2001 MIT had announced that it would make the content of all of its courses available for free online in a project strategically called OpenCourseWare (OCW). Such news could only bring attention to MIT, which explicitly positioned the announcement as a kind of final death blow to the idea of distance education, by saying that what students pay $35,000 and up for per year is not "knowledge"—which is free—but the experience of being at MIT. The announcement created pure profit from the perspective of MIT’s reputation as a generator and disseminator of scientific knowledge, but the project did not emerge directly out of an interest in mimicking the success of Open Source. That angle was ,{[pg 252]}, provided ultimately by the computer-science professor Hal Abelson, whose deep understanding of the history and growth of Free Software came from his direct involvement in it as a long-standing member of the computer-science community at MIT. 
OCW emerged most proximately from the strange result of a committee report, commissioned by the provost, on how MIT should position itself in the "distance/e-learning" field. The surprising response: don’t do it, give the content away and add value to the campus teaching and research experience instead.~{ "Provost Announces Formation of Council on Educational Technology," MIT Tech Talk, 29 September 1999, http://web.mit.edu/newsoffice/1999/council-0929.html. }~ -={Abelson, Hal;Massachusetts Institute of Technology (MIT):open courseware and+2;Open CourseWare (OCW)+2;Connexions poject:Open CourseWare+2} +={Abelson, Hal;Massachusetts Institute of Technology (MIT):open courseware and+2;Open CourseWare (OCW)+2;Connexions project:Open CourseWare+2} OCW, Connexions, and distance learning, therefore, while all ostensibly interested in combining education with the networks and software, emerged out of different demands and different places. While the profit-driven demand of distance learning fueled many attempts around the country, it stalled in the case of OCW, largely because the final MIT Council on Educational Technology report that recommended OCW was issued at the same time as the first plunge in the stock market (April 2000). Such issues were not a core factor in the development of Connexions, which is not to say that the problems of funding and sustainability have not always been important concerns, only that genesis of the project was not at the administrative level or due to concerns about distance education. For Rich, Brent, and Ross the core commitment was to openness and to the success of Open Source as an experiment with massive, distributed, Internet-based, collaborative production of software—their commitment to this has been, from the beginning, completely and adamantly unwavering. Neverthless, the project has involved modulations of the core features of Free Software. 
Such modulations depend, to a certain extent, on being a project that emerges out of the ideas and practices of Free Software, rather than, as in the case of OCW, one founded as a result of conflicting goals (profit and academic freedom) and resulting in a strategic use of public relations to increase the symbolic power of the university over its fiscal growth. ={Reedstrom, Ross} @@ -2278,7 +2278,7 @@ Creative Commons provided more than licenses, though. It was part of a social im ={moral and technical order;social imaginary} Creative Commons was thus a back-door approach: if the laws could not be changed, then people should be given the tools they needed to work around those laws. Understanding how Creative Commons was conceived requires seeing it as a modulation of both the notion of "source code" and the modulation of "copyright licenses." But the modulations take place in that context of a changing legal system that was so unfamiliar to Stallman and his EMACS users, a legal system responding to new forms of software, networks, and devices. For instance, the changes to the Copyright Act of 1976 created an unintended effect that Creative Commons would ultimately seize on. By eliminating the requirement to register copyrighted works (essentially granting copyright as soon as the ,{[pg 261]}, work is "fixed in a tangible medium"), the copyright law created a situation wherein there was no explicit way in which a work could be intentionally placed in the public domain. Practically speaking an author could declare that a work was in the public domain, but legally speaking the risk would be borne entirely by the person who sought to make use of that work: to copy it, transform it, sell it, and so on. 
With the explosion of interest in the Internet, the problem ramified exponentially; it became impossible to know whether someone who had placed a text, an image, a song, or a video online intended for others to make use of it—even if the author explicitly declared it "in the public domain." Creative Commons licenses were thus conceived and rhetorically positioned as tools for making explicit exactly what uses could be made of a specific work. They protected the rights of people who sought to make use of "culture" (i.e., materials and ideas and works they had not authored), an approach that Lessig often summed up by saying, "Culture always builds on the past." -={copyright:requirement to register;sharing source code (component of Free Software):modulations of;creative commons:activism of+1;public domain+4} +={copyright:requirement to register;sharing source code (component of Free Software):modulations of;Creative Commons:activism of+1;public domain+4} The background to and context of the emergence of Creative Commons was of course much more complicated and fraught. Concerns ranged from the plights of university libraries with regard to high-priced journals, to the problem of documentary filmmakers unable to afford, or even find the owners of, rights to use images or snippets in films, to the high-profile fights over online music trading, Napster, and the RIAA. Over the course of four years, Lessig and the other founders of Creative Commons would address all of these issues in books, in countless talks and presentations and conferences around the world, online and off, among audiences ranging from software developers to entrepreneurs to musicians to bloggers to scientists. 
={Napster;Recording Industry Association of America (RIAA)} -- cgit v1.2.3 From 2648c5bc1ad0f0503eadb737198034973598bfa4 Mon Sep 17 00:00:00 2001 From: Ralph Amissah Date: Sun, 22 Aug 2010 17:59:40 -0400 Subject: man page, update --- man/man1/sisu-markup-samples.1 | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/man/man1/sisu-markup-samples.1 b/man/man1/sisu-markup-samples.1 index aecd24e..3b18dcd 100644 --- a/man/man1/sisu-markup-samples.1 +++ b/man/man1/sisu-markup-samples.1 @@ -1,4 +1,4 @@ -.TH SISU\-MARKUP\-SAMPLES 7 2010\-06\-19 2.5.1 SiSU +.TH SISU\-MARKUP\-SAMPLES 7 2010\-08\-18 2.6.3 SiSU .SH NAME \fBSiSU\fR \- Structured information, Serialized Units \- a document publishing system, complete dependency package @@ -73,6 +73,11 @@ un_contracts_international_sale_of_goods_convention_1980.sst .BR UN Contracts for International Sale of Goods, UN +.TP +viral_spiral.david_bollier.sst +.BR +Viral Spiral, David Bollier + .SH SEE ALSO \fIsisu\fR(1), .BR -- cgit v1.2.3 From f34ed25bce90e8f2fcedacdce5e1b6a3b785c4c3 Mon Sep 17 00:00:00 2001 From: Ralph Amissah Date: Sun, 22 Aug 2010 18:03:58 -0400 Subject: content & copyright information update --- COPYRIGHT | 8 ++++++++ data/README | 8 ++++++++ 2 files changed, 16 insertions(+) diff --git a/COPYRIGHT b/COPYRIGHT index b8c543a..c007258 100644 --- a/COPYRIGHT +++ b/COPYRIGHT @@ -79,6 +79,14 @@ Author: Yochai Benkler Copyright: 2006 Yochai Benkler License: Attribution-Noncommercial-Share Alike (CC-BY-NC-SA) 2.5 URL: + +Text: Viral Spiral + URL: +Author: David Bollier + URL: +Copyright: 2008 David Bollier +License: Attribution-Noncommercial (CC-BY-NC) 3.0 + URL: Text: Democratizing Innovation URL: diff --git a/data/README b/data/README index 8528fe0..b3ad64f 100644 --- a/data/README +++ b/data/README @@ -53,6 +53,14 @@ maintained and often that the works be used only non-commercially License: Attribution-Noncommercial-No Derivative Works (CC-BY-NC-ND) 2.0 URL: + Text: Viral Spiral + URL: + Author: 
David Bollier + URL: + Copyright: 2008 David Bollier + License: Attribution-Noncommercial (CC-BY-NC) 3.0 + URL: + Text: Two Bits - The Cultural Significance of Free Software URL: Author: Christopher Kelty -- cgit v1.2.3 From 8b39c09f4aaa6c429f9eb46fb0f106dc23cbb469 Mon Sep 17 00:00:00 2001 From: Ralph Amissah Date: Sun, 22 Aug 2010 18:07:31 -0400 Subject: CHANGELOG update --- CHANGELOG | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/CHANGELOG b/CHANGELOG index 518f4b7..ecab305 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -17,6 +17,38 @@ http://www.jus.uio.no/sisu/archive/pool/non-free/s/sisu-markup-samples/sisu-mark * CONTENT, remove image based on original book cover + (2010-08-22:33/7) + sisu-markup-samples_2.0.5-2.dsc + sisu-markup-samples_2.0.5-2.diff.gz + + [debian in freeze for squeeze, the only changes are documentation related] + + * Viral Spiral, David Bollier markup sample added + + * Book index fixes to existing markup samples + + * Minor adjustments to markup sample headers where appropriate + + * miscellaneous fixes, + * add missing skin for v1 markup of "CONTENT", Cory Doctorow + * "CONTENT", use preferred curly braces with emphasized text (v1 & v2) + * "Free Culture", document structure + * add v1 markup for "Democratizing Innovation", Eric von Hippel + * (headers & yaml), links added/updated + + * COPYRIGHT and README update on contents + + * Manpage update + + * debian markup-samples headers tended to + + * debian/control + * update contents + * make description match sisu + * update Standards-Version 3.9.1 + * update Maintainers + * add Uploaders field + %% sisu-markup-samples_2.0.4.orig.tar.gz (2010-06-27:25/7) http://www.jus.uio.no/sisu/archive/pool/non-free/s/sisu-markup-samples/sisu-markup-samples_2.0.4.orig.tar.gz ca6001d7373ad324356d08c8d043f444363bd9a7fe5f71e9f74586c31cded21e 14056624 sisu-markup-samples_2.0.4.orig.tar.gz -- cgit v1.2.3 From 4d5869b54dc05c63ff6c232950d3b918c6f1cbd7 Mon Sep 17 00:00:00 2001 
From: Ralph Amissah Date: Tue, 24 Aug 2010 12:00:43 -0400 Subject: CHANGELOG, package version & date --- CHANGELOG | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index ecab305..8069b0a 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -6,20 +6,11 @@ Reverse Chronological: %% STABLE MANIFEST -%% sisu-markup-samples_2.0.5.orig.tar.gz (2010-07-15:28/4) -http://www.jus.uio.no/sisu/archive/pool/non-free/s/sisu-markup-samples/sisu-markup-samples_2.0.5.orig.tar.gz - 45366204c646b6f9fd751040cfe4e0380554922e79048746e66bca340ec919f4 14114039 sisu-markup-samples_2.0.5.orig.tar.gz - 4c9d44b86d2a1b6c5d6a86c8c4f1ad3805d72d51101864f4003c3870b6fd48cc 1241 sisu-markup-samples_2.0.5-1.dsc - 6489c9b3d054fee34295671969b87fb4ae699f8752e0b8c2a74ca62d24e61af3 5316 sisu-markup-samples_2.0.5-1.diff.gz - - * Little Brother, use image by Richard Wilkinson (image 4 cropped), - (replacing previous image used based on original book cover) - - * CONTENT, remove image based on original book cover - - (2010-08-22:33/7) - sisu-markup-samples_2.0.5-2.dsc - sisu-markup-samples_2.0.5-2.diff.gz +%% sisu-markup-samples_2.0.6.orig.tar.gz (2010-08-24:34/2) +http://www.jus.uio.no/sisu/archive/pool/non-free/s/sisu-markup-samples/sisu-markup-samples_2.0.6.orig.tar.gz + sisu-markup-samples_2.0.6.orig.tar.gz + sisu-markup-samples_2.0.6-1.dsc + sisu-markup-samples_2.0.6-1.diff.gz [debian in freeze for squeeze, the only changes are documentation related] @@ -49,6 +40,17 @@ http://www.jus.uio.no/sisu/archive/pool/non-free/s/sisu-markup-samples/sisu-mark * update Maintainers * add Uploaders field +%% sisu-markup-samples_2.0.5.orig.tar.gz (2010-07-15:28/4) +http://www.jus.uio.no/sisu/archive/pool/non-free/s/sisu-markup-samples/sisu-markup-samples_2.0.5.orig.tar.gz + 45366204c646b6f9fd751040cfe4e0380554922e79048746e66bca340ec919f4 14114039 sisu-markup-samples_2.0.5.orig.tar.gz + 4c9d44b86d2a1b6c5d6a86c8c4f1ad3805d72d51101864f4003c3870b6fd48cc 1241 
sisu-markup-samples_2.0.5-1.dsc + 6489c9b3d054fee34295671969b87fb4ae699f8752e0b8c2a74ca62d24e61af3 5316 sisu-markup-samples_2.0.5-1.diff.gz + + * Little Brother, use image by Richard Wilkinson (image 4 cropped), + (replacing previous image used based on original book cover) + + * CONTENT, remove image based on original book cover + %% sisu-markup-samples_2.0.4.orig.tar.gz (2010-06-27:25/7) http://www.jus.uio.no/sisu/archive/pool/non-free/s/sisu-markup-samples/sisu-markup-samples_2.0.4.orig.tar.gz ca6001d7373ad324356d08c8d043f444363bd9a7fe5f71e9f74586c31cded21e 14056624 sisu-markup-samples_2.0.4.orig.tar.gz -- cgit v1.2.3