changeset 6:1b2188262ae9

adding the installer.
author jurzua <jurzua@mpiwg-berlin.mpg.de>
date Wed, 13 May 2015 11:50:21 +0200
parents dd9adfc73390
children 6750dab0f86e
files DVN-web/installer/dvninstall/appdeploy/AS.properties.TEMPLATE DVN-web/installer/dvninstall/appdeploy/ant-deploy.xml DVN-web/installer/dvninstall/appdeploy/build-impl.xml DVN-web/installer/dvninstall/appdeploy/build.xml DVN-web/installer/dvninstall/appdeploy/glassfish.properties.TEMPLATE DVN-web/installer/dvninstall/appdeploy/private.properties DVN-web/installer/dvninstall/appdeploy/project.properties DVN-web/installer/dvninstall/config/dcmi_terms2ddi.xsl DVN-web/installer/dvninstall/config/dvn_data_functions.R DVN-web/installer/dvninstall/config/error.xsl DVN-web/installer/dvninstall/config/fgdc2ddi.xsl DVN-web/installer/dvninstall/config/graphml.props DVN-web/installer/dvninstall/config/header.xsl DVN-web/installer/dvninstall/config/jhove.conf DVN-web/installer/dvninstall/config/logging.properties DVN-web/installer/dvninstall/config/metadata.xsl DVN-web/installer/dvninstall/config/mif2ddi.xsl DVN-web/installer/dvninstall/config/neodb.props DVN-web/installer/dvninstall/config/networkData/lib/collections-generic-4.01.jar DVN-web/installer/dvninstall/config/networkData/lib/colt-1.2.0.jar DVN-web/installer/dvninstall/config/networkData/lib/concurrent-1.3.4.jar DVN-web/installer/dvninstall/config/networkData/lib/geronimo-jta_1.1_spec-1.1.1.jar DVN-web/installer/dvninstall/config/networkData/lib/jung-algorithms-2.0.jar DVN-web/installer/dvninstall/config/networkData/lib/jung-api-2.0.jar DVN-web/installer/dvninstall/config/networkData/lib/jung-visualization-2.0.jar DVN-web/installer/dvninstall/config/networkData/lib/junit-3.8.1.jar DVN-web/installer/dvninstall/config/networkData/lib/lucene-core-2.9.2.jar DVN-web/installer/dvninstall/config/networkData/lib/neo4j-index-1.1.jar DVN-web/installer/dvninstall/config/networkData/lib/neo4j-kernel-1.1.jar DVN-web/installer/dvninstall/config/networkData/lib/neo4j-utils-1.1.jar DVN-web/installer/dvninstall/config/networkData/lib/nestedvm-1.0.jar DVN-web/installer/dvninstall/config/networkData/lib/network_utils-1.0-SNAPSHOT.jar DVN-web/installer/dvninstall/config/networkData/lib/sqlite-jdbc-3.6.16.jar DVN-web/installer/dvninstall/config/oai_dc2ddi.xsl DVN-web/installer/dvninstall/config/oaicat.properties DVN-web/installer/dvninstall/doc/guides/_images/application-octet-stream.png DVN-web/installer/dvninstall/doc/guides/_images/application-pdf.png DVN-web/installer/dvninstall/doc/guides/_images/complex_exploration.png DVN-web/installer/dvninstall/doc/guides/_images/complex_graph_screenshot.png DVN-web/installer/dvninstall/doc/guides/_images/displaytabscreenshot.png DVN-web/installer/dvninstall/doc/guides/_images/editfiltersscreenshot.png DVN-web/installer/dvninstall/doc/guides/_images/editmeasuresscreenshot.png DVN-web/installer/dvninstall/doc/guides/_images/edittimevariablescreenshot.png DVN-web/installer/dvninstall/doc/guides/_images/measure_selected.png DVN-web/installer/dvninstall/doc/guides/_images/simple_explore_data.png DVN-web/installer/dvninstall/doc/guides/_images/sourcetabscreenshot.png DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-R-ingest.txt DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-api-main.txt DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-developer-main.txt DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-installer-main.txt DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-user-main.txt DVN-web/installer/dvninstall/doc/guides/_sources/index.txt DVN-web/installer/dvninstall/doc/guides/_static/agogo.css DVN-web/installer/dvninstall/doc/guides/_static/ajax-loader.gif 
DVN-web/installer/dvninstall/doc/guides/_static/basic.css DVN-web/installer/dvninstall/doc/guides/_static/bgfooter.png DVN-web/installer/dvninstall/doc/guides/_static/bgtop.png DVN-web/installer/dvninstall/doc/guides/_static/comment-bright.png DVN-web/installer/dvninstall/doc/guides/_static/comment-close.png DVN-web/installer/dvninstall/doc/guides/_static/comment.png DVN-web/installer/dvninstall/doc/guides/_static/doctools.js DVN-web/installer/dvninstall/doc/guides/_static/down-pressed.png DVN-web/installer/dvninstall/doc/guides/_static/down.png DVN-web/installer/dvninstall/doc/guides/_static/file.png DVN-web/installer/dvninstall/doc/guides/_static/jquery.js DVN-web/installer/dvninstall/doc/guides/_static/logo.png DVN-web/installer/dvninstall/doc/guides/_static/minus.png DVN-web/installer/dvninstall/doc/guides/_static/plus.png DVN-web/installer/dvninstall/doc/guides/_static/pygments.css DVN-web/installer/dvninstall/doc/guides/_static/searchtools.js DVN-web/installer/dvninstall/doc/guides/_static/underscore.js DVN-web/installer/dvninstall/doc/guides/_static/up-pressed.png DVN-web/installer/dvninstall/doc/guides/_static/up.png DVN-web/installer/dvninstall/doc/guides/_static/websupport.js DVN-web/installer/dvninstall/doc/guides/dataverse-R-ingest.html DVN-web/installer/dvninstall/doc/guides/dataverse-api-main.html DVN-web/installer/dvninstall/doc/guides/dataverse-developer-main.html DVN-web/installer/dvninstall/doc/guides/dataverse-installer-main.html DVN-web/installer/dvninstall/doc/guides/dataverse-user-main.html DVN-web/installer/dvninstall/doc/guides/genindex.html DVN-web/installer/dvninstall/doc/guides/index.html DVN-web/installer/dvninstall/doc/guides/objects.inv DVN-web/installer/dvninstall/doc/guides/search.html DVN-web/installer/dvninstall/doc/guides/searchindex.js DVN-web/installer/dvninstall/domain.xml DVN-web/installer/dvninstall/domain.xml.TEMPLATE DVN-web/installer/dvninstall/install DVN-web/installer/dvninstall/install~ DVN-web/installer/dvninstall/pgdriver/postgresql-8.3-603.jdbc4.jar DVN-web/installer/dvninstall/pgdriver/postgresql-8.4-703.jdbc4.jar DVN-web/installer/dvninstall/pgdriver/postgresql-9.0-802.jdbc4.jar DVN-web/installer/dvninstall/pgdriver/postgresql-9.1-902.jdbc4.jar DVN-web/installer/dvninstall/referenceData.sql DVN-web/installer/dvninstall/referenceData.sql.TEMPLATE DVN-web/installer/dvninstall/robots.txt DVN-web/installer/dvninstall/web-core.jar
diffstat 96 files changed, 27256 insertions(+), 0 deletions(-)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/appdeploy/AS.properties.TEMPLATE	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,1 @@
+AS_ADMIN_PASSWORD=%GF_ADMIN_PASSWORD%
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/appdeploy/ant-deploy.xml	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,111 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project default="-deploy-ant" basedir=".">
+    <target name="-init-cl-deployment-env" if="deploy.ant.enabled">
+        <property file="${deploy.ant.properties.file}" />
+        <available file="${deploy.ant.docbase.dir}/WEB-INF/sun-web.xml" property="sun.web.present"/>
+        <available file="${deploy.ant.docbase.dir}/WEB-INF/glassfish-web.xml" property="glassfish.web.present"/>
+        <available file="${deploy.ant.resource.dir}" property="has.setup"/>
+        <tempfile prefix="gfv3" property="gfv3.password.file" destdir="${java.io.tmpdir}"/>  <!-- do not forget to delete this! -->
+        <echo message="AS_ADMIN_PASSWORD=${gfv3.password}" file="${gfv3.password.file}"/>
+    </target>
+    
+    <target name="-parse-sun-web" depends="-init-cl-deployment-env" if="sun.web.present">
+        <tempfile prefix="gfv3" property="temp.sun.web" destdir="${java.io.tmpdir}"/>
+        <copy file="${deploy.ant.docbase.dir}/WEB-INF/sun-web.xml" tofile="${temp.sun.web}"/>
+        <!-- The doctype triggers resolution which can fail -->
+        <replace file="${temp.sun.web}">
+            <replacetoken><![CDATA[<!DOCTYPE]]></replacetoken>
+            <replacevalue><![CDATA[<!-- <!DOCTYPE]]></replacevalue>
+        </replace>
+        <replace file="${temp.sun.web}">
+            <replacetoken><![CDATA[<sun-web-app]]></replacetoken>
+            <replacevalue><![CDATA[--> <sun-web-app]]></replacevalue>
+        </replace>
+        <xmlproperty file="${temp.sun.web}" validate="false">
+        </xmlproperty>    
+        <delete file="${temp.sun.web}"/>
+        <condition property="deploy.ant.client.url" value="${gfv3.url}${sun-web-app.context-root}" else="${gfv3.url}/${ant.project.name}">
+            <isset property="sun-web-app.context-root"/>
+        </condition>
+        <condition property="deploy.context.root.argument" value="&amp;contextroot=${sun-web-app.context-root}" else="/${ant.project.name}">
+            <isset property="sun-web-app.context-root"/>
+        </condition>
+    </target>
+    <target name="-parse-glassfish-web" depends="-init-cl-deployment-env" if="glassfish.web.present">
+        <tempfile prefix="gfv3" property="temp.gf.web" destdir="${java.io.tmpdir}"/>
+        <copy file="${deploy.ant.docbase.dir}/WEB-INF/glassfish-web.xml" tofile="${temp.gf.web}"/>
+        <!-- The doctype triggers resolution which can fail -->
+        <replace file="${temp.gf.web}">
+            <replacetoken><![CDATA[<!DOCTYPE]]></replacetoken>
+            <replacevalue><![CDATA[<!-- <!DOCTYPE]]></replacevalue>
+        </replace>
+        <replace file="${temp.gf.web}">
+            <replacetoken><![CDATA[<glassfish-web-app]]></replacetoken>
+            <replacevalue><![CDATA[--> <glassfish-web-app]]></replacevalue>
+        </replace>
+        <xmlproperty file="${temp.gf.web}" validate="false">
+        </xmlproperty>
+        <delete file="${temp.gf.web}"/>
+        <condition property="deploy.ant.client.url" value="${gfv3.url}${glassfish-web-app.context-root}" else="${gfv3.url}/${ant.project.name}">
+            <isset property="glassfish-web-app.context-root"/>
+        </condition>
+        <condition property="deploy.context.root.argument" value="&amp;contextroot=${glassfish-web-app.context-root}" else="/${ant.project.name}">
+            <isset property="glassfish-web-app.context-root"/>
+        </condition>
+    </target>
+    <target name="-no-parse-sun-web" depends="-init-cl-deployment-env" unless="sun.web.present">
+        <property name="deploy.context.root.argument" value=""/>
+    </target>
+    <target name="-add-resources" depends="-init-cl-deployment-env" if="has.setup">
+        <tempfile prefix="gfv3" property="gfv3.resources.dir" destdir="${java.io.tmpdir}"/>
+        <mkdir dir="${gfv3.resources.dir}"/>
+        <mkdir dir="${gfv3.resources.dir}/META-INF"/>
+        <copy todir="${gfv3.resources.dir}/META-INF">
+            <fileset dir="${deploy.ant.resource.dir}"/>
+        </copy>
+        <jar destfile="${deploy.ant.archive}" update="true">
+            <fileset dir="${gfv3.resources.dir}"/>
+        </jar>
+        <delete dir="${gfv3.resources.dir}"/>
+    </target>
+    <target name="-deploy-ant" depends="-parse-glassfish-web, -parse-sun-web, -no-parse-sun-web,-add-resources" if="deploy.ant.enabled">
+        <antcall target="-deploy-without-pw"/>
+        <antcall target="-deploy-with-pw"/>
+    </target>
+
+    <target name="-deploy-without-pw" unless="gfv3.password">
+        <echo message="Deploying ${deploy.ant.archive}"/>
+        <tempfile prefix="gfv3" property="gfv3.results.file" destdir="${java.io.tmpdir}"/>  <!-- do not forget to delete this! -->
+        <property name="full.deploy.ant.archive" location="${deploy.ant.archive}"/>
+        <get src="${gfv3.admin.url}/__asadmin/deploy?path=${full.deploy.ant.archive}${deploy.context.root.argument}&amp;force=true&amp;name=${ant.project.name}"
+            dest="${gfv3.results.file}"/>
+        <delete file="${gfv3.results.file}"/>    
+    </target>
+    <target name="-deploy-with-pw" if="gfv3.password">
+        <echo message="Deploying ${deploy.ant.archive}"/>
+        <tempfile prefix="gfv3" property="gfv3.results.file" destdir="${java.io.tmpdir}"/>  <!-- do not forget to delete this! -->
+        <property name="full.deploy.ant.archive" location="${deploy.ant.archive}"/>
+        <get username="${gfv3.username}" password="${gfv3.password}" src="${gfv3.admin.url}/__asadmin/deploy?path=${full.deploy.ant.archive}${deploy.context.root.argument}&amp;force=true&amp;name=${ant.project.name}"
+            dest="${gfv3.results.file}"/>
+        <delete file="${gfv3.results.file}"/>
+    </target>
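+    <!-- Note: the deploy targets above drive GlassFish's HTTP admin listener;
+         the request amounts to a plain GET of, with hypothetical example values,
+         http://127.0.0.1:4848/__asadmin/deploy?path=/tmp/DVN-web.war&force=true&name=DVN-web -->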
+    <target name="-undeploy-ant" depends="-init-cl-deployment-env" if="deploy.ant.enabled">
+        <antcall target="-undeploy-without-pw"/>
+        <antcall target="-undeploy-with-pw"/>
+    </target>
+
+    <target name="-undeploy-without-pw" unless="gfv3.password">
+        <echo message="Undeploying ${deploy.ant.archive}"/>
+        <tempfile prefix="gfv3" property="gfv3.results.file" destdir="${java.io.tmpdir}"/>  <!-- do not forget to delete this! -->
+        <get src="${gfv3.admin.url}/__asadmin/undeploy?name=${ant.project.name}"
+            dest="${gfv3.results.file}"/>
+        <delete file="${gfv3.results.file}"/>    
+    </target>
+    <target name="-undeploy-with-pw" if="gfv3.password">
+        <echo message="Undeploying ${deploy.ant.archive}"/>
+        <tempfile prefix="gfv3" property="gfv3.results.file" destdir="${java.io.tmpdir}"/>  <!-- do not forget to delete this! -->
+        <get username="${gfv3.username}" password="${gfv3.password}" src="${gfv3.admin.url}/__asadmin/undeploy?name=${ant.project.name}"
+            dest="${gfv3.results.file}"/>
+        <delete file="${gfv3.results.file}"/>
+    </target>
+</project>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/appdeploy/build-impl.xml	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,17 @@
+<project name="DVN-web-impl" default="build" basedir=".." xmlns:ear2="http://www.netbeans.org/ns/j2ee-earproject/2">
+    <import file="ant-deploy.xml"/>
+    <target name="default" depends="run-deploy" description="Deploy project."/>
+    <target name="init-private" >
+        <property file="private.properties"/>
+    </target>
+    <target name="init-project" depends="init-private">
+        <property file="project.properties"/>
+    </target>
+    <target name="run-deploy" depends="init-project,-init-deploy-ant,-deploy-ant"/>
+    <target name="-init-deploy-ant" unless="netbeans.home">
+        <property name="deploy.ant.archive" value="${dist.jar}"/>
+        <property name="deploy.ant.resource.dir" value="${resource.dir}"/>
+        <property name="deploy.ant.enabled" value="true"/>
+    </target>
+    <target name="run-undeploy" depends="-init-deploy-ant,-undeploy-ant"/>
+</project>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/appdeploy/build.xml	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- You may freely edit this file. See commented blocks below for -->
+<!-- some examples of how to customize the build. -->
+<!-- (If you delete it and reopen the project it will be recreated.) -->
+<project name="DVN-web" default="default" basedir="." xmlns:ear="http://www.netbeans.org/ns/j2ee-earproject/2">
+    <description>Builds, tests, and runs the project DVN-web.</description>
+    <import file="build-impl.xml"/>
+</project>
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/appdeploy/glassfish.properties.TEMPLATE	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,6 @@
+gfv3.port=4848
+gfv3.host=127.0.0.1
+gfv3.admin.url=http\://${gfv3.host}\:${gfv3.port}
+gfv3.username=admin
+gfv3.root=%GF_ROOT_DIR%/glassfish
+gfv3.url=http\://${gfv3.host}\:8080
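+# %GF_ROOT_DIR% above is a placeholder, apparently substituted by the install
+# script in the same way %GF_ADMIN_PASSWORD% is in AS.properties.TEMPLATE.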
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/appdeploy/private.properties	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,1 @@
+deploy.ant.properties.file=glassfish.properties
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/appdeploy/project.properties	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,39 @@
+build.archive.dir=${build.dir}/jar
+build.classes.dir=${build.archive.dir}
+build.classes.excludes=**/*.java,**/*.form,**/.nbattrs
+build.dir=build
+build.generated.dir=${build.dir}/generated
+client.module.uri=DVN-web.war
+client.urlPart=
+compile.jsps=true
+debug.classpath=${javac.classpath}:${build.classes.dir}:${jar.content.additional}:${run.classpath}
+display.browser=true
+dist.dir=dist
+dist.jar=${dist.dir}/DVN-web.war
+dist.javadoc.dir=${dist.dir}/javadoc
+j2ee.appclient.mainclass.args=-client ${dist.jar} ${j2ee.appclient.args}
+j2ee.platform=1.6
+j2ee.server.type=gfv3ee6
+jar.compress=false
+jar.name=DVN-web.war
+javac.debug=true
+javac.deprecation=false
+javac.source=${default.javac.source}
+javac.target=${default.javac.target}
+javadoc.author=false
+javadoc.encoding=
+javadoc.noindex=false
+javadoc.nonavbar=false
+javadoc.notree=false
+javadoc.preview=true
+javadoc.private=false
+javadoc.splitindex=true
+javadoc.use=true
+javadoc.version=false
+javadoc.windowtitle=
+meta.inf=src/conf
+no.dependencies=false
+platform.active=default_platform
+resource.dir=setup
+source.root=.
+src.dir=src
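+# Note: despite the *.jar property names (a NetBeans convention), the artifact
+# built and deployed here is the DVN-web.war web archive.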
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/config/dcmi_terms2ddi.xsl	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,156 @@
+<xsl:stylesheet version="1.0"
+		xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+		xmlns:dc="http://purl.org/dc/terms/"
+		exclude-result-prefixes="dc"
+>
+<xsl:output method="xml" version="1.0" encoding="UTF-8"
+		indent="yes" />
+<xsl:template match="/">
+<codeBook 
+          xmlns="http://www.icpsr.umich.edu/DDI" 
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
+          xsi:schemaLocation="http://www.icpsr.umich.edu/DDI 
+          http://www.icpsr.umich.edu/DDI/Version2-0.xsd">
+        <stdyDscr>
+            <citation>
+                <titlStmt>
+		   <titl>
+		   <xsl:for-each select="//dc:title">
+			<xsl:value-of select="."/>
+		   </xsl:for-each>
+		   </titl>			
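+		   <!-- Identifiers beginning with http://hdl.handle.net/ are tagged with
+		        agency="handle" and rewritten to the hdl: form; substring(.,23)
+		        drops the 22-character URL prefix. -->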
+		   <xsl:for-each select="//dc:identifier">
+		      <IDNo>
+		        <xsl:attribute name="agency">
+		        <xsl:choose>
+		         <xsl:when test='starts-with(.,"hdl:")'>handle</xsl:when>
+			 <xsl:when test='starts-with(.,"http://hdl.handle.net/")'>handle</xsl:when>
+		        </xsl:choose>
+		        </xsl:attribute>
+		        <xsl:choose>
+			 <xsl:when test='starts-with(.,"http://hdl.handle.net/")'>hdl:<xsl:value-of select='substring(.,23)'/></xsl:when>
+		         <xsl:otherwise><xsl:value-of select="."/></xsl:otherwise>
+		        </xsl:choose>
+		      </IDNo>
+		   </xsl:for-each>
+	        </titlStmt>
+		<rspStmt>
+		   <xsl:for-each select="//dc:creator">
+		   <AuthEnty><xsl:value-of select="."/></AuthEnty>
+		   </xsl:for-each>
+		</rspStmt>
+
+		<prodStmt>
+		<xsl:for-each select="//dc:publisher">
+		<xsl:if test="normalize-space(.)!=''">
+		   <producer>
+		   <xsl:value-of select="."/>
+		   </producer>
+		</xsl:if>		
+		</xsl:for-each>
+
+		<xsl:for-each select="//dc:date">
+		<xsl:if test="normalize-space(.)!=''">
+		   <prodDate>
+		   <xsl:value-of select="normalize-space(.)"/>
+		   </prodDate>
+		</xsl:if>		
+		</xsl:for-each>		
+		</prodStmt>		
+
+	    </citation>
+	    <stdyInfo>
+		<subject>
+		<xsl:for-each select="//dc:subject">
+		   <keyword><xsl:value-of select="."/></keyword>
+		</xsl:for-each>	
+		</subject>
+		<xsl:for-each select="//dc:description">
+		<abstract>
+		   <xsl:value-of select="."/>
+		</abstract>
+		</xsl:for-each>	
+		<sumDscr>
+		<xsl:for-each select="//dc:coverage">
+		<xsl:if test="normalize-space(.)!=''">
+		   <geogCover>
+		      <xsl:value-of select="."/>
+		   </geogCover>
+		</xsl:if>
+		</xsl:for-each>	
+		<xsl:for-each select="//dc:type">
+		<xsl:if test="normalize-space(.)!=''">
+		   <dataKind>
+		      <xsl:value-of select="."/>
+		   </dataKind>
+		</xsl:if>
+		</xsl:for-each>	
+		</sumDscr>
+	    </stdyInfo>
+
+	    <xsl:if test="normalize-space(//dc:source)!=''">
+	    <method>
+	       <dataColl>
+	          <sources>
+		     <xsl:for-each select="//dc:source">
+		     <xsl:if test="normalize-space(.)!=''">
+	             <dataSrc>
+		        <xsl:value-of select="normalize-space(.)"/>
+	             </dataSrc>
+		     </xsl:if>
+		     </xsl:for-each>
+	          </sources>
+	       </dataColl>
+	     </method>
+	     </xsl:if>
+
+
+	    <xsl:for-each select="//dc:rights">
+	    <xsl:if test="normalize-space(.)!=''">
+            <dataAccs>
+	       <useStmt>
+	     	  <restrctn>
+		   	<xsl:value-of select="normalize-space(.)"/>
+		  </restrctn>
+               </useStmt>
+            </dataAccs>
+            </xsl:if>
+	    </xsl:for-each>
+            <xsl:if test="normalize-space(//dc:relation)!='' or normalize-space(//dc:isReferencedBy)!=''">
+            <othrStdyMat>
+                <xsl:for-each select="//dc:relation">
+                <xsl:if test="normalize-space(.)!=''">
+               <relMat>
+                  <xsl:value-of select="normalize-space(.)"/>
+               </relMat>
+                </xsl:if>
+            </xsl:for-each>
+                <xsl:for-each select="//dc:isReferencedBy">
+                <relPubl>
+                    <citation source="DVN_3_0">
+                        <titlStmt>
+                            <IDNo>
+                            <xsl:attribute name="agency">
+                                <xsl:value-of select="normalize-space(./@agency)"/>
+                            </xsl:attribute>
+                            <xsl:value-of select="normalize-space(./@IDNo)"/>
+                            </IDNo>
+                        </titlStmt>
+                        <biblCit><xsl:value-of select="normalize-space(.)"/></biblCit>
+                        <holdings>
+                            <xsl:attribute name="URI">
+                                <xsl:value-of select="normalize-space(./@holdingsURI)"/>
+                            </xsl:attribute>
+                            <xsl:value-of select="normalize-space(./@holdingsURI)"/>
+                        </holdings>
+                    </citation>
+                </relPubl>
+                </xsl:for-each>
+            </othrStdyMat>
+            </xsl:if>
+	</stdyDscr>
+</codeBook>
+</xsl:template>
+</xsl:stylesheet>
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/config/dvn_data_functions.R	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,1765 @@
+library(foreign)
+library(stats)
+library(methods)
+library(UNF)
+library(R2HTML)
+
+options(digits.secs = 3)
+
+
+############ parameters ########################
+univarstathdr<-c("Valid Cases", "Missing Cases(NAs)", "Total", "Mean", "Standard deviation", "Skewness", "Kurtosis", "Coefficient of variation", "Mode", "Minimum","1st Quartile","Median","3rd Quartile","Maximum","Range","Interquartile Range","Normality Test(Shapiro-Wilk Statistic)", "Normality Test(Shapiro-Wilk Statistic: p value)")
+
+imgprfx1<-c("<img src=\"http://")
+imgprfx2<-c("/nph-dmpJpg.pl?jpgfn=")
+imgsffx1<-c("\" >\n")
+imgsffx2<-c("\" >\n")
+
+############# parameters #######################
+# Note: 
+#  - The parameter na.strings is set to "NA", even though in the DVN tab files Missing Values are encoded as empty strings; 
+#    this may be a legacy thing (maybe older files still had "NA"s in them when this was written?). After calling this
+#    function, read.table141vdc, the DVN application classes (for example, DvnRforeignFileConversionServiceImpl.java) make
+#    another call to reset all the empties to NA. Some functions further down in this file also do that explicitly. 
+#  - I changed the strip.white parameter to FALSE (-- L.A., 05/07/2013); having it set to TRUE resulted in dropping 
+#    the empty entries that were supposed to represent Missing Values whenever the subset contained a single numeric column, 
+#    no matter what na.strings= was set to. 
+
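+# Variable type codes (the colClassesx argument / "var.type" attribute), as
+# inferred from the code below: 0 = character, 1 = discrete/categorical,
+# 2 = continuous numeric, 3 = boolean/logical. varFormat entries flag special
+# column formats: 'D' = date, 'T' = time, 'DT' = date+time, 'JT' = day-of-year+time.
+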
+read.table141vdc<-function (file, header = FALSE, sep = "\t", quote = "", dec = ".", col.names=NULL, na.strings = "NA",colClasses = NA,  colClassesx = NA, nrows = -1, skip = 0, check.names = TRUE,fill = !blank.lines.skip, strip.white = FALSE, blank.lines.skip = FALSE, comment.char = "", varFormat=list()) 
+{
+    if (is.character(file)) {
+        file <- file(file, "r")
+        on.exit(close(file))
+    }
+    if (!inherits(file, "connection")) stop("argument 'file' must be a character string or connection")
+    if (!isOpen(file)) {
+        open(file, "r")
+        on.exit(close(file))
+    }
+    if (skip > 0) readLines(file, skip)
+
+    cols<- length(colClassesx)
+    if (is.null(col.names)) col.names<-paste("V", 1:cols, sep = "")
+    if(check.names) col.names <- make.names(col.names, unique = TRUE)
+    what <- rep(list(""), cols)
+    names(what) <- col.names
+    known <- colClasses %in% c("logical", "integer", "numeric", "complex", "character")
+    what[known] <- sapply(colClasses[known], do.call, list(0))
+    
+    data <- scan(file = file, what = what, sep = sep, quote = quote, dec = dec, nmax = nrows, skip = 0, na.strings = na.strings, quiet = TRUE, fill = fill, strip.white = strip.white, blank.lines.skip = blank.lines.skip, multi.line = FALSE, comment.char = comment.char)
+    
+    nlines <- length(data[[1]])
+    
+    if (cols != length(data)) {
+        warning(paste("cols =", cols, " != length(data) =", length(data)))
+        cols <- length(data)
+    }
+
+    #cat("colClassesx:\n")
+    #cat(paste(class(colClassesx),"\n"))
+    #cat(paste(colClassesx,"\n",sep=" "))
+    #cat(paste(class(varFormat),"\n"))
+    #cat(paste(length(varFormat),"\n"))
+    #cat("varFormat:\n")
+    #cat(paste(varFormat,"\n",sep=" "))
+
+    saved.options <- options(digits.secs = 3)
+
+    for (i in 1:cols) {
+        #if (known[i]) next
+        #data[[i]] <- as(data[[i]], colClasses[i])
+	#cat(paste(class(data[[i]]),"\n"))
+	#cat(paste(mode(data[[i]]),"\n"))
+        if (colClassesx[i] == 0) {
+
+	     # Make sure the character values are handled as such:
+	     #data[[i]]<-I(data[[i]]);
+	     data[[i]]<-as.character(data[[i]]);
+	     # And replace empty strings with NAs:
+	     data[[i]][ data[[i]] == '' ]<-NA
+	     # And remove the double quotes we had put around the non-missing
+ 	     # string values as they were stored in the TAB files:
+
+	     data[[i]]<-sub("^\"", "", data[[i]])
+	     data[[i]]<-sub("\"$", "", data[[i]])
+            
+             if (is.null(unlist(varFormat[col.names[i]]))){
+                #cat("before-s=",i, "\n")
+                data[[i]] <- as(data[[i]], "character")
+                #cat("after-s=",i, "\n")
+             } else if (!is.null(unlist(varFormat[col.names[i]]))){
+                if (varFormat[col.names[i]] == 'D'){
+                    #cat("before-d=",i, "\n")
+                    #data[[i]]<-as.Date(data[[i]], "%Y-%m-%d")
+		    data[[i]]<-as.Date(data[[i]]);
+                    #cat("after-d=",i, "\n")
+                    colClassesx[i]<-1
+                } else if (varFormat[col.names[i]] == 'T'){
+                    #cat("before-t=",i,"\n")
+                    data[[i]]<-as.POSIXct(strptime(data[[i]], "%T"))
+                    #cat("after-t=", i,"\n")
+                    colClassesx[i]<-1
+                } else if (varFormat[col.names[i]] == 'DT'){
+                    data[[i]]<-as.POSIXct(strptime(data[[i]], "%F %H:%M:%OS"))
+                    colClassesx[i]<-1
+                } else if (varFormat[col.names[i]] == 'JT'){
+                    data[[i]]<-as.POSIXct(strptime(data[[i]], "%j %H:%M:%OS"))
+                    colClassesx[i]<-1
+                }
+             }
+        } else if (colClassesx[i] == 3) {
+
+	# special case for Boolean/logical variables: 
+	# (these will be passed from the application as vectors of 0s and 1s)
+	# also, note that this type will be used only when the subset is 
+	# created as part of the "save-as" functionality. When it's for 
+	# analysis, DVN "boolean" variables will be of type 1, because 
+	# they will be handled as regular integer categoricals with the labels 
+	# "TRUE" and "FALSE". -- L.A. 
+	    #print(data[[i]])
+
+	    for (j in 1:length(data[[i]])) {
+	       if (!is.na(data[[i]][j]) && data[[i]][j] == "") { 
+	          data[[i]][j]<-NA 
+	       }
+	    }
+
+	    #print(data[[i]])
+
+	    data[[i]]<-as.logical(as.numeric(data[[i]]))
+	    #print(data[[i]])
+
+
+        } else {
+            data[[i]]<-type.convert(data[[i]], dec = dec)
+            #cat("data[[", i, "]]:", class(data[[i]]), "\n", sep="")
+            #if ( (class(data[[i]]) == "numeric") & (colClassesx[i]==1) ) {
+            #   colClassesx[i]<-2
+            #}
+        }
+    }
+
+    options(saved.options)
+
+    class(data) <- "data.frame"
+    row.names(data) <- as.character(seq(len = nlines))
+    attr(data, "var.type")<-colClassesx
+    #cat("end of read.table141vdc\n")
+    data
+} # end of read.table141vdc
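+
+# Hypothetical usage sketch (illustration only; per the note above, the real
+# callers are DVN application classes such as DvnRforeignFileConversionServiceImpl.java):
+#   dtfrm <- read.table141vdc(file="subset.tab",
+#                             col.names=c("age","income","name"),
+#                             colClassesx=c(2,2,0), varFormat=list())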
+
+transformrecoded <- function(x, recodedvarsindx = 2, dec = ".", col.names = NULL, colClassesx = NA, varFormat = list()){
+
+    #cat("inside transformrecoded\n")
+    #cat(paste(col.names,"\n",sep=""))
+
+    for (i in recodedvarsindx:length(x)) {
+
+    	#i = recodedindx[j]
+	#cat("index: ")
+	#cat(i)
+	#cat("\n")
+
+	#cat(paste(class(x[[i]]),"\n"))
+	#cat(paste(mode(x[[i]]),"\n"))
+	
+	#cat(paste(varFormat[col.names[i]],"\n"))
+	#cat(paste(unlist(varFormat[col.names[i]]),"\n"))
+
+	testbool<-is.null(unlist(varFormat[col.names[i]]))
+	#cat(as.character(testbool))
+
+
+        if (!is.null(unlist(varFormat[col.names[i]]))){
+	     	#cat("inside the if loop.\n")
+                if (varFormat[col.names[i]] == 'D'){
+	       	    x[[i]]<-as.Date(x[[i]])
+		    #cat("x[[i]] is a Date;\n")
+		    colClassesx[i]<-1
+                } else if (varFormat[col.names[i]] == 'T'){
+                    x[[i]]<-as.POSIXct(strptime(x[[i]], "%T"))
+                    colClassesx[i]<-1
+                } else if (varFormat[col.names[i]] == 'DT'){
+                    x[[i]]<-as.POSIXct(strptime(x[[i]], "%F %H:%M:%OS"))
+                    colClassesx[i]<-1
+                } else if (varFormat[col.names[i]] == 'JT'){
+                    x[[i]]<-as.POSIXct(strptime(x[[i]], "%j %H:%M:%OS"))
+                    colClassesx[i]<-1
+                }
+        }
+    }
+    x
+}
+
+###########################################################
+createvalindex <-function(dtfrm, attrname=NULL){
+    # this version relies on the list-based approach
+    # completely new final version (without the old code block)
+    if (is.null(dtfrm)) {
+        stop("dataframe is not specified\n")
+    } else if (is.null(attrname)){
+        stop("attrname is is not specified\n")
+    } else if (!exists('dtfrm')) {
+        stop("dataframe is not found\n")
+    } else if (!is.data.frame(dtfrm) ) {
+        stop("Specified object is not a data.frame\n")
+    }
+        
+    #DBG<-TRUE
+    DBG<-FALSE
+    try ( {
+    if (attrname == 'val.index') {
+        tabletype<-'val.table'
+        valtable<-attr(dtfrm, 'val.table')
+    } else if (attrname == 'missval.index') {
+        tabletype<-'missval.table'
+        valtable<-attr(dtfrm, 'missval.table')
+    } else stop ("Specified attrname must be either val.index or missval.index\n")
+    
+    if (DBG) {cat("\nattribute name=",attrname,"\n")}
+    if (length(valtable)) {
+        vlindex  <- list();
+        vlst  <- list();
+        lstall<-list()
+        vltbl<-list()
+        if (DBG) {
+            cat("length(",attrname,")=",length(valtable),"\n")
+            cat("varidset(",attrname,")=",names(valtable),"\n")
+        }
+        nameset<-names(valtable)
+        if (DBG) {
+            str(nameset)
+            cat("\nnameset:", paste(nameset,collapse="|"), "\n",sep="")
+        }
+        for (i in 1:(length(valtable))){
+        if (DBG) {
+            cat("var=",i,"\n", sep="")
+            cat("\tlstall:", paste(if (length(lstall)) {as.vector(lstall,mode="integer")} else {"empty"}, collapse=","), "\n",sep="")
+        }
+            nameseti<-nameset[i]
+            if (!is.null(lstall[[as.character(i)]])){next}
+            lsti<-list()
+
+            # set i to the new list
+            lsti[[as.character(i)]]<-i
+            lstall[[as.character(i)]]<-i
+            vlindex[[as.character(nameseti)]]<-nameset[i]
+            vltbl[[as.character(nameseti)]]<-valtable[[i]]
+
+            if (DBG) {cat("\tlsti:", paste(as.vector(lsti, mode="integer"),collapse=","), "\n",sep="")}
+            for (j in i:length(valtable)){
+                if (!is.null(lstall[[as.character(j)]])){next}
+                if (attrname == 'val.index') {
+                    if (  identical( names(valtable[[i]]), names(valtable[[j]])  ) & identical(valtable[[i]], valtable[[j]]) ) {
+                        if (DBG) {cat("\tVL:new duplicate (var#) to be added:", j,"\n",sep="")}
+                        lsti[[as.character(j)]]<-j
+                        vlindex[[as.character(nameset[j])]]<-nameseti
+                        lstall[[as.character(j)]]<-j
+                    }
+                } else if (attrname == 'missval.index') {
+                    if ( identical(valtable[[i]], valtable[[j]]) ) {
+                        if (DBG) {cat("\tMSVL: new duplicate (var#) to be added:", j,"\n",sep="")}
+                        lsti[[as.character(j)]]<-j
+                        vlindex[[as.character(nameset[j])]]<-nameseti
+                        lstall[[as.character(j)]]<-j
+                    }
+                }
+            }
+            if (DBG) {cat("\tlsti to be attached to vlst:", paste(as.vector(lsti, mode="integer"),collapse=","), "\n",sep="")}
+            if (length(lsti)){
+                vlst[[nameseti]]<-nameset[as.vector(lsti, mode="integer")]
+            }
+        }
+        if (DBG) {
+            cat("\nvlst=attr(dtfrm,'val.list')  <- vlst\n")
+            str(vlst)
+            cat("\nvlindex=attr(dtfrm,'val.index') <- vlindex\n")
+            str(vlindex)
+            cat("\nvltbl=attr(dtfrm,'val.table')<- valtablex\n")
+            str(vltbl)
+            cat("\nnames(vltbl): equivalent to tmpunique\n")
+            cat("unique var IDs:", paste(names(vltbl),collapse="|"), "\n",sep="")
+        }
+        attr(dtfrm, attrname)<-vlindex
+
+        if (attrname == 'val.index') {
+            attr(dtfrm, 'val.list')  <- vlst
+            attr(dtfrm, 'val.table') <- vltbl
+        } else if (attrname == 'missval.index') {
+            attr(dtfrm, 'missval.list')  <- vlst
+            attr(dtfrm, 'missval.table')<-vltbl
+        }
+            
+    } else {
+            # no value labels
+            #vlindex<-rep(NA, dim(dtfrm)[2])
+            attr(dtfrm, attrname)<-NULL
+            if (attrname == 'val.index') {
+                attr(dtfrm, 'val.list')<- NA 
+            } else if (attrname == 'missval.index') {
+                attr(dtfrm, 'missval.list')  <- NA
+            }
+    }
+        
+    invisible(dtfrm)
+    }) # end try    
+} # end of createvalindex
+
+###########################################################
+# 2 table functions that return univariate statistics
+# continuous case
+
+frqtbl.ctn<-function(x){
+    frqtbl<-list()
+    tbl1<-table(x, useNA='ifany')
+    frqtbl[['Mode']]<-NA
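+    # The mode is reported only when the table has fewer cells than there are
+    # observations, i.e. some value (possibly NA) occurs more than once.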
+    if (length(x) > length(tbl1)) {
+        frqtbl[['Mode']]<- names(tbl1)[which.max(tbl1)]
+    }
+    frqtbl
+}
+
+frqtbl.dsc<-function(x){
+    frqtbl<-list()
+    DBG<-FALSE
+        
+        # ftbl: frequency table
+        ftbl<-table(x, useNA='ifany')
+            
+        # get the mode
+        frqtbl[['Mode']]<-NA
+        frqtbl[['freqtbl']]<-NA
+        frqtbl[['pcnttbl']]<- NA
+        if (length(x) > length(ftbl)){
+            frqtbl[['Mode']]<-names(ftbl[which.max(ftbl)])
+            if ((length(ftbl)<=50)){
+                # ptbl: percentage table
+                ptbl<-100*(ftbl/sum(ftbl))
+                # set up the return list
+                frqtbl[['freqtbl']]<- ftbl
+                frqtbl[['pcnttbl']]<- ptbl
+                if (DBG){
+                    cat("\ttable header:",paste(dimnames(ftbl)[[1]], collapse='|'), "\n")
+                    cat("\ttable frequency:",paste(ftbl, collapse='|'), "\n")
+                    cat("\tstatistical mode:", frqtbl[['Mode']], "\n")
+                    cat("\tstatistical mode(freq):", ftbl[which.max(ftbl)], "\n")
+                }
+            }
+        }
+        
+    frqtbl
+}
+
+sw.stat<-function(x,N){
+    DBG<-TRUE
+    DBG<-FALSE
+    SW<-list()
+    SW$value <- NA
+    SW$Pvalue <- NA
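+    # shapiro.test() only accepts sample sizes between 3 and 5000, hence the guard.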
+    if ((N >= 3) & (N <= 5000)) {
+        shpr <- try(shapiro.test(x))
+        if (attr(shpr, "class") == 'htest') {
+            if(DBG) {cat("sw statistics assigned\n")}
+            SW$value <- shpr[[1]][[1]]
+            SW$Pvalue <- shpr[[2]]
+        }
+        if(DBG) {cat("sw statistics end\n")}
+    }
+    SW
+}
+
+univarStat.cntn<-function(varseti){
+    options(digits=3)
+    DBG<-TRUE
+    DBG<-FALSE
+    if(DBG) {cat("pass the point univStat(continuous)\n")}
+
+    N<-sum(complete.cases(varseti))
+    svnm<-summary(varseti)
+
+    if (N) {
+        min.value <- svnm[[1]]
+        q1.value <- svnm[[2]]
+        #median.value <- median(varseti)
+        median.value <- svnm[[3]] 
+        q3.value <- svnm[[5]]
+        max.value <- svnm[[6]]
+        range.value <- svnm[[6]]-svnm[[1]]
+        iqr.value <- svnm[[5]]-svnm[[2]]   # Q3 - Q1
+        mean.value <- svnm[[4]]
+    } else {
+        min.value <- NA
+        q1.value <- NA
+        median.value <- NA
+        q3.value <- NA
+        max.value <- NA
+        range.value <- NA
+        iqr.value <- NA
+        mean.value <- NA
+    }
+
+    stdv.value <- sd(varseti, na.rm=T)
+    z0 <- scale(varseti)
+    if (N >= 2) {cv.value <- stdv.value/svnm[[4]] } else {cv.value <- NA}
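+    # Bias-corrected sample skewness and excess kurtosis computed from z-scores
+    # (the same formulas used by, e.g., SPSS and Excel).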
+    if (N >= 3) {skewness.value <- (N/(N-1)/(N-2))*sum((z0)^3, na.rm=T)} else {skewness.value <- NA}
+    if (N >= 4) {kurtosis.value <- ((N*(N+1)/(N-1))*sum((z0)^4, na.rm=T) - 3*(N-1)^2)/(N-2)/(N-3)} else {kurtosis.value <-NA}
+    # find the maximum frequency cell
+    # index: which.max(table(dtfrm[[i]]))
+
+    maxfreq<-frqtbl.ctn(x=varseti)[["Mode"]]
+    SW<-sw.stat(x=varseti,N=N)
+    statset<- list(
+        Vald = N,
+        Invald = sum(is.na(varseti)), 
+        Total = length(varseti), 
+        Mean = mean.value, 
+        Stdev = stdv.value, 
+        Skewness = skewness.value,
+        Kurtosis = kurtosis.value,
+        CV = cv.value, 
+        Mode = maxfreq,
+        Minimum = min.value, 
+        Q1 = q1.value,
+        Median = median.value, 
+        Q3 = q3.value, 
+        Maximum = max.value, 
+        Range = range.value, 
+        I.Q.R = iqr.value,
+        S.W.statistic = SW$value, 
+        S.W.P.value = SW$Pvalue
+    )
+    statset
+}
+    
+univarStat.dscrt<-function(varseti, ordnl=TRUE){
+    DBG<-TRUE
+    DBG<-FALSE
+
+    if(DBG) {cat("pass the point univStat(discrete)\n")}
+    N<-sum(complete.cases(varseti))
+    if (ordnl){
+        median.value <-NULL
+        if (N) {median.value <- median(varseti, na.rm=TRUE) }
+    }
+    tmpfrq<-frqtbl.dsc(x=varseti)
+
+    statset<- list(
+        Vald = N,
+        Invald = sum(is.na(varseti)), 
+        Total = length(varseti),
+        Mode = tmpfrq[["Mode"]],
+        freqtbl = tmpfrq[["freqtbl"]],
+        pcnttbl = tmpfrq[["pcnttbl"]]
+    )
+    if (ordnl){
+        statset$Median<-median.value
+    }
+    statset
+}
+    
+
+univarStat<-function(dtfrm){
+    DBG<-TRUE
+    DBG<-FALSE
+    if(DBG) {
+        cat("\n\nEntered the function univarStat\n")
+        NAMESET<-names(dtfrm)
+    }
+    
+    STATLST<-list()
+    
+    # create temp vars
+    VARTYPE<-attr(dtfrm, "var.type")
+    for (i in 1: dim(dtfrm)[2]) {
+        try ({
+            varseti<-dtfrm[[i]]
+            
+            if(DBG) {cat("variable name =",NAMESET[i],"\n")}
+
+            N<-sum(complete.cases(varseti))
+
+            if (VARTYPE[i]== 2) {
+            
+                STATLST[[as.character(i)]]<-univarStat.cntn(varseti=varseti)
+                
+            } else if (VARTYPE[i] == 1) {
+            
+                STATLST[[as.character(i)]]<-univarStat.dscrt(varseti=varseti)
+                
+            } else if (VARTYPE[i] == 0) {
+            
+                STATLST[[as.character(i)]]<-univarStat.dscrt(varseti=varseti,ordnl=FALSE)
+                
+            } else {
+            
+                STATLST[[as.character(i)]]<-NULL
+            
+            }
+
+        }) # end of try
+    } # end of the loop
+    
+    attr(dtfrm, "univarStat.lst")<-STATLST
+
+    invisible(dtfrm)
+} # end of univarStat
+###########################################################
+univarChart<-function(dtfrm, analysisoptn=NULL, imgflprfx=NULL, standalone=T){
+    # description
+    # to print univariate charts
+    #
+    # arguments
+    # dtfrm[[i]] variable name
+    # analysisoptn Analysis option
+    # imgflprfx temporary image file prefix
+
+    # local variable 
+    # varlabel variable label (local variable)
+    # No return value; each image file is written in /tmp
+    # $RvlsPrfx   = "$TMPDIR/Rvls.$PRCSSID";
+    # note: value labels will be printed in html tables
+    # unvlst[[as.character(i)]]<-statset
+    
+    # new list-based notations
+    # USL<-attr(dtfrm,"univarStat.lst")
+    # chartset[["hstbx"]]<-hstgrmfile
+    # chartset[["qqplt"]]<-qqpltfile
+    # chartset[["brchrt"]]<-barpltfile
+    # USL[[as.character(i)]][["freqtbl"]]
+    # chrtlst[[as.character(i)]]<-chartset
+
+
+    DBG<-FALSE
+    #DBG<-TRUE
+    if (is.null(analysisoptn)){
+        analysisoptn<-c(1,1,0)
+    }
+    
+    if (is.null(imgflprfx)) {
+        PRCID<-format(Sys.time(), "R%Y%m%d_%H%M%S")
+        #imgflprfx<-paste("c:/asone/R/temp/",PRCID,sep="")
+        imgflprfx<-PRCID
+        if (DBG) {cat("\nprocessID=",imgflprfx,"\n", sep="")}
+    }
+
+
+
+    # function definitions
+
+
+
+varlabel.chrt<-function(lblset){
+    DBG<-FALSE
+    #DBG<-TRUE
+    # variable label processing
+    if (DBG) {cat("\nEntered varlabel.chrt\n")}
+
+    if (nchar(lblset[["varlabel"]])>45) {
+        varlabel<- paste(substr(lblset[["varlabel"]], 1, 45), "...")
+    } else {
+        varlabel<-lblset[["varlabel"]]
+    }
+    lblset[["varlabel"]]<-paste(lblset[["varname"]], ": ", varlabel, sep="")
+    lblset
+}
+
+    
+
+univarChart.cntn<-function(varseti, imgflprfx, labelset) {
+    DBG<-FALSE
+    #DBG<-TRUE
+    chartset<-list()
+
+    if (DBG) {cat ("univarChart.cntn:varname:", labelset[["varname"]], "\n")}
+
+    #histgram/boxplot
+    hstgrmfile<-paste(imgflprfx, labelset[["varname"]],"hs.jpg", sep=".")
+    bitmap(hstgrmfile, type = "jpeg", height = 3.5, width = 3, res=100, pointsize=9)
+
+    layout(matrix(c(1,2),nrow=2,ncol=1), widths=c(1), heights=c(5,1))
+    par(mar=c(4,4,1,1), mgp=c(2, 0.5, 0), tcl=-0.25, cex.axis=0.9, cex.lab=0.9)
+
+    hist(varseti, main="", xlab=labelset[["varlabel"]], col="lightgrey")
+
+    par(mar=c(2,4,0,1))
+    boxplot(varseti, main="", xlab="", ylab="", col="lightgrey", horizontal=T)
+
+    dev.off()
+    #par(def.par)
+    
+    if (!standalone){
+        tmpvsldirhs<-unlist(strsplit(hstgrmfile,"/"))
+        hstgrmfile<-paste(tmpvsldirhs[(length(tmpvsldirhs)-1):length(tmpvsldirhs)],collapse="/")
+    }
+    chartset[["hstbx"]]<-hstgrmfile
+
+    #qq-plot
+    qqpltfile<-paste(imgflprfx, labelset[["varname"]],"qq.jpg", sep=".")
+    bitmap(qqpltfile, type = "jpeg", height = 3, width = 3, res= 100, pointsize=8.5)
+
+    par(tcl=-0.25, cex.axis=0.9, cex.lab=1.0)
+    qqnorm(varseti, main="Normal Q-Q Plot", ylab=labelset[["varlabel"]], pch=15)
+    qqline(varseti)
+    dev.off()
+    #par(def.par)
+    if (!standalone){
+        tmpvsldirqq<-unlist(strsplit(qqpltfile,"/"))
+        qqpltfile<-paste(tmpvsldirqq[(length(tmpvsldirqq)-1):length(tmpvsldirqq)],collapse="/")
+    }
+    chartset[["qqplt"]]<-qqpltfile
+    chartset
+}
+    
+univarChart.dscrt<-function(frqtbl, imgflprfx, labelset){
+    DBG<-FALSE
+    #DBG<-TRUE
+    chartset<-list()
+    if (DBG) {cat ("univarChart.dscrt:varname:", labelset[["varname"]], "\n")}
+
+    barpltfile<-paste(imgflprfx, labelset[["varname"]], "bp.jpg", sep=".")
+    bitmap(barpltfile, type = "jpeg", height = 3, width = 3, res= 100, pointsize=8.5)
+    par(tcl=-0.25, cex.axis=0.9, cex.lab=1.0)
+    barplot(frqtbl, col="lightgrey", main="", xlab=labelset[["varlabel"]], ylab="Frequency")
+    dev.off()
+    #par(def.par)
+    
+    if (!standalone){
+        tmpvsldirbp<-unlist(strsplit(barpltfile,"/"))
+        barpltfile<-paste(tmpvsldirbp[(length(tmpvsldirbp)-1):length(tmpvsldirbp)],collapse="/")
+    }
+
+    chartset[["brchrt"]]<-barpltfile
+    chartset
+}
+    
+    ############################
+    # implementation
+    
+    
+    varlabels<-attr(dtfrm,"var.labels")
+    varnames<-names(dtfrm)
+    vartypes<-attr(dtfrm,"var.type")
+    
+    
+    STATLST<-NULL
+    if (!is.null(attr(dtfrm,"univarStat.lst"))) {
+        STATLST<-attr(dtfrm,"univarStat.lst")
+    }
+    
+    chrtlst<-list()
+    for (i in 1: dim(dtfrm)[2]){
+    try( {
+        if (DBG) {cat("univarChart:",i,"-th var\n")}
+        chrtlbl<-list(varname=varnames[i], varlabel=varlabels[i])
+        labelset<-varlabel.chrt(lblset=chrtlbl)
+        
+        varseti<-dtfrm[[i]]
+
+        if (is.null(STATLST[[as.character(i)]])) {
+            tmpvald<-sum(complete.cases(varseti))
+        } else {
+            tmpvald<-STATLST[[as.character(i)]][["Vald"]]
+        }
+        if (DBG) {cat("tmpvald=",tmpvald,"\n")}
+        
+        chartset<-list()
+
+        if (vartypes[i]==2) {
+            #Continuous Variable
+            if (analysisoptn[2] & tmpvald) {
+                chrtlst[[as.character(i)]]<-univarChart.cntn(varseti=varseti, imgflprfx=imgflprfx, labelset=labelset)
+            }
+        } else {
+            #Discrete Variable
+            #bar plot
+            if (analysisoptn[2] & tmpvald ) {
+            
+                # chart option is chosen
+                if (analysisoptn[1]){
+                    # univariate statistics option is chosen -> freq table is available
+                    # note: univariate statistics option is not chosen, tmpfrqtbl is NA
+                    tmpfrqtbl<-STATLST[[as.character(i)]][["freqtbl"]]
+                } else {
+                    # calculate statistics
+                    if (vartypes[i]==1) {
+                        statlst<-univarStat.dscrt(varseti=varseti)
+                    } else {
+                        statlst<-univarStat.dscrt(varseti=varseti,ordnl=FALSE)
+                    }
+                    tmpfrqtbl<-statlst[["freqtbl"]]
+                }
+                
+                chartset[["brchrt"]]<-NA
+                if( (length(tmpfrqtbl)<=10) & (length(tmpfrqtbl)>1) ) {
+                    chartset<-univarChart.dscrt(frqtbl=tmpfrqtbl, imgflprfx=imgflprfx, labelset=labelset)
+                } else if (class(tmpfrqtbl)=="table") {
+                    # number of categories <= 50
+                    # no chart but table
+                    if (!analysisoptn[1]){
+                        STATLST[[as.character(i)]]<-statlst
+                    }
+                } else if (is.na(tmpfrqtbl)) {
+                    # no table available
+                    if (!analysisoptn[1]){
+                        STATLST[[as.character(i)]]<-statlst
+                    }
+                }
+                chrtlst[[as.character(i)]]<-chartset
+            }
+        } # end of D case
+    }) # end of try
+    } # end of var-wise-loop
+    attr(dtfrm, "univarChart.lst")<-chrtlst
+    
+    if (is.null(attr(dtfrm,"univarStat.lst")) ) {
+        attr(dtfrm,"univarStat.lst")<-STATLST
+    }
+    
+    invisible(dtfrm)    
+} # end of univarChart
+#######################################################################
+univarStatHtml<-function(dtfrm, tmpimgfile, analysisoptn, tmphtmlfile, standalone=T){
+    # Description
+    # 
+    # arguments
+    # dtfrm          variable furnished with attributes
+    # tmpimgfile    temporary image file prefix: =$SRVRCGI=$SERVER$CGIDIR
+    # analysisoptn  analysis option
+    # nrows         local variable
+    # tmphtmlfile   temporary html file
+    # file          tmphtmlfile 
+    
+    DBG<-TRUE
+    DBG<-FALSE
+
+    # open the connection
+    whtml<-file(tmphtmlfile, "w")
+    on.exit(close(whtml))
+    
+    # color parameters
+    # legend: c(1:background, 2:table header, 3: table body(o), 4: table body(e))
+    # clschm <-c("#FFFFFF", "#CCFFCC","#e0ffff","#f0fff0") # green-based palette
+    # blue-based palette
+    #clschm <-c("#FFFFFF", "#e6e6fa","#ffffff","#f5f5f5")
+    clschm <-c("dvnUnvStatTbl", "dvnUnvStatTblHdr","dvnUnvStatTblRowO","dvnUnvStatTblRowE")
+    
+    # table parameters
+    # legend: c(border, cellspacing)
+     tblprm <-c(0, 2)
+    
+    #cat("\nEntered the function univarStatHtml\n")
+    
+    # values for local tests
+    # set localtest 0 after local tests
+    localtest<-TRUE
+    localtest<-FALSE
+    if (localtest){
+        tmpimgfile<-c("")
+        imgprfx1<-c("<img src=\"")
+        imgprfx2<-c("")
+        univarstathdr<-c("Valid Cases", "Invalid Cases(NAs)", "Total", "Mean", "Standard deviation", "Skewness", "Kurtosis", "Coefficient of variation", "Mode", "Minimum","1st Quartile","Median","3rd Quartile","Maximum","Range","Interquartile Range","Normality Test:Shapiro-Wilk Statistic", "(Shapiro-Wilk Statistic: p value)")
+    }
+    if (standalone) {
+        imgflprfx<-paste(imgprfx1,tmpimgfile,imgprfx2,sep="")
+    } else {
+        imgflprfx<-"<img src=\""
+    }
+    # constant for rendering a table for univariate statistics(continuous vars only)
+    uslstlen<-length(univarstathdr)
+    nrows <-ceiling(uslstlen/2)
+    blnkcell<-uslstlen%%2==TRUE
+    
+    
+    nameset<-names(dtfrm)
+    varlabelset<-attr(dtfrm,"var.labels")
+    CHRTLST<-attr(dtfrm, "univarChart.lst")
+    STATLST<-attr(dtfrm, "univarStat.lst")
+    VARTYPE<-attr(dtfrm, "var.type")
+    VALINDEX<-attr(dtfrm, "val.index")
+    VALTABLE<-attr(dtfrm, "val.table")
+    
+    
+    pt.varheader<-function(namesi, varlabelsi=NA) {h3<-paste("<h3>", namesi, if (!is.na(varlabelsi)) {paste(": ", varlabelsi, sep="")}, "</h3>\n",sep="");h3}
+
+    ###################
+    # continuous case
+    univarStatHtml.cntn<-function(statlst, imgfllst, cmbntn, namesi, varlabelsi){
+
+        # statlst   STATLST[[as.character(i)]]
+        # imgfllst  imgfllst=CHRTLST[[as.character(i)]]
+        # cmbntn    analysisoptn
+        # function definition sections
+
+        # create the first tr tag: chart part
+        pt.tr1<-function(imgfllst, cmbntn){
+            tr1<-""
+            if (cmbntn[2]) {
+
+                if (cmbntn[1]) { colspan<-" colspan=\"2\"" } else { colspan<-""}
+
+                # both
+
+                if(!is.null(imgfllst[["hstbx"]])){
+                    tr1.l<-paste("<td",colspan,">\n",imgflprfx,imgfllst[["hstbx"]],imgsffx1,"</td>\n",sep="")
+                } else {
+                    tr1.l<-paste("<td",colspan,">\n<p><B><font color=red>Histogram/Boxplot Not Available</font></B></p>\n</td>\n",sep="")
+                }
+
+                if(!is.null(imgfllst[["qqplt"]])) {
+                    tr1.r<-paste("<td",colspan,">\n",imgflprfx,imgfllst[["qqplt"]],imgsffx1,"</td>\n",sep="")
+                } else {
+                    tr1.r<-paste("<td",colspan,">\n<p><B><font color=red>Normal Q-Q plot Not Available</font></B></p>\n</td>\n",sep="")
+                }
+
+                tr1<-paste("<tr>\n",tr1.l,tr1.r,"</tr>\n",sep="")
+            }
+            tr1
+        }
+
+        # create the 2nd and thereafter tr tags: statistics part
+        pt.tr2<-function(statlst, cmbntn){
+            tr2<-""
+            if (cmbntn[1]) {
+                # statistics on
+                # table header
+                tr2<-paste("<tr class=\"",clschm[2],"\">\n<td align=\"left\"><b>Statistic</b></td><td align=\"right\"><b>Value</b></td>\n<td align=\"left\"><b>Statistic</b></td><td align=\"right\"><b>Value</b></td>\n</tr>\n",sep="")
+
+                # statistical data
+                # when # of statistics is not even
+                if (blnkcell){ univarstathdr[length(statlst)+1]<-"&nbsp;"}
+
+                # table body
+                for (j in 1:nrows) {
+                    if (j%%2==FALSE) colorprm <- clschm[3] else colorprm <-clschm[4]
+
+                    tr2<-paste(tr2, 
+                    "<tr class=\"",colorprm,"\">\n",
+                    "<td align=\"left\">",univarstathdr[j],"</td>\n", 
+                    "<td align=\"right\">", prettyNum(statlst[[j]]),"</td>\n", 
+                    "<td align=\"left\">",univarstathdr[j+nrows],"</td>\n", 
+                    "<td align=\"right\">", if ( (j==nrows) & (blnkcell) ) {"&nbsp;"} else {prettyNum(statlst[[j+nrows]])},"</td>\n</tr>\n", sep="")
+                }
+            }
+            tr2
+        }
+
+        # create the chart/statistics table segment
+        pt.tbl<-function(statlst=statlst,cmbntn=cmbntn,imgfllst=imgfllst){
+            tr1<-pt.tr1(imgfllst=imgfllst, cmbntn=cmbntn)
+            tr2<-pt.tr2(statlst=statlst, cmbntn=cmbntn)
+            tbl<-paste("<center>\n<table border=\"",tblprm[1],"\" class=\"",clschm[1],"\" cellspacing=\"",tblprm[1],"\" >\n",tr1,tr2,"</table>\n</center>\n",sep="")
+            tbl
+        }
+
+        # create per variable html segment
+        pt.varunit.cntn<-function(vhdr,vcntnts){varunit<-paste(vhdr,vcntnts,"<hr/>", sep="");varunit}
+        ## end of function definitions ##
+
+        # implementation
+
+        pttbl<-pt.tbl(statlst=statlst, imgfllst=imgfllst, cmbntn=cmbntn)
+        ptvarheader<-pt.varheader(namesi=namesi, varlabelsi=varlabelsi)
+        ptvarunitc<-pt.varunit.cntn(vhdr=ptvarheader, vcntnts=pttbl)
+
+        ptvarunitc
+    } # end of continuous case
+    
+    
+    ######################
+    # discrete case
+
+    univarStatHtml.dscrt<-function(statlst, imgfllst, cmbntn, namesi, varlabelsi, vltbl) {
+        # statlst   STATLST[[as.character(i)]]
+        # imgfllst  imgfllst=CHRTLST[[as.character(i)]]
+        # cmbntn    analysisoptn
+        # function definition sections
+
+        #statlst[["freqtbl"]]
+        # mode and median even if a freq table is not available 
+        nrw<-3
+        # add one for "total" row
+        #if (!is.na(statlst$freqtbl)) {nrw<-length(statlst$freqtbl)+1+nrw}
+
+        if (class(statlst$freqtbl)=="table") {nrw<-length(statlst$freqtbl)+nrw}
+        # nrws: rowspan parameter value if the chart option is chosen
+        nrws<-nrw+1
+
+        pt.tr1<-function(imgfllst, cmbntn){
+            try({
+            # tr1.l: chart part
+            tr1.l<-""
+            sprsstr1r<-FALSE
+            if (cmbntn[2]) {
+                rowspan<-""
+                if (cmbntn[1]) { rowspan<-paste(" rowspan=\"",nrws,"\"",sep="") }
+
+                if(!is.na(imgfllst[["brchrt"]])){
+                    tr1.l<-paste("<td",rowspan," valign=\"top\">\n",imgflprfx,imgfllst[["brchrt"]], imgsffx1, "</td>\n", sep="")
+                } else {
+                    if (class(statlst$freqtbl)=="table"){
+                        rowspan<-paste(" rowspan=\"",nrws,"\"",sep="")
+                        tr1.l<-paste("<td",rowspan," valign=\"top\">\n<p><B><small>The number of categories is more than 10 or equal to 1.<br>Table substitutes for Bar plot</small></B></p>\n</td>\n",sep="")
+                        cmbntn[1]<-1
+                    } else {
+                        tr1.lm<-paste("<td align=\"left\" colspan=\"3\" valign=\"top\">\n<p><B><small>The number of categories is more than 50. Frequency/Percentage tables are not shown here</small></B></p>\n</td></tr>\n",sep="")
+                        
+                        tr1.lhdr<-paste("<tr><td align=\"left\" class=\"",clschm[2],"\" ><b>Value: Value Label</b></td><td align=\"right\" class=\"",clschm[2],"\" ><b>Freq</b></td><td align=\"right\" class=\"",clschm[2],"\" ><b>Percent</b></td>\n",sep="")
+                        
+                        tr1.l<-paste(tr1.lm,tr1.lhdr, sep="")
+                        
+                        sprsstr1r<-TRUE
+                    }
+                }
+            }
+            # tr1.r: freq/pcnt table header part
+            tr1.r<-""
+            if (cmbntn[1]) {
+                if (class(statlst$freqtbl)=="table"){
+                    tr1.r<-paste("<td align=\"left\" class=\"",clschm[2],"\" ><b>Value: Value Label</b></td><td align=\"right\" class=\"",clschm[2],"\" ><b>Freq</b></td><td align=\"right\" class=\"",clschm[2],"\" ><b>Percent</b></td>\n",sep="")
+                } else if (!sprsstr1r){
+                    tr1.rm<-paste("<td align=\"left\" colspan=\"3\" valign=\"top\">\n<p><B><small>The number of categories is more than 50. Frequency/Percentage tables are not shown here</small></B></p>\n</td></tr>\n",sep="")
+                    
+                    tr1.rhdr<-paste("<tr><td align=\"left\" class=\"",clschm[2],"\" ><b>Value: Value Label</b></td><td align=\"right\" class=\"",clschm[2],"\" ><b>Freq</b></td><td align=\"right\" class=\"",clschm[2],"\" ><b>Percent</b></td>\n",sep="")
+                    
+                    tr1.r<-paste(tr1.rm,tr1.rhdr, sep="")
+                }
+            }
+            tr1<-paste("<tr>\n",tr1.l,tr1.r,"</tr>\n",sep="")
+            }) # end of try
+        }
+
+        # create the 2nd and thereafter tr tags: statistics part
+        pt.tr2<-function(statlst, cmbntn, vltbl, imgfllst){
+            try({
+            tr2<-""
+            tableon<-FALSE
+            if ( cmbntn[2]){
+                if (is.na(imgfllst[["brchrt"]])){
+                    tableon<-TRUE
+                }
+            }
+            if (cmbntn[1] | tableon) {
+
+                if (class(statlst$freqtbl)=="table") {tblkey<-names(statlst$freqtbl)}
+                # if freqtbl is NA, tblkey becomes NULL
+                for (j in 1:nrw) {
+                    if (j%%2==FALSE) { colorprm <- clschm[3]} else {colorprm <-clschm[4]}
+                    if (j < (nrw -2)) {
+
+                        catgrylbl<-""
+                        if (!is.null(vltbl)){
+                            if(!is.null(vltbl[[tblkey[j]]])) {
+                                catgrylbl<-paste("(",vltbl[[tblkey[j]]],")",sep="")
+                            }
+                        }
+                        tr2<-paste(tr2, "<tr class=\"",colorprm,"\">\n<td align=\"left\">",tblkey[j],catgrylbl,"</td>\n<td align=\"right\">",statlst$freqtbl[[j]],"</td>\n<td align=\"right\">", signif(statlst$pcnttbl[[j]],3),"</td>\n</tr>\n", sep="")
+
+                    } else if (j == (nrw -2)) {
+                        #cat("entering the total row\n")
+                        tr2<-paste(tr2, "<tr class=\"",colorprm,"\">\n<td align=\"left\">Total</td>\n<td align=\"right\">",statlst$Vald+statlst$Invald,"</td>\n<td align=\"right\">100</td>\n</tr>\n", sep="")
+
+                    } else if (j == (nrw -1)) {
+                        # median
+                        #cat("entering the median\n")
+                        median.vl<- "Not Available"
+                        median.lbl<-""
+                        if (!is.null(statlst$Median)) {
+                            median.vl<- as.character(statlst$Median)
+                            if (!is.null(vltbl) && (nrw>3)){
+                                if (!is.null(vltbl[[median.vl]])) {
+                                    median.lbl<-paste("(",vltbl[[median.vl]],")",sep="")
+                                }
+                            }
+                        }
+
+                        tr2<-paste(tr2,"<tr class=\"",colorprm,"\">\n<td align=\"left\">Median</td>\n<td align=\"right\">",median.vl,"</td>\n<td align=\"right\">",median.lbl,"</td>\n</tr>\n", sep="")
+
+                    } else if (j == nrw) {
+                        # mode
+                        #cat("entering the Mode\n")
+                        mode.vl<-"Not Available"
+                        mode.lbl<-""
+                        if (!is.null(statlst$Mode)) {
+                            mode.vl<-statlst$Mode
+                            if (!is.null(vltbl) && (nrw>3) ) {
+                                if (!is.null(vltbl[[mode.vl]])) {
+                                    mode.lbl<-paste("(",vltbl[[mode.vl]], ")", sep="")
+                                }
+                            }
+                        }
+
+                        tr2<-paste(tr2,"<tr class=\"",colorprm,"\">\n<td align=\"left\">Mode</td>\n<td align=\"right\">",mode.vl,"</td>\n<td align=\"right\">",mode.lbl,"</td>\n</tr>\n", sep="")
+                    }
+                }
+            }
+            tr2
+            }) # end of try
+        }
+
+        # create the chart/statistics table segment
+        pt.tbl<-function(statlst=statlst,cmbntn=cmbntn,imgfllst=imgfllst,vltbl=vltbl){
+            try({
+            tr1<-pt.tr1(imgfllst=imgfllst, cmbntn=cmbntn)
+            tr2<-pt.tr2(statlst=statlst, cmbntn=cmbntn, vltbl=vltbl,imgfllst=imgfllst)
+            tbl<-paste("<center>\n<table border=\"",tblprm[1],"\" class=\"",clschm[1],"\" cellspacing=\"",tblprm[1],"\" >\n",tr1,tr2,"</table>\n</center>\n",sep="")
+            tbl
+            })
+        }
+
+        # create per variable html segment
+        pt.varunit.dscrt<-function(vhdr,vcntnts){varunit<-paste(vhdr,vcntnts,"<hr/>", sep="");varunit}
+        
+        ## end of function definitions ##
+
+
+        # implementation
+        try({
+        #cat("enters the discrete html body function\n", sep="")
+        pttbl<-pt.tbl(statlst=statlst, imgfllst=imgfllst, cmbntn=cmbntn, vltbl=vltbl)
+
+        ptvarheader<-pt.varheader(namesi=namesi, varlabelsi=varlabelsi)
+        ptvarunitd<-pt.varunit.dscrt(vhdr=ptvarheader, vcntnts=pttbl)
+
+        ptvarunitd
+        })
+    } # end of discrete case
+    
+    
+    
+    # main 
+    # implementation
+        rawVarName <- nameset
+        if (length(attr(dtfrm, "Rsafe2raw"))>0){
+            Rsafe2raw <- attr(dtfrm, "Rsafe2raw")
+            for (i in 1:length(nameset)){
+                if (!is.null(Rsafe2raw[[nameset[i]]])){
+                    rawVarName[i] <-  Rsafe2raw[[nameset[i]]];
+                }
+            }
+        }
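+    # e.g. a raw variable name "var 1" that was ingested under the R-safe
+    # name "var.1" is mapped back via Rsafe2raw[["var.1"]] == "var 1".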
+    
+    for (i in 1:dim(dtfrm)[2]){
+        try({
+        if (VARTYPE[i]==2) {
+            varsgmnt.c<-univarStatHtml.cntn(statlst=STATLST[[as.character(i)]], imgfllst=CHRTLST[[as.character(i)]], cmbntn=analysisoptn, namesi=rawVarName[i], varlabelsi=varlabelset[i])
+            cat(file=whtml, varsgmnt.c, sep="")
+        } else {
+            if (DBG) {cat(i,"-th var before entering the discrete html function\n", sep="")}
+            #cat("check the value table=",VALTABLE[[VALINDEX[[i]]]],"\n", sep="")
+            if (is.null(VALINDEX[[as.character(i)]])){valtable<-NULL} else {valtable<-VALTABLE[[VALINDEX[[as.character(i)]]]]}
+            varsgmnt.d<-univarStatHtml.dscrt(statlst=STATLST[[as.character(i)]], imgfllst=CHRTLST[[as.character(i)]], cmbntn=analysisoptn, namesi=rawVarName[i], varlabelsi=varlabelset[i], vltbl=valtable)
+            cat(file=whtml, varsgmnt.d, sep="")
+        }
+        }) # end of try
+    } # end of var-wise for-loop
+    
+
+} #end of the function univarStatHtml
+
+
+###########################################################
+univarDataDwnld<-function(dtfrm, dwnldoptn, dsnprfx) {
+    # dtfrm(=z1)        dataset to be downloaded
+    # dwnldoptn(=z2)    data download option
+    # dsnprfx(=z3)      dataset name prefix
+
+# The portion of code immediately below has been added to 
+# convert extra metadata, such as value labels, supplied in
+# the proprietary attributes (below) into standard R notations,
+# such as "comments" for variable labels and "factors" for 
+# value labels. 
+# 
+# This is still work in progress! -- L.A. 
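+# For reference, the download options dispatched at the end of this
+# function are: D01 = tab-delimited text (write.table), D02 = S-PLUS
+# dump file (dump), D03 = Stata (write.dta), D04 = R workspace (save).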
+
+    NAMESET<-names(dtfrm)
+    VARLABELS<-attr(dtfrm,"var.labels")
+
+    x<-dtfrm
+    attr(x,"orig.names")<-attr(dtfrm,"var.labels")
+
+    CHRTLST<-attr(dtfrm, "univarChart.lst")
+    STATLST<-attr(dtfrm, "univarStat.lst")
+    VARTYPE<-attr(dtfrm, "var.type")
+    VALINDEX<-attr(dtfrm, "val.index")
+    VALTABLE<-attr(dtfrm, "val.table")
+    # VALORDER is referenced below for ordered value labels but was never
+    # read from the data frame; the attribute name "val.order" is assumed here.
+    VALORDER<-attr(dtfrm, "val.order")
+
+    MISSVALINDEX <- attr(x,"missval.index")
+    MISSVALTABLE <- attr(x,"missval.table")
+
+
+	recodemiss<-TRUE
+	recodefactors<-TRUE
+	dropfactorlevels<-FALSE
+	orderfactors<-TRUE
+	
+
+      for (i in 1:length(x)) {
+        cat("inside the for loop\n")
+	cat("class: ")
+	cat(class(x[[i]]))
+	cat("\n")
+	# Recoding discrete, categorical variables as R factors;
+	# But, (experimental...) only if there are value labels supplied. 
+	# This means, among other things, that an ingested R character, 
+	# or integer vector would stay a vector, and not a factor, 
+	# in a saved-as-R subset.  
+
+
+	# -- L.A.
+	
+	if (!is.null(VARTYPE) && VARTYPE[i]<2 && recodefactors) {
+
+	# Additionally, if we are saving as Stata, we're only
+	# recoding discrete numeric values (vartype 1), but not Strings.
+	# This is because of the nature of factors in R.
+	# TODO: add documentation for all of this!
+	# -- L.A. 
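+	# Illustrative sketch (not part of the original code): with value
+	# labels vti <- c("1"="male", "2"="female"), the recoding below turns
+	# an integer vector c(1, 2, 1, NA) into
+	#   factor(c(1, 2, 1, NA), levels=c(1, 2), labels=c("male", "female"))
+	# i.e. male, female, male, <NA>.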
+
+	#if ((dwnldoptn != 'D03') || (VARTYPE[i] == 1)) {
+	if (((dwnldoptn != 'D03') || (VARTYPE[i] == 1)) && !(is.null(VALINDEX[[as.character(i)]]))) {
+
+	  # VALINDEX[[as.character(i)]] is guaranteed non-NULL by the test above
+	  vti <- VALTABLE[[VALINDEX[[as.character(i)]]]]
+	  cat(paste(class(vti),"\n"))
+	  cat(paste(length(vti),"\n"))
+	  cat(paste("VTI", vti, "\n", sep=" : "))
+
+          if (dropfactorlevels) {
+	    vtilevels<-NULL
+	  } else {
+	    if (is.numeric(x[[i]])) {
+	      vtilevels<-as.numeric(names(vti))
+            } else {
+	      vtilevels<-names(vti) 
+            }
+	  }
+
+          # save / re-attach date/time-related class name
+	  classToken <- class(x[[i]])
+          vlevsi <- as.list(sort(unique.default(c(x[[i]],vtilevels))))
+          if ((classToken[1] == "Date") || (classToken[1] == "POSIXt")) {
+	      class(vlevsi)<- classToken
+	  }
+	  names(vlevsi)<-vlevsi
+          tmatch<-na.omit(match(names(vti),names(vlevsi)))
+          if (length(tmatch)>0) {
+	      names(vlevsi)[tmatch] <- vti
+	  }
+
+	  mti<-integer(0);
+	  if (recodemiss && !is.null(MISSVALINDEX[[as.character(i)]])) {
+	    mti<-MISSVALTABLE[[MISSVALINDEX[[as.character(i)]]]]
+	    tmatch<-na.omit(match(mti,vlevsi))
+	    if (length(tmatch)>0) {
+	      vlevsi[tmatch]<-NULL
+	    }
+	  }
+
+# TODO: Add explicit nominal/ordinal/dichotomous information to meta data, instead
+#   of assuming non-character vars are ordered
+
+	if ((dwnldoptn == 'D04') && !(is.null(VALORDER[[as.character(i)]]))) {
+	   cat("ordered value labels supplied")
+		x[[i]]  <-  factor(x[[i]],
+				levels=VALORDER[[as.character(i)]],
+				ordered=TRUE)
+	} else {
+	  cat("no ordered value labels supplied\n")
+	  cat(paste(VARTYPE[i],"\n",sep=""))
+	  cat(paste(length(vlevsi),"\n",sep=""))
+	  orderedfct<-(orderfactors &&
+                                      VARTYPE[i]>0 && ((length(vlevsi)-length(mti)>2)))
+          cat(paste(as.character(orderedfct),"\n", sep=""))
+	  cat(paste("MTI", mti,"\n",sep=" : "))
+	  cat(paste("VLEVSI", vlevsi,"\n",sep=" : "))
+	  
+		x[[i]]  <-  factor(x[[i]],
+	        		levels=vlevsi,
+			     	labels=names(vlevsi),
+			     	ordered=orderedfct)
+	}
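+	# Note: ordered=TRUE yields an ordered factor, so comparisons such as
+	# x[[i]][1] < x[[i]][2] are meaningful for ordinal variables.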
+
+	attr(x,"vlevsi")<-vlevsi;
+	attr(x,"namesvlevsi")<-names(vlevsi); 
+
+	}
+	}
+
+# try to add variable labels as R comments: (L.A. -- ?)
+
+	comment(x[[i]]) <- VARLABELS[i]
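+	# e.g. comment(x[[1]]) <- "Age in years"; comment(x[[1]]) then returns
+	# the label, and the attribute survives save()/load() of the subset.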
+      }
+
+# end of added recoding -- L.A.
+	
+
+    
+    if (dwnldoptn == 'D01') {
+        # In the future when a sample program file is attached to
+        # a text file, col.names should be set to F to avoid
+        # printing a variable list
+        write.table(dtfrm, file=dsnprfx, sep="\t", row.names=F, na=".")
+    } else if (dwnldoptn == 'D02') {
+# SPLUS: (L.A.)
+        for (i in 1:length(x)) {
+            if (class(x[[i]]) == 'AsIs'){
+                x[[i]]<-as.character(x[[i]]);
+            }
+        }
+        #attach(dtfrm)
+        dump('x', file=dsnprfx)
+        #dump(ls(dtfrm), file=dsnprfx)
+        #detach(dtfrm)
+    } else if (dwnldoptn == 'D03') {
+# STATA: (L.A.)
+	## -- replaced dtfrm with x in the following paragraph; - L.A. (?)
+        # truncate over-sized string variables
+        MaxLenStringVar <- 127
+        vt <- attr(x, 'var.type')
+        for (i in 1:length(vt)){
+            if (vt[i] == 0){
+                #cat(paste(i, "-th var is char type", sep=""), "\n")
+                maxlen <- max(unlist(lapply(x[[i]],nchar)))
+                if (maxlen > MaxLenStringVar) {
+                    #cat(paste(i, "-th var is over-sized string var", sep=""), "\n")
+                    x[[i]] <- strtrim(x[[i]], MaxLenStringVar)
+                }
+            }
+        }
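+        # e.g. strtrim("abcdefgh", 3) returns "abc"; only character
+        # variables (vt == 0) whose longest value exceeds 127 characters
+        # are truncated above.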
+        write.dta(x, file=dsnprfx, version=7)
+    } else if (dwnldoptn == 'D04') {
+# SAVE AS R WORKSPACE: (L.A.)
+        save(x,file=dsnprfx)
+    }
+} # end of univarDataDwnld
+
+###########################################################
+sumStatTabAll.DDI<-function(dtfrm, file="", flid=1, jobNo=0, startno=1, novars=0){
+
+        # sumStatTabAll.DDI(dtfrm=x,file="/tmp/mry/00001/00001.stat.1.tab", flid=1)
+        # arguments
+        # dtfrm data frame furnished with attributes
+        # file  output file (tab-delimited)
+        # flid  file ID
+        # jobNo == 0 if the job is not divided into parts
+        # constants
+        entref<-c("&", "<", ">", "'", "\"")
+        nmstr<-c("&amp;","&lt;", "&gt;", "&apos;","&quot;")
+        sumStatset<-c("mean", "medn", "mode", "vald", "invd", "min", "max", "stdev")
+        DEBUG<-FALSE
+
+        fileid<-paste("file", flid, sep="")
+        #varIDprfx<-paste("v", flid, ".", sep="")
+
+        # open the connection
+        if (file!="") {
+            if (jobNo==0){
+                wxml<-file(file, "w")
+            } else if (jobNo >0) {
+                wxml<-file(file, "a")
+            }
+            on.exit(close(wxml))
+            
+        } else {
+            stop("output file name is not specified\n")
+        }
+
+        # An auxiliary function that replaces the five characters above with their named XML entities
+        chr2xtrf<-function(lbl){
+            for (i in 1:length(entref)) {
+                lbl<-gsub(entref[i], nmstr[i], lbl, perl=T, useBytes = T)
+            }
+            lbl
+        }
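+        # e.g. chr2xtrf("R & D <100>") returns "R &amp; D &lt;100&gt;".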
+
+        # xml printing up to the section 3
+        #if (jobNo<= 1 ){
+        #   if (jobNo){novars<-"" } else {novars<-dim(dtfrm)[2]}
+        if (jobNo == 0){
+            if (novars==0){novar<-"";} else if (novars > 0) {novar <-novars}
+            cat(file=wxml, sep="",paste(dim(dtfrm)[1],novar,fileid,sep="\t"),"\n" )
+        }
+        
+        
+        VARTYPE<-attr(dtfrm, "var.type")
+        NAMES<-names(dtfrm)
+    for (i in 1: dim(dtfrm)[2]){
+
+        #   sumStatset<-c("mean", "medn", "mode", "vald", "invd", "min", "max", "stdev")
+
+
+        tmpvari<-dtfrm[[i]]
+            if(DEBUG) {cat("variable name =",NAMES[i],"\n")}
+            
+            if (VARTYPE[i] ==0){
+                # set '' to NA   tmpvari[ tmpvari == ""]<-NA; 
+                tmpvari[ sub('^\\s+', '',tmpvari, perl = T,  useBytes=T)==''] <-NA
+            }
+
+            
+            tbl1<-table(tmpvari, useNA='ifany')
+
+            N<-sum(complete.cases(tmpvari))
+
+            if (VARTYPE[i]== 2) {
+                
+                svnm<-summary(tmpvari)
+                if (N) {
+                    min.value <- svnm[[1]]
+                    median.value <- median(tmpvari, na.rm=TRUE)
+                    max.value <- svnm[[6]]
+                    mean.value <- svnm[[4]]
+                }
+                else {
+                    min.value <- NA
+                    median.value <- NA
+                    max.value <- NA
+                    mean.value <- NA
+                }
+                stdv.value <- sd(tmpvari, na.rm=T)
+                # find the maximum frequency cell
+                # index: which.max(table(dtfrm[[i]]))
+                maxfreq<-NA
+                if (length(tmpvari) > length(tbl1)) {
+                    maxfreq<- names(tbl1)[which.max(tbl1)]
+                }
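+                # e.g. tmpvari = c(5, 5, 9) gives tbl1 = (5 -> 2, 9 -> 1),
+                # so maxfreq is "5"; the guard above skips all-unique
+                # vectors, for which a mode is not meaningful.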
+                statset<- list(
+                    Mean = mean.value, 
+                    Median = median.value, 
+                    Mode.Value = maxfreq,
+                    Vald = N,
+                    Invald = sum(is.na(tmpvari)), 
+                    Minimum = min.value, 
+                    Maximum = max.value, 
+                    Stdev = stdv.value
+                )
+                #if (length(attr(tmpvari, 'Univariate'))==0){attr(dtfrm[[i]],"Univariate") <- statset}
+            } else if ( (VARTYPE[i] < 2) & (VARTYPE[i] >=0) ){
+                
+                if(DEBUG) {cat("pass the point univStat(discrete)\n")}
+                statset<- list(
+                    Vald = N,
+                    Invald = (length(tmpvari) - N), 
+                    Total = length(tmpvari)
+                )
+
+                if (DEBUG){cat("\ttable header:",paste(dimnames(tbl1)[[1]], collapse='|'), "\n")}
+                if (DEBUG){cat("\ttable frequency:",paste(tbl1, collapse='|'), "\n")}
+            } else {
+                if(DEBUG) {cat("out-of-range value", i ,"-th var =", VARTYPE[i],"\n")}
+            }
+
+        # section 4
+        pt.vr.1<-paste(NAMES[i],VARTYPE[i],sep='\t')
+        pt.vr.st<-""
+            #iadj <- (i+startno-1)
+            if (VARTYPE[i] == 2) {
+                # continuous variable case
+                #pt.vr.st<- if (!is.na(statset[[1]])) {statset[[1]]} else {"."} 
+                pt.vr.st<-"8"  
+                for (j in 1:length(sumStatset)) {
+                    pt.vr.st<-paste(pt.vr.st,if (!is.na(statset[[j]])) {statset[[j]]} else {"."}, sep="\t")
+                }
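+                # e.g. a continuous variable emits one tab-separated line:
+                # <name> 2 8 <mean> <median> <mode> <valid> <invalid> <min> <max> <stdev> <UNF>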
+
+             # if: end of continuous variable part
+            } else {
+
+                # discrete variable case
+
+                # actual value table
+                if (dim(tbl1)[1] <= 100) {
+                    # integer: how many value-freq sets follow?
+                    pt.vr.st<-dim(tbl1)[1]
+                    for (j in 1:(dim(tbl1)[1])) {
+                        # each value-freq set
+                        tmpvalue<-dimnames(tbl1)[[1]][j]
+                        #if (VARTYPE[i] == 0) {tmpvalue<-chr2xtrf(tmpvalue)}
+                        if (DEBUG) { cat("i=", i, "\tj=", j,"\ttmpvalue:", tmpvalue, "\n", sep="")}
+                        
+                        pt.vr.st<-paste(pt.vr.st,if (!is.na(tmpvalue)) {tmpvalue} else {"."}, tbl1[[j]],sep="\t")
+
+                    }
+                    
+                } else {
+                    # for more-than-100-category vars, print 0
+                    pt.vr.st<-0
+                }
+                
+                # for all cases, valid, invalid and N are printed
+                for (k in 1:length(statset)){
+                    pt.vr.st<-paste(pt.vr.st,if (!is.na(statset[[k]])) {statset[[k]]} else {"."}, sep="\t")
+                }
+                
+                
+                if (DEBUG) {cat("\n");}
+            } # else: end of discrete case
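+            # e.g. a 2-category discrete variable emits one tab-separated line:
+            # <name> <type> 2 <val1> <freq1> <val2> <freq2> <valid> <invalid> <total> <UNF>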
+            
+            u <- unf(tmpvari, version=3) 
+            uxml<-paste(as.character(u),"\n",sep="")
+            # dump this var
+            cat(file=wxml,paste(pt.vr.1,pt.vr.st,uxml,sep="\t"),sep="")
+
+    } # end of the variable-wise loop 
+
+} #end of sumStatTabAll.DDI
+
+
+checkBinaryResponse<-function(binx){
+    bnryVarTbl <-attr(table(binx), 'dimnames')[[1]];
+    if (length(bnryVarTbl) == 2){
+        if ((bnryVarTbl[1] == 0) && (bnryVarTbl[2]==1)){
+            #cat('this variable is already 0-1\n');
+        } else {
+            #cat('this variable needs the conversion\n');
+            #cat(paste( bnryVarTbl[1],' is recoded to 1; ', bnryVarTbl[2],' is recoded to 0;\n', sep=''));
+            binx<-as.integer(binx == bnryVarTbl[1]);
+        }
+    }
+    invisible(binx)
+}
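+# Usage sketch: checkBinaryResponse(c(0, 1, 1, 0)) returns the vector
+# unchanged, while checkBinaryResponse(c(2, 5, 2)) recodes 2 -> 1 and
+# 5 -> 0 (the first observed level becomes the "success" code).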
+
+
+#######################################################################
+univarStatHtmlBody<-function(dtfrm, whtml, analysisoptn, standalone=F){
+    # Description
+    #   renders per-variable univariate statistics as HTML and writes them to whtml
+    # arguments
+    # dtfrm         data frame furnished with attributes
+    # whtml         output HTML file or connection
+    # analysisoptn  analysis option (element 1: statistics/frequency table, element 2: charts)
+    # standalone    if TRUE, image links are prefixed with the tmpimgfile path
+    
+    DBG<-FALSE    # set to TRUE for debugging output
+
+    # open the connection
+    #whtml<-file(tmphtmlfile, "w")
+    #on.exit(close(whtml))
+    
+    # color parameters
+    # legend: c(1:background, 2:table header, 3: table body(o), 4: table body(e))
+    # clschm <-c("#FFFFFF", "#CCFFCC","#e0ffff","#f0fff0") # green-based palette
+    # blue-based palette
+    #clschm <-c("#FFFFFF", "#e6e6fa","#ffffff","#f5f5f5")
+    clschm <-c("dvnUnvStatTbl", "dvnUnvStatTblHdr","dvnUnvStatTblRowO","dvnUnvStatTblRowE")
+    
+    # table parameters
+    # legend: c(border, cellspacing)
+     tblprm <-c(0, 2)
+    
+    #cat("\nEntered the function univarStatHtml\n")
+    
+    # values for local tests; set localtest to TRUE only while testing locally
+    localtest<-FALSE
+    if (localtest){
+        tmpimgfile<-c("")
+        imgprfx1<-c("<img src=\"")
+        imgprfx2<-c("")
+        univarstathdr<-c("Valid Cases", "Invalid Cases(NAs)", "Total", "Mean", "Standard deviation", "Skewness", "Kurtosis", "Coefficient of variation", "Mode", "Minimum","1st Quartile","Median","3rd Quartile","Maximum","Range","Interquartile Range","Normality Test:Shapiro-Wilk Statistic", "(Shapiro-Wilk Statistic: p value)")
+    }
+    if (standalone) {
+        imgflprfx<-paste(imgprfx1,tmpimgfile,imgprfx2,sep="")
+    } else {
+        imgflprfx<-"<img src=\""
+    }
+    # constant for rendering a table for univariate statistics(continuous vars only)
+    uslstlen<-length(univarstathdr)
+    nrows <-ceiling(uslstlen/2)
+    blnkcell<-(uslstlen%%2==1)
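+    # e.g. with the 18 statistics listed in the localtest block above,
+    # nrows = 9 two-column rows and blnkcell is FALSE; an odd-length list
+    # would pad the final cell with "&nbsp;".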
+    
+    
+    nameset<-names(dtfrm)
+    varlabelset<-attr(dtfrm,"var.labels")
+    CHRTLST<-attr(dtfrm, "univarChart.lst")
+    STATLST<-attr(dtfrm, "univarStat.lst")
+    VARTYPE<-attr(dtfrm, "var.type")
+    VALINDEX<-attr(dtfrm, "val.index")
+    VALTABLE<-attr(dtfrm, "val.table")
+    
+    
+    pt.varheader<-function(namesi, varlabelsi=NA) {h3<-paste("<h3>", namesi, if (!is.na(varlabelsi)) {paste(": ", varlabelsi, sep="")}, "</h3>\n",sep="");h3}
+
+    ###################
+    # continuous case
+    univarStatHtml.cntn<-function(statlst, imgfllst, cmbntn, namesi, varlabelsi){
+
+        # statlst   STATLST[[as.character(i)]]
+        # imgfllst  imgfllst=CHRTLST[[as.character(i)]]
+        # cmbntn    analysisoptn
+        # function definition sections
+
+        # create the first tr tag: chart part
+        pt.tr1<-function(imgfllst, cmbntn){
+            tr1<-""
+            if (cmbntn[2]) {
+
+                if (cmbntn[1]) { colspan<-" colspan=\"2\"" } else { colspan<-""}
+
+                # both
+
+                if(!is.null(imgfllst[["hstbx"]])){
+                    tr1.l<-paste("<td",colspan,">\n",imgflprfx,imgfllst[["hstbx"]],imgsffx1,"</td>\n",sep="")
+                } else {
+                    tr1.l<-paste("<td",colspan,">\n<p><B><font color=red>Histogram/Boxplot Not Available</font></B></p>\n</td>\n",sep="")
+                }
+
+                if(!is.null(imgfllst[["qqplt"]])) {
+                    tr1.r<-paste("<td",colspan,">\n",imgflprfx,imgfllst[["qqplt"]],imgsffx1,"</td>\n",sep="")
+                } else {
+                    tr1.r<-paste("<td",colspan,">\n<p><B><font color=red>Normal Q-Q plot Not Available</font></B></p>\n</td>\n",sep="")
+                }
+
+                tr1<-paste("<tr>\n",tr1.l,tr1.r,"</tr>\n",sep="")
+            }
+            tr1
+        }
+
+        # create the 2nd and thereafter tr tags: statistics part
+        pt.tr2<-function(statlst, cmbntn){
+            tr2<-""
+            if (cmbntn[1]) {
+                # statistics on
+                # table header
+                tr2<-paste("<tr class=\"",clschm[2],"\">\n<td align=\"left\"><b>Statistic</b></td><td align=\"right\"><b>Value</b></td>\n<td align=\"left\"><b>Statistic</b></td><td align=\"right\"><b>Value</b></td>\n</tr>\n",sep="")
+
+                # statistical data
+                # when # of statistics is not even
+                if (blnkcell){ univarstathdr[length(statlst)+1]<-"&nbsp;"}
+
+                # table body
+                for (j in 1:nrows) {
+                    if (j%%2==FALSE) colorprm <- clschm[3] else colorprm <-clschm[4]
+
+                    tr2<-paste(tr2, 
+                    "<tr class=\"",colorprm,"\">\n",
+                    "<td align=\"left\">",univarstathdr[j],"</td>\n", 
+                    "<td align=\"right\">", prettyNum(statlst[[j]]),"</td>\n", 
+                    "<td align=\"left\">",univarstathdr[j+nrows],"</td>\n", 
+                    "<td align=\"right\">", if ( (j==nrows) & (blnkcell) ) {"&nbsp;"} else {prettyNum(statlst[[j+nrows]])},"</td>\n</tr>\n", sep="")
+                }
+            }
+            tr2
+        }
+
+        # create the chart/statistics table segment
+        pt.tbl<-function(statlst=statlst,cmbntn=cmbntn,imgfllst=imgfllst){
+            tr1<-pt.tr1(imgfllst=imgfllst, cmbntn=cmbntn)
+            tr2<-pt.tr2(statlst=statlst, cmbntn=cmbntn)
+            tbl<-paste("<center>\n<table border=\"",tblprm[1],"\" class=\"",clschm[1],"\" cellspacing=\"",tblprm[1],"\" >\n",tr1,tr2,"</table>\n</center>\n",sep="")
+            tbl
+        }
+
+        # create per variable html segment
+        pt.varunit.cntn<-function(vhdr,vcntnts){varunit<-paste(vhdr,vcntnts,"<hr/>", sep="");varunit}
+        ## end of function definitions ##
+
+        # implementation
+
+        pttbl<-pt.tbl(statlst=statlst, imgfllst=imgfllst, cmbntn=cmbntn)
+        ptvarheader<-pt.varheader(namesi=namesi, varlabelsi=varlabelsi)
+        ptvarunitc<-pt.varunit.cntn(vhdr=ptvarheader, vcntnts=pttbl)
+
+        ptvarunitc
+    } # end of continuous case
+    
+    
+    ######################
+    # discrete case
+
+    univarStatHtml.dscrt<-function(statlst, imgfllst, cmbntn, namesi, varlabelsi, vltbl) {
+        # statlst   STATLST[[as.character(i)]]
+        # imgfllst  imgfllst=CHRTLST[[as.character(i)]]
+        # cmbntn    analysisoptn
+        # function definition sections
+
+        #statlst[["freqtbl"]]
+        # mode and median even if a freq table is not available 
+        nrw<-3
+        # add one for "total" row
+        #if (!is.na(statlst$freqtbl)) {nrw<-length(statlst$freqtbl)+1+nrw}
+
+        if (class(statlst$freqtbl)=="table") {nrw<-length(statlst$freqtbl)+nrw}
+        # nrws: rowspan parameter value if the chart option is chosen
+        nrws<-nrw+1
+
+        pt.tr1<-function(imgfllst, cmbntn){
+            try({
+            # tr1.l: chart part
+            tr1.l<-""
+            sprsstr1r<-FALSE
+            if (cmbntn[2]) {
+                rowspan<-""
+                if (cmbntn[1]) { rowspan<-paste(" rowspan=\"",nrws,"\"",sep="") }
+
+                if(!is.na(imgfllst[["brchrt"]])){
+                    tr1.l<-paste("<td",rowspan," valign=\"top\">\n",imgflprfx,imgfllst[["brchrt"]], imgsffx1, "</td>\n", sep="")
+                } else {
+                    if (class(statlst$freqtbl)=="table"){
+                        rowspan<-paste(" rowspan=\"",nrws,"\"",sep="")
+                        tr1.l<-paste("<td",rowspan," valign=\"top\">\n<p><B><small>The number of categories is more than 10 or equal to 1.<br>Table substitutes for Bar plot</small></B></p>\n</td>\n",sep="")
+                        cmbntn[1]<-1
+                    } else {
+
+                        tr1.lm<-paste("<td align=\"left\" colspan=\"3\" valign=\"top\">\n<p><B><small>The number of categories is more than 50. Frequency/Percentage tables are not shown here</small></B></p>\n</td></tr>\n",sep="")
+                        
+                        tr1.lhdr<-paste("<tr><td align=\"left\" class=\"",clschm[2],"\" ><b>Value: Value Label</b></td><td align=\"right\" class=\"",clschm[2],"\" ><b>Freq</b></td><td align=\"right\" class=\"",clschm[2],"\" ><b>Percent</b></td>\n",sep="")
+                        tr1.l<-paste(tr1.lm,tr1.lhdr, sep="")
+                        
+                        sprsstr1r<-TRUE
+                    }
+                }
+            }
+            # tr1.r: freq/pcnt table header part
+            tr1.r<-""
+            if (cmbntn[1]) {
+                if (class(statlst$freqtbl)=="table"){
+                    tr1.r<-paste("<td align=\"left\" class=\"",clschm[2],"\" ><b>Value: Value Label</b></td><td align=\"right\" class=\"",clschm[2],"\" ><b>Freq</b></td><td align=\"right\" class=\"",clschm[2],"\" ><b>Percent</b></td>\n",sep="")
+                } else if (!sprsstr1r){
+                
+                    tr1.rm<-paste("<td align=\"left\" colspan=\"3\" valign=\"top\">\n<p><B><small>The number of categories is more than 50. Frequency/Percentage tables are not shown here</small></B></p>\n</td></tr>\n",sep="")
+                    
+                    tr1.rhdr<-paste("<tr><td align=\"left\" class=\"",clschm[2],"\" ><b>Value: Value Label</b></td><td align=\"right\" class=\"",clschm[2],"\" ><b>Freq</b></td><td align=\"right\" class=\"",clschm[2],"\" ><b>Percent</b></td>\n",sep="")
+                    
+                    tr1.r<-paste(tr1.rm,tr1.rhdr, sep="")
+                }
+            }
+            tr1<-paste("<tr>\n",tr1.l,tr1.r,"</tr>\n",sep="")
+            }) # end of try
+        }
+
+        # create the 2nd and thereafter tr tags: statistics part
+        pt.tr2<-function(statlst, cmbntn, vltbl, imgfllst){
+            try({
+            tr2<-""
+            tableon<-FALSE
+            if ( cmbntn[2]){
+                if (is.na(imgfllst[["brchrt"]])){
+                    tableon<-TRUE
+                }
+            }
+            if (cmbntn[1] | tableon) {
+
+                if (class(statlst$freqtbl)=="table") {tblkey<-names(statlst$freqtbl)}
+                # if freqtbl is NA, tblkey becomes NULL
+                for (j in 1:nrw) {
+                    if (j%%2==FALSE) { colorprm <- clschm[3]} else {colorprm <-clschm[4]}
+                    if (j < (nrw -2)) {
+
+                        catgrylbl<-""
+                        if (!is.null(vltbl)){
+                            if(!is.null(vltbl[[tblkey[j]]])) {
+                                catgrylbl<-paste("(",vltbl[[tblkey[j]]],")",sep="")
+                            }
+                        }
+                        tr2<-paste(tr2, "<tr class=\"",colorprm,"\">\n<td align=\"left\">",tblkey[j],catgrylbl,"</td>\n<td align=\"right\">",statlst$freqtbl[[j]],"</td>\n<td align=\"right\">", signif(statlst$pcnttbl[[j]],3),"</td>\n</tr>\n", sep="")
+
+                    } else if (j == (nrw -2)) {
+                        #cat("entering the total row\n")
+                        tr2<-paste(tr2, "<tr class=\"",colorprm,"\">\n<td align=\"left\">Total</td>\n<td align=\"right\">",statlst$Vald+statlst$Invald,"</td>\n<td align=\"right\">100</td>\n</tr>\n", sep="")
+
+                    } else if (j == (nrw -1)) {
+                        # median
+                        #cat("entering the median\n")
+                        median.vl<- "Not Available"
+                        median.lbl<-""
+                        if (!is.null(statlst$Median)) {
+                            median.vl<- as.character(statlst$Median)
+                            if (!is.null(vltbl) && (nrw>3)){
+                                if (!is.null(vltbl[[median.vl]])) {
+                                    median.lbl<-paste("(",vltbl[[median.vl]],")",sep="")
+                                }
+                            }
+                        }
+
+                        tr2<-paste(tr2,"<tr class=\"",colorprm,"\">\n<td align=\"left\">Median</td>\n<td align=\"right\">",median.vl,"</td>\n<td align=\"right\">",median.lbl,"</td>\n</tr>\n", sep="")
+
+                    } else if (j == nrw) {
+                        # mode
+                        #cat("entering the Mode\n")
+                        mode.vl<-"Not Available"
+                        mode.lbl<-""
+                        if (!is.null(statlst$Mode)) {
+                            mode.vl<-statlst$Mode
+                            if (!is.null(vltbl) && (nrw>3) ) {
+                                if (!is.null(vltbl[[mode.vl]])) {
+                                    mode.lbl<-paste("(",vltbl[[mode.vl]], ")", sep="")
+                                }
+                            }
+                        }
+
+                        tr2<-paste(tr2,"<tr class=\"",colorprm,"\">\n<td align=\"left\">Mode</td>\n<td align=\"right\">",mode.vl,"</td>\n<td align=\"right\">",mode.lbl,"</td>\n</tr>\n", sep="")
+                    }
+                }
+            }
+            tr2
+            }) # end of try
+        }
+
+        # create the chart/statistics table segment
+        pt.tbl<-function(statlst=statlst,cmbntn=cmbntn,imgfllst=imgfllst,vltbl=vltbl){
+            try({
+            tr1<-pt.tr1(imgfllst=imgfllst, cmbntn=cmbntn)
+            tr2<-pt.tr2(statlst=statlst, cmbntn=cmbntn, vltbl=vltbl,imgfllst=imgfllst)
+            tbl<-paste("<center>\n<table border=\"",tblprm[1],"\" class=\"",clschm[1],"\" cellspacing=\"",tblprm[1],"\" >\n",tr1,tr2,"</table>\n</center>\n",sep="")
+            tbl
+            })
+        }
+
+        # create per variable html segment
+        pt.varunit.dscrt<-function(vhdr,vcntnts){varunit<-paste(vhdr,vcntnts,"<hr/>", sep="");varunit}
+        
+        ## end of function definitions ##
+
+
+        # implementation
+        try({
+        #cat("enters the discrete html body function\n", sep="")
+        pttbl<-pt.tbl(statlst=statlst, imgfllst=imgfllst, cmbntn=cmbntn, vltbl=vltbl)
+
+        ptvarheader<-pt.varheader(namesi=namesi, varlabelsi=varlabelsi)
+        ptvarunitd<-pt.varunit.dscrt(vhdr=ptvarheader, vcntnts=pttbl)
+
+        ptvarunitd
+        })
+    } # end of discrete case
+    
+    
+    
+    # main 
+    # implementation
+        rawVarName <- nameset
+        if (length(attr(dtfrm, "Rsafe2raw"))>0){
+            Rsafe2raw <- attr(dtfrm, "Rsafe2raw")
+            for (i in 1:length(nameset)){
+                if (!is.null(Rsafe2raw[[nameset[i]]])){
+                    rawVarName[i] <-  Rsafe2raw[[nameset[i]]];
+                }
+            }
+        }
+    
+    for (i in 1:dim(dtfrm)[2]){
+        try({
+        if (VARTYPE[i]==2) {
+            varsgmnt.c<-univarStatHtml.cntn(statlst=STATLST[[as.character(i)]], imgfllst=CHRTLST[[as.character(i)]], cmbntn=analysisoptn, namesi=rawVarName[i], varlabelsi=varlabelset[i])
+            #cat(file=whtml, varsgmnt.c, sep="")
+            HTML(file=whtml, varsgmnt.c)
+        } else {
+            if (DBG) {cat(i,"-th var before entering the discrete html function\n", sep="")}
+            #cat("check the value table=",VALTABLE[[VALINDEX[[i]]]],"\n", sep="")
+            if (is.null(VALINDEX[[as.character(i)]])){valtable<-NULL} else {valtable<-VALTABLE[[VALINDEX[[as.character(i)]]]]}
+            varsgmnt.d<-univarStatHtml.dscrt(statlst=STATLST[[as.character(i)]], imgfllst=CHRTLST[[as.character(i)]], cmbntn=analysisoptn, namesi=rawVarName[i], varlabelsi=varlabelset[i], vltbl=valtable)
+            #cat(file=whtml, varsgmnt.d, sep="")
+            HTML(file=whtml, varsgmnt.d)
+        }
+        }) # end of try
+    } # end of var-wise for-loop
+    
+
+} #end of the function univarStatHtmlBody
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/config/error.xsl	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+
+<!--
+    Document   : error.xsl
+    Created on : May 22, 2007, 2:36 PM
+    Author     : Ellen Kraffmiller
+    Description:
+        Copies the oai:error element of an OAI-PMH response into a bare
+        <error> element, preserving its attributes and child nodes.
+-->
+
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0" xmlns:oai="http://www.openarchives.org/OAI/2.0/" >
+    
+
+    <!-- TODO customize transformation rules 
+         syntax recommendation http://www.w3.org/TR/xslt 
+    -->
+    
+  
+    
+    <xsl:template match="/">
+       <OAI-PMH>
+            <xsl:apply-templates select="oai:OAI-PMH/oai:error"/>
+       </OAI-PMH>
+    </xsl:template>
+
+    <xsl:template match="oai:OAI-PMH/oai:error">
+        <error>
+            <xsl:apply-templates select="@* | node()"/>
+        </error>
+    </xsl:template>
+
+   <xsl:template match="@* | node()">
+      <xsl:copy>
+          <xsl:apply-templates select="@* | node()"/>
+      </xsl:copy>
+    </xsl:template>
+    
+   
+</xsl:stylesheet>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/config/fgdc2ddi.xsl	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,224 @@
+<?xml version="1.0" encoding="utf-8"?>
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns="http://www.icpsr.umich.edu/DDI" xmlns:a="http://www.openarchives.org/OAI/2.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" exclude-result-prefixes="a">
+    <xsl:output method="xml" encoding="UTF-8" indent="yes"/>
+    <xsl:strip-space elements="*"/>
+    <xsl:param name="schemaLocation">http://www.icpsr.umich.edu/DDI/Version2-0.xsd</xsl:param>
+    <xsl:template match="/">
+         <xsl:apply-templates select="a:metadata"/>
+    </xsl:template>
+    
+    <xsl:template match="a:metadata">
+	<!-- Nest all in a codebook element -->
+        <xsl:element name="codeBook" namespace="http://www.icpsr.umich.edu/DDI">
+            <xsl:attribute name="xsi:schemaLocation"><xsl:text>http://www.icpsr.umich.edu/DDI </xsl:text>
+		<xsl:value-of select="$schemaLocation"/></xsl:attribute>
+            <docDscr>
+            	<citation>
+        	    <xsl:call-template name="titlStmt"/>
+
+		    <xsl:if test="normalize-space(a:idinfo/a:citation/a:citeinfo/a:onlink) != ''">
+		       <holdings><xsl:attribute name="URI"><xsl:value-of select="normalize-space(a:idinfo/a:citation/a:citeinfo/a:onlink)"/></xsl:attribute>
+		       </holdings>
+		    </xsl:if>
+
+                </citation>
+            </docDscr>
+
+            <stdyDscr>
+            	<citation>
+        	    <xsl:call-template name="titlStmt"/>
+
+                    <rspStmt>
+                     <xsl:for-each select="a:idinfo/a:citation/a:citeinfo/a:origin">
+                       <xsl:if test="normalize-space(.) != ''">
+                         	<AuthEnty>
+                            	 <xsl:value-of select="normalize-space(.)"/>
+                        	</AuthEnty>   
+		       </xsl:if>
+		      </xsl:for-each>
+                     </rspStmt>
+
+        	     <xsl:if test="normalize-space(a:idinfo/a:ptcontac/a:cntinfo/a:cntperp/a:cntper) != '' or normalize-space(a:idinfo/a:citation/a:citeinfo/a:pubdate) != '' or normalize-space(a:idinfo/a:ptcontac/a:cntinfo/a:cntorgp/a:cntorg) != '' or normalize-space(a:idinfo/a:citation/a:citeinfo/a:origin) != ''">
+                     <prodStmt>
+                    	<xsl:if test="normalize-space(a:idinfo/a:ptcontac/a:cntinfo/a:cntorgp/a:cntorg) != ''">
+			   <producer>
+			      <xsl:value-of select="normalize-space(a:idinfo/a:ptcontac/a:cntinfo/a:cntorgp/a:cntorg)"/>
+                           </producer>
+                        </xsl:if>
+			<xsl:if test="normalize-space(a:dataqual/a:lineage/a:procstep/a:proccont/a:cntinfo/a:cntperp/a:cntper) != ''">
+			    <producer>
+			       <xsl:value-of select="normalize-space(a:dataqual/a:lineage/a:procstep/a:proccont/a:cntinfo/a:cntperp/a:cntper)"/>
+			    </producer>
+			</xsl:if>					
+
+                    	<xsl:for-each select="a:idinfo/a:citation/a:citeinfo/a:origin">
+			<xsl:if test="normalize-space(.) != ''">
+			    <producer>
+			       <xsl:value-of select="normalize-space(.)"/>
+			    </producer>
+                        </xsl:if>
+                        </xsl:for-each>
+
+
+			<xsl:if test="normalize-space(a:idinfo/a:citation/a:citeinfo/a:pubdate) != ''">
+                            <prodDate>
+			       <xsl:attribute name="date"><xsl:value-of select="normalize-space(a:idinfo/a:citation/a:citeinfo/a:pubdate)"/></xsl:attribute>
+			    </prodDate>
+                        </xsl:if>
+			<xsl:if test="normalize-space(a:idinfo/a:citation/a:citeinfo/a:pubinfo/a:pubplace) != ''">
+			    <prodPlac>
+			       <xsl:value-of select="normalize-space(a:idinfo/a:citation/a:citeinfo/a:pubinfo/a:pubplace)"/>
+			    </prodPlac>
+			</xsl:if>
+		     </prodStmt>
+                     </xsl:if>
+
+
+                    <distStmt>
+                       <distrbtr>
+				Harvard Geospatial Library
+                       </distrbtr>
+                    </distStmt>
+
+                    <xsl:if test="normalize-space(a:idinfo/a:citation/a:citeinfo/a:serinfo/a:sername) != ''">
+		    <serStmt>
+			<serName>
+                        <xsl:value-of select="normalize-space(a:idinfo/a:citation/a:citeinfo/a:serinfo/a:sername)"/>
+			</serName>
+		    </serStmt>
+		    </xsl:if>
+
+		    <xsl:if test="normalize-space(a:idinfo/a:citation/a:citeinfo/a:onlink) != ''">
+		       <holdings><xsl:attribute name="URI"><xsl:value-of select="normalize-space(a:idinfo/a:citation/a:citeinfo/a:onlink)"/></xsl:attribute>
+		       </holdings>
+		    </xsl:if>
+
+               </citation>
+
+               <stdyInfo>
+		    <subject>
+			<xsl:for-each select="a:idinfo/a:keywords/a:theme">
+			    <xsl:variable name="vocab" select="./a:themekt"/>
+			    <xsl:for-each select="./a:themekey">
+			     <topcClas source="archive">
+			     <xsl:attribute name="vocab"><xsl:value-of select="$vocab"/></xsl:attribute>
+				   <xsl:value-of select="normalize-space(.)"/>
+			     </topcClas>
+			    </xsl:for-each>
+			</xsl:for-each>
+		    </subject>                         
+
+		    <xsl:if test="normalize-space(a:idinfo/a:descript/a:abstract) != ''">
+                    <abstract>
+						<xsl:value-of select="normalize-space(a:idinfo/a:descript/a:abstract)"/>	
+                    </abstract>
+		    </xsl:if>
+
+		    <xsl:if test="normalize-space(a:idinfo/a:timeperd/a:timeinfo/a:rngdates) != '' or normalize-space(a:idinfo/a:spdom/a:bounding) != '' or normalize-space(a:idinfo/a:timeperd/a:timeinfo/a:sngdate) != '' or normalize-space(a:idinfo/a:timeperd/a:timeinfo/a:mdattim) != '' or normalize-space(a:idinfo/a:keywords/a:place/a:placekey) != ''">
+                    <sumDscr>
+			<xsl:if test="normalize-space(a:idinfo/a:timeperd/a:timeinfo/a:sngdate/a:caldate) != ''">
+                         <timePrd event="single">
+			 <xsl:value-of select="normalize-space(a:idinfo/a:timeperd/a:timeinfo/a:sngdate/a:caldate)"/> 
+                         </timePrd>
+                        </xsl:if>
+
+			<xsl:for-each select="a:idinfo/a:timeperd/a:timeinfo/a:mdattim/a:sngdate/a:caldate">
+			<xsl:if test="normalize-space(.) != ''">
+                         <timePrd event="single">
+			 <xsl:value-of select="normalize-space(.)"/> 
+                         </timePrd>
+                        </xsl:if>
+			</xsl:for-each>
+
+			<xsl:if test="normalize-space(a:idinfo/a:timeperd/a:timeinfo/a:rngdates/a:begdate) != ''">
+                         <timePrd event="start">
+			 <xsl:value-of select="normalize-space(a:idinfo/a:timeperd/a:timeinfo/a:rngdates/a:begdate)"/> 
+                         </timePrd>
+                        </xsl:if>
+
+			<xsl:if test="normalize-space(a:idinfo/a:timeperd/a:timeinfo/a:rngdates/a:enddate) != ''">
+                         <timePrd event="end">
+			 <xsl:value-of select="normalize-space(a:idinfo/a:timeperd/a:timeinfo/a:rngdates/a:enddate)"/> 
+                         </timePrd>
+                        </xsl:if>
+
+			<xsl:if test="normalize-space(a:idinfo/a:keywords/a:place/a:placekey) != ''">
+                         <geogCover>
+				<xsl:value-of select="normalize-space(a:idinfo/a:keywords/a:place/a:placekey)"/>
+                         </geogCover>
+                        </xsl:if>
+
+			<xsl:if test="normalize-space(a:idinfo/a:spdom/a:bounding) != ''">
+                         <geoBndBox>
+				<westBL><xsl:value-of select="normalize-space(a:idinfo/a:spdom/a:bounding/a:westbc)"/></westBL>
+				<eastBL><xsl:value-of select="normalize-space(a:idinfo/a:spdom/a:bounding/a:eastbc)"/></eastBL>
+				<southBL><xsl:value-of select="normalize-space(a:idinfo/a:spdom/a:bounding/a:southbc)"/></southBL>
+				<northBL><xsl:value-of select="normalize-space(a:idinfo/a:spdom/a:bounding/a:northbc)"/></northBL>
+                         </geoBndBox>
+                        </xsl:if>
+
+			<xsl:if test="normalize-space(a:idinfo/a:citation/a:citeinfo/a:geoform) != ''">	
+			 <dataKind>
+			   <xsl:value-of select="normalize-space(a:idinfo/a:citation/a:citeinfo/a:geoform)"/>
+			 </dataKind>
+			</xsl:if>
+                    </sumDscr>
+                    </xsl:if>
+                </stdyInfo>
+
+		<xsl:for-each select="a:dataqual/a:lineage/a:procstep/a:procdesc">
+		<xsl:if test="normalize-space(.) != ''">
+		   <method>
+		      <dataColl>
+		         <sampProc>
+			    <xsl:value-of select="normalize-space(.)"/>
+			 </sampProc>
+		      </dataColl>
+		   </method>
+		</xsl:if>
+		</xsl:for-each>
+
+		<xsl:if test="normalize-space(a:idinfo/a:accconst) != '' or normalize-space(a:idinfo/a:useconst) != ''" >
+		<dataAccs>
+		<useStmt>
+		<specPerm>
+			<xsl:value-of select="normalize-space(a:idinfo/a:accconst)"/>
+		</specPerm>
+		<restrctn>
+			<xsl:value-of select="normalize-space(a:idinfo/a:useconst)"/>
+		</restrctn>
+		</useStmt>     	
+		</dataAccs>
+		</xsl:if>
+
+
+            </stdyDscr>
+
+	    <xsl:if test="normalize-space(a:idinfo/a:citation/a:citeinfo/a:onlink) != ''">
+	    <otherMat level="study">
+	    <xsl:attribute name="URI"><xsl:value-of select="normalize-space(a:idinfo/a:citation/a:citeinfo/a:onlink)"/></xsl:attribute>
+	    <xsl:if test="normalize-space(a:distinfo/a:stdorder/a:digform/a:digtinfo/a:formname) != ''">
+	       <labl>
+		<xsl:value-of select="normalize-space(a:distinfo/a:stdorder/a:digform/a:digtinfo/a:formname)"/>
+	       </labl>
+	    </xsl:if>
+	    <xsl:if test="normalize-space(a:distinfo/a:stdorder/a:digform/a:digtinfo/a:filedec) != ''">
+	       <notes>File Decompression Technique: <xsl:value-of select="normalize-space(a:distinfo/a:stdorder/a:digform/a:digtinfo/a:filedec)"/>
+	       </notes>
+	    </xsl:if>
+	    </otherMat>
+	    </xsl:if>
+
+	</xsl:element>
+    </xsl:template>
+
+    <xsl:template name="titlStmt">
+               	   <titlStmt>
+                      <titl>                      
+                           <xsl:value-of select="normalize-space(a:idinfo/a:citation/a:citeinfo/a:title)"/>
+                       </titl>
+
+                    </titlStmt>
+        </xsl:template>
+
+</xsl:stylesheet>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/config/graphml.props	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,7 @@
+neostore.nodestore.db.mapped_memory=90M
+neostore.relationshipstore.db.mapped_memory=1G
+neostore.propertystore.db.mapped_memory=50M
+neostore.propertystore.db.strings.mapped_memory=100M
+neostore.propertystore.db.arrays.mapped_memory=0M
+neostore.propertystore.db.index.mapped_memory=50M
+neostore.propertystore.db.index.keys.mapped_memory=50M
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/config/header.xsl	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+
+<!--
+    Document   : header.xsl
+    Created on : May 22, 2007, 2:36 PM
+    Author     : Ellen Kraffmiller
+    Description:
+        Extracts the oai:header of an OAI-PMH GetRecord response into a
+        bare <header> element, copying its attributes and child nodes.
+-->
+
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0" xmlns:oai="http://www.openarchives.org/OAI/2.0/" >
+    
+
+    <!-- TODO customize transformation rules 
+         syntax recommendation http://www.w3.org/TR/xslt 
+    -->
+    
+  
+    
+    <xsl:template match="/">	  	
+    	<xsl:apply-templates select="oai:OAI-PMH/oai:GetRecord/oai:record/oai:header"/>    		
+    </xsl:template>
+	
+    <xsl:template match="oai:OAI-PMH/oai:GetRecord/oai:record/oai:header">  	
+    	<header>
+        	<xsl:apply-templates select="@* | node()"/>
+     	</header>
+    </xsl:template>
+
+   <xsl:template match="@* | node()">
+      <xsl:copy>
+          <xsl:apply-templates select="@* | node()"/>
+      </xsl:copy>
+    </xsl:template>
+    
+   
+</xsl:stylesheet>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/config/jhove.conf	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<jhoveConfig version="1.1"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns="http://hul.harvard.edu/ois/xml/ns/jhove/jhoveConfig"
+ xsi:schemaLocation="http://hul.harvard.edu/ois/xml/ns/jhove/jhoveConfig
+                     http://hul.harvard.edu/ois/xml/xsd/jhove/1.4/jhoveConfig.xsd">
+ <jhoveHome>../applications/j2ee-apps/DVN-EAR</jhoveHome>
+ <defaultEncoding>utf-8</defaultEncoding>
+ <tempDirectory>/tmp</tempDirectory>
+ <bufferSize>131072</bufferSize>
+ <mixVersion>1.0</mixVersion>
+ <sigBytes>1024</sigBytes>
+ <module>
+  <class>edu.harvard.hul.ois.jhove.module.AiffModule</class>
+ </module>
+ <module>
+  <class>edu.harvard.hul.ois.jhove.module.WaveModule</class>
+ </module>
+ <module>
+  <class>edu.harvard.hul.ois.jhove.module.PdfModule</class>
+ </module>
+ <module>
+  <class>edu.harvard.hul.ois.jhove.module.Jpeg2000Module</class>
+ </module>
+ <module>
+  <class>edu.harvard.hul.ois.jhove.module.JpegModule</class>
+ </module>
+ <module>
+  <class>edu.harvard.hul.ois.jhove.module.GifModule</class>
+ </module>
+ <module>
+  <class>edu.harvard.hul.ois.jhove.module.TiffModule</class>
+ </module>
+ <module>
+  <class>edu.harvard.hul.ois.jhove.module.HtmlModule</class>
+ </module>
+ <module>
+  <class>edu.harvard.hul.ois.jhove.module.AsciiModule</class>
+ </module>
+ <module>
+  <class>edu.harvard.hul.ois.jhove.module.Utf8Module</class>
+ </module>
+</jhoveConfig>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/config/logging.properties	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,116 @@
+#
+# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
+#
+# Copyright (c) 2010-2011 Oracle and/or its affiliates. All rights reserved.
+#
+# The contents of this file are subject to the terms of either the GNU
+# General Public License Version 2 only ("GPL") or the Common Development
+# and Distribution License("CDDL") (collectively, the "License").  You
+# may not use this file except in compliance with the License.  You can
+# obtain a copy of the License at
+# https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html
+# or packager/legal/LICENSE.txt.  See the License for the specific
+# language governing permissions and limitations under the License.
+#
+# When distributing the software, include this License Header Notice in each
+# file and include the License file at packager/legal/LICENSE.txt.
+#
+# GPL Classpath Exception:
+# Oracle designates this particular file as subject to the "Classpath"
+# exception as provided by Oracle in the GPL Version 2 section of the License
+# file that accompanied this code.
+#
+# Modifications:
+# If applicable, add the following below the License Header, with the fields
+# enclosed by brackets [] replaced by your own identifying information:
+# "Portions Copyright [year] [name of copyright owner]"
+#
+# Contributor(s):
+# If you wish your version of this file to be governed by only the CDDL or
+# only the GPL Version 2, indicate your decision by adding "[Contributor]
+# elects to include this software in this distribution under the [CDDL or GPL
+# Version 2] license."  If you don't indicate a single choice of license, a
+# recipient has the option to distribute your version of this file under
+# either the CDDL, the GPL Version 2 or to extend the choice of license to
+# its licensees as provided above.  However, if you add GPL Version 2 code
+# and therefore, elected the GPL Version 2 license, then the option applies
+# only if the new code is made subject to such option by the copyright
+# holder.
+#
+#GlassFish logging.properties list
+#All attributes details
+handlers=java.util.logging.ConsoleHandler,com.sun.enterprise.server.logging.GFFileHandler
+java.util.logging.ConsoleHandler.formatter=com.sun.enterprise.server.logging.UniformLogFormatter
+com.sun.enterprise.server.logging.GFFileHandler.formatter=com.sun.enterprise.server.logging.UniformLogFormatter
+com.sun.enterprise.server.logging.GFFileHandler.file=${com.sun.aas.instanceRoot}/logs/server.log
+com.sun.enterprise.server.logging.GFFileHandler.rotationTimelimitInMinutes=0
+com.sun.enterprise.server.logging.GFFileHandler.flushFrequency=1
+java.util.logging.FileHandler.limit=50000
+com.sun.enterprise.server.logging.GFFileHandler.logtoConsole=false
+com.sun.enterprise.server.logging.GFFileHandler.rotationLimitInBytes=2000000
+com.sun.enterprise.server.logging.SyslogHandler.useSystemLogging=false
+java.util.logging.FileHandler.count=1
+com.sun.enterprise.server.logging.GFFileHandler.retainErrorsStasticsForHours=0
+log4j.logger.org.hibernate.validator.util.Version=warn
+com.sun.enterprise.server.logging.GFFileHandler.maxHistoryFiles=0
+com.sun.enterprise.server.logging.GFFileHandler.rotationOnDateChange=false
+java.util.logging.FileHandler.pattern=%h/java%u.log
+java.util.logging.FileHandler.formatter=java.util.logging.XMLFormatter
+edu.harvard.level=INFO
+# Used for setting log record format values for setting log data under server.log file
+# Generic Record Format: [#|yyyy-mm-ddTHH:mm:ss:SSS-ZZZZ|Level|ProductId|Logger Name|Name Value Pairs|MsgId: Message|#]
+# User is allowed to change date format for recording data in server.log
+com.sun.enterprise.server.logging.GFFileHandler.logFormatDateFormat=yyyy-MM-dd'T'HH:mm:ss.SSSZ
+
+#All log level details
+com.sun.enterprise.server.logging.GFFileHandler.level=INFO
+javax.enterprise.system.tools.admin.level=INFO
+org.apache.jasper.level=INFO
+javax.enterprise.resource.corba.level=INFO
+javax.enterprise.system.core.level=INFO
+javax.enterprise.system.core.classloading.level=INFO
+javax.enterprise.resource.jta.level=INFO
+java.util.logging.ConsoleHandler.level=INFO
+javax.enterprise.system.webservices.saaj.level=INFO
+javax.enterprise.system.tools.deployment.level=INFO
+javax.enterprise.system.container.ejb.level=INFO
+javax.enterprise.system.core.transaction.level=INFO
+org.apache.catalina.level=INFO
+javax.enterprise.system.container.ejb.mdb.level=INFO
+org.apache.coyote.level=INFO
+org.apache.commons.digester.level=WARNING
+javax.enterprise.system.std.com.sun.enterprise.server.logging.level=WARNING
+javax.level=INFO
+javax.enterprise.resource.javamail.level=INFO
+javax.enterprise.system.webservices.rpc.level=INFO
+javax.enterprise.system.container.web.level=INFO
+javax.enterprise.system.util.level=INFO
+javax.enterprise.resource.resourceadapter.level=INFO
+javax.enterprise.resource.jms.level=INFO
+javax.enterprise.system.core.config.level=INFO
+javax.enterprise.system.level=INFO
+javax.enterprise.system.core.security.level=INFO
+javax.enterprise.system.container.cmp.level=INFO
+javax.enterprise.system.webservices.registry.level=INFO
+javax.enterprise.system.core.selfmanagement.level=INFO
+.level=INFO
+javax.enterprise.resource.jdo.level=INFO
+javax.enterprise.system.core.naming.level=INFO
+javax.enterprise.resource.webcontainer.jsf.application.level=INFO
+javax.enterprise.resource.webcontainer.jsf.resource.level=INFO
+javax.enterprise.resource.webcontainer.jsf.config.level=INFO
+javax.enterprise.resource.webcontainer.jsf.context.level=INFO
+javax.enterprise.resource.webcontainer.jsf.facelets.level=INFO
+javax.enterprise.resource.webcontainer.jsf.lifecycle.level=INFO
+javax.enterprise.resource.webcontainer.jsf.managedbean.level=INFO
+javax.enterprise.resource.webcontainer.jsf.renderkit.level=INFO
+javax.enterprise.resource.webcontainer.jsf.taglib.level=INFO
+javax.enterprise.resource.webcontainer.jsf.timing.level=INFO
+javax.enterprise.resource.sqltrace.level=INFO
+javax.org.glassfish.persistence.level=INFO
+org.jvnet.hk2.osgiadapter.level=INFO
+javax.enterprise.system.tools.backup.level=INFO
+org.glassfish.admingui.level=INFO
+javax.enterprise.system.ssl.security.level=INFO
+ShoalLogger.level=CONFIG
+org.eclipse.persistence.session.level=INFO
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/config/metadata.xsl	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+
+<!--
+    Document   : metadata.xsl
+    Created on : May 22, 2007, 2:36 PM
+    Author     : Ellen Kraffmiller
+    Description:
+        Unwraps the oai:metadata element of an OAI-PMH GetRecord response,
+        copying its contents via an identity transform.
+-->
+
+<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0" xmlns:oai="http://www.openarchives.org/OAI/2.0/" >
+    
+
+    <!-- TODO customize transformation rules 
+         syntax recommendation http://www.w3.org/TR/xslt 
+    -->
+    
+  
+    
+    <xsl:template match="/">
+    	
+       <xsl:apply-templates select="oai:OAI-PMH/oai:GetRecord/oai:record/oai:metadata"/>
+    </xsl:template>
+
+    <xsl:template match="oai:OAI-PMH/oai:GetRecord/oai:record/oai:metadata">
+    
+        <xsl:apply-templates select="@* | node()"/>
+    </xsl:template>
+
+   <xsl:template match="@* | node()">
+      <xsl:copy>
+          <xsl:apply-templates select="@* | node()"/>
+      </xsl:copy>
+    </xsl:template>
+    
+   
+</xsl:stylesheet>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/config/mif2ddi.xsl	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,466 @@
+<?xml version="1.0" encoding="utf-8"?>
+<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns="http://www.icpsr.umich.edu/DDI" xmlns:a="http://www.thedataweb.org/mif" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" exclude-result-prefixes="a">
+    <xsl:output method="xml" encoding="UTF-8" indent="yes"/>
+    <xsl:strip-space elements="*"/>
+    <xsl:param name="schemaLocation">http://www.icpsr.umich.edu/DDI/Version1-3.xsd</xsl:param>
+    <xsl:param name="fileDscrId" select="concat('ID',generate-id())"/>
+    <!-- *** -->
+    <!-- Default Template to start processing of a MIF Document -->
+    <!-- *** -->
+    <xsl:template match="/">
+            <xsl:choose>
+                <xsl:when test="a:mifSet">
+                    <xsl:apply-templates select="a:mifSet/a:mif[position() = 1]"/>
+    		    <!-- TODO: Deal with multiple docs in a mifset-->
+                </xsl:when>
+                <xsl:otherwise>
+                    <xsl:apply-templates select="a:mif"/>
+                </xsl:otherwise>
+            </xsl:choose>
+    </xsl:template>
+    
+    <xsl:template match="a:mif">
+	<!-- Nest all in a codebook element -->
+        <xsl:element name="codeBook" namespace="http://www.icpsr.umich.edu/DDI">
+            <xsl:attribute name="xsi:schemaLocation"><xsl:text>http://www.icpsr.umich.edu/DDI </xsl:text>
+		<xsl:value-of select="$schemaLocation"/></xsl:attribute>
+            <docDscr>
+            	<citation>
+        	    <xsl:call-template name="titlStmt"/>
+        	    	<xsl:if test="normalize-space(a:documentDescription/a:producer) != '' or normalize-space(a:documentDescription/a:producer/@prodDate) != ''">
+                    	<prodStmt>
+                    		<xsl:if test="normalize-space(a:documentDescription/a:producer) != ''">
+                         		<producer>
+                                	<xsl:value-of select="normalize-space(a:documentDescription/a:producer)"/>
+                          		</producer>
+                          	</xsl:if>
+                          	<xsl:if test="normalize-space(a:documentDescription/a:producer/@prodDate) != ''">
+                          		<prodDate>
+                          			<xsl:attribute name="date"><xsl:value-of select="normalize-space(a:documentDescription/a:producer/@prodDate)"/></xsl:attribute>
+                          		</prodDate>
+                          	</xsl:if>
+                    	</prodStmt>
+                    </xsl:if>
+                    <distStmt>
+                    <!--  We are adding this temporarily. Census should provide this information in the MIF.  -->
+           			 	<distrbtr>
+               				<ExtLink URI="http://www.thedataweb.org/images/ferrett_dataweb.jpg" title="Logo" role="image"/>
+                        	U.S. Census Bureau
+                        	<ExtLink URI="http://www.thedataweb.org/index.html" title="URL"/>
+            			</distrbtr>
+        			 </distStmt>
+                    <xsl:if test="normalize-space(a:documentDescription/a:version/@versionnum) != ''">
+                    	<verStmt>
+                        	<version>
+                             <xsl:attribute name="date"><xsl:value-of select="normalize-space(a:documentDescription/a:version/@modDate)"/></xsl:attribute>
+                             <xsl:value-of select="normalize-space(a:documentDescription/a:version/@versionnum)"/>
+                        	 </version>
+                    	</verStmt>
+                    </xsl:if>
+                    <!--  Add holdings for all studies from thedataweb -->
+                    <holdings URI="http://www.thedataweb.org"/>
+                </citation>
+            </docDscr>
+            <stdyDscr>
+            	<citation>
+        	    <xsl:call-template name="titlStmt"/>
+                    <xsl:if test="normalize-space(a:dataSet/a:providerInfo) != '' and normalize-space(a:dataSet/a:providerInfo/@name) != ''">
+                    	<rspStmt>
+                         	<AuthEnty>
+                            	 <xsl:value-of select="normalize-space(a:dataSet/a:providerInfo/@name)"/>
+                    	     	<xsl:text>:</xsl:text>
+                             	<xsl:value-of select="normalize-space(a:dataSet/a:providerInfo)"/>
+                        	</AuthEnty>   
+                    	</rspStmt>
+                    </xsl:if>
+                    <xsl:if test="normalize-space(a:dataSet/a:sponsorInfo/@name) != '' or normalize-space(a:dataSet/a:sponsorInfo/@imageUrl) != '' or normalize-space(a:dataSet/a:sponsorInfo/@homepageUrl) != ''">
+                    <distStmt>
+                       <distrbtr>
+                        <xsl:if test="normalize-space(a:dataSet/a:sponsorInfo/@imageUrl) != ''">
+                       		<ExtLink title="Logo" role="image">
+                       		<xsl:attribute name="URI"><xsl:value-of select="normalize-space(a:dataSet/a:sponsorInfo/@imageUrl)"/></xsl:attribute>
+                       	    </ExtLink>
+                       	</xsl:if>
+                        	<xsl:value-of select="normalize-space(a:dataSet/a:sponsorInfo/@name)"/>
+                        <xsl:if test="normalize-space(a:dataSet/a:sponsorInfo/@homepageUrl) != ''">
+                        	<ExtLink title="URL">
+                       		<xsl:attribute name="URI"><xsl:value-of select="normalize-space(a:dataSet/a:sponsorInfo/@homepageUrl)"/></xsl:attribute>
+                       	    </ExtLink>
+                        </xsl:if>
+                       </distrbtr>
+                    </distStmt>
+                    </xsl:if>
+                </citation>
+                <stdyInfo>
+		    <subject>
+			<topcClas source="archive" vocab="TheDataWeb">TheDataWeb</topcClas> 
+			<!-- TODO: Grab variable concepts and put them here --> 
+		    </subject>
+		    	<xsl:if test="normalize-space(a:dataSet/a:abstract) != ''">
+                    <abstract>
+						<xsl:value-of select="a:dataSet/a:abstract"/>	
+                    </abstract>
+                 </xsl:if>
+                 <xsl:if test="normalize-space(a:dataSet/a:collectDate/@end) != '' or normalize-space(a:dataSet/a:collectDate/@start) != '' or  normalize-space(a:dataSet/a:category) != '' ">               
+                    <sumDscr>
+                       <xsl:if test="normalize-space(a:dataSet/a:collectDate/@end) != '' or normalize-space(a:dataSet/a:collectDate/@start) != ''">
+                         <collDate>
+                             <xsl:attribute name="date"><xsl:value-of select="normalize-space(a:dataSet/a:collectDate/@start)"/></xsl:attribute>
+			    			 <xsl:attribute name="event">start</xsl:attribute>
+						</collDate>
+                         <collDate>
+                            <xsl:attribute name="date"><xsl:value-of select="normalize-space(a:dataSet/a:collectDate/@end)"/></xsl:attribute>
+			     <xsl:attribute name="event">end</xsl:attribute>
+                         </collDate>
+                         </xsl:if>
+                         <xsl:if test="normalize-space(a:dataSet/a:category) != ''">
+                         <dataKind>
+                              <xsl:value-of select="normalize-space(a:dataSet/a:category)"/>
+                         </dataKind>
+                         </xsl:if>
+                    </sumDscr>
+                  </xsl:if>
+                </stdyInfo>
+
+		<!-- The original MIFs should have some text in restrictions explaining where to get the data. -->
+        
+        	<xsl:if test="normalize-space(a:dataSet/a:restriction/@originaluri) != ''">   
+			<dataAccs>
+					<useStmt>
+            			<specPerm>
+            			<xsl:attribute name="URI"><xsl:value-of select="normalize-space(a:dataSet/a:restriction/@originaluri)"/></xsl:attribute>
+            			You must agree to the terms and conditions described here: <xsl:value-of select="normalize-space(a:dataSet/a:restriction/@originaluri)"/></specPerm>
+            		</useStmt>     	
+ 			</dataAccs>
+			</xsl:if>
+
+            </stdyDscr>
+            <xsl:if test="a:dataSet/a:extractionHost">
+               <fileDscr>
+                    <xsl:attribute name="ID"><xsl:value-of select="$fileDscrId"/></xsl:attribute>
+                    <xsl:attribute name="URI">
+			<!-- TODO: Add virtualid match for round-tripping -->
+			<!-- TODO: Need to url-encode these -->
+			<!-- TODO: Need to  add port-->
+			<xsl:value-of select="a:dataSet/a:extractionHost/@uri"/>
+			<xsl:text>/TheDataWeb_Tabulation/VDCRepositoryServlet/</xsl:text>	
+			<xsl:value-of select="a:dataSet/a:shortName"/>
+			<xsl:text>/</xsl:text>	
+			<xsl:value-of select="a:dataSet/a:subsurveyName"/>
+			<xsl:text>/</xsl:text>	
+			<xsl:value-of select="a:dataSet/a:component"/>
+			<xsl:text>/</xsl:text>	
+			<xsl:value-of select="a:dataSet/a:instance"/>
+			<xsl:text>/</xsl:text>	
+			<xsl:value-of select="a:dataSet/a:extractionHost/@type"/>
+                    </xsl:attribute>
+                    <fileTxt>
+                        <fileName>Data File</fileName>
+                        <fileCont><xsl:value-of select="a:dataSet/a:longName"/></fileCont>
+                    </fileTxt>                 
+               </fileDscr>
+            </xsl:if>
+            <xsl:apply-templates select="a:variables"/>
+        </xsl:element>
+    </xsl:template>
+    <!-- ********************************************************************************************************************************-->
+    <!-- ***                        Variables Template: matches variable section of MIF Document                                      ***-->
+    <!-- ********************************************************************************************************************************-->
+    <xsl:template match="a:variables">
+        <dataDscr>
+            <xsl:apply-templates select="a:var"/>
+        </dataDscr>
+    </xsl:template>
+    <!-- ********************************************************************************************************************************-->
+    <!-- ***                        Variable Template: transforms each MIF Variable to a DDI Variable                                 ***-->
+    <!-- ********************************************************************************************************************************-->
+    <xsl:template match="a:var">
+        <var>
+            <xsl:attribute name="name"><xsl:value-of select="@id"/></xsl:attribute>
+            <xsl:attribute name="ID"><xsl:value-of select="@id"/></xsl:attribute>
+            <xsl:if test="./a:type/@isweight = 'Y'">
+                <xsl:attribute name="wgt">wgt</xsl:attribute>
+            </xsl:if>
+            <xsl:if test="./a:type/@weightvar">
+                <xsl:attribute name="wgt-var"><xsl:value-of select="./a:type/@weightvar"/></xsl:attribute>
+            </xsl:if>
+            <xsl:if test="./a:type/@decimal and ./a:type/@decimal > 0">
+                <xsl:attribute name="dcml"><xsl:value-of select="./a:type/@decimal"/></xsl:attribute>
+            </xsl:if>
+            <xsl:if test="./a:type/@datatype and ./a:type/@decimal">
+                <xsl:attribute name="intrvl"><xsl:choose>
+                    <xsl:when test="(./a:type/@datatype = 'numeric' or ./a:type/@datatype = 'impliedDecimal') and ./a:type/@decimal > 0 ">contin</xsl:when>
+                    <xsl:when test="./a:type/@datatype = 'numeric' and ./a:type/@decimal = 0 and ./a:values">discrete</xsl:when>
+                    <xsl:otherwise>contin</xsl:otherwise>
+                </xsl:choose></xsl:attribute>
+            </xsl:if>
+            <location fileid="{$fileDscrId}"/>
+            <xsl:apply-templates select="a:label"/>
+            <xsl:apply-templates select="a:security"/>
+            <xsl:apply-templates select="a:values"/>
+            <xsl:apply-templates select="a:universe"/>
+            <!--xsl:apply-templates select="a:longDscr"/-->
+            <!--xsl:apply-templates select="a:type/a:categories/a:catValu"/-->
+            <!--xsl:apply-templates select="a:concept"/-->
+            <!-- DDI MAPPING NOTE: codeBook/dataDscr/var/varFormat -->
+	    <varFormat>
+	    <xsl:attribute name="type">
+               <xsl:choose>
+                 <xsl:when test="./a:type[@datatype='floatingPoint']">numeric</xsl:when>
+                 <xsl:when test="./a:type[@datatype='numeric']">numeric</xsl:when>
+                 <xsl:when test="./a:type[@datatype='impliedDecimal']">numeric</xsl:when>
+                 <xsl:otherwise>character</xsl:otherwise>
+                </xsl:choose>
+            </xsl:attribute>
+	    <xsl:attribute name="formatname"><xsl:value-of select="./a:type/@datatype"/></xsl:attribute>
+	    <xsl:attribute name="schema">other</xsl:attribute>
+	    </varFormat>
+            <xsl:apply-templates select="a:period"/>
+            <xsl:apply-templates select="a:attachment"/>
+            <xsl:apply-templates select="a:synonyms"/>
+        </var>
+    </xsl:template>
+    <xsl:template match="a:synonyms">
+        <xsl:for-each select="a:synonym">
+            <notes type="mif/variables/var/synonyms/synonym">
+                <xsl:value-of select="."/>
+            </notes>
+        </xsl:for-each>
+    </xsl:template>
+    <xsl:template match="a:attachment">
+        <notes type="mif/variables/var/attachment[{position()}]/@type">
+            <xsl:value-of select="@type"/>
+        </notes>
+        <notes type="mif/variables/var/attachment[{position()}]/@title">
+            <xsl:value-of select="@title"/>
+        </notes>
+        <notes type="mif/variables/var/attachment[{position()}]/@uri">
+            <xsl:value-of select="@uri"/>
+        </notes>
+        <notes type="mif/variables/var/attachment[{position()}]/text()">
+            <xsl:value-of select="."/>
+        </notes>
+    </xsl:template>
+    <!-- ********************************************************************************************************************************-->
+    <!-- ***                        Long Description Template                                                                                                 ***-->
+    <!-- ********************************************************************************************************************************-->
+    <xsl:template match="a:longDscr">
+        <xsl:if test=". != ''">
+        <txt>
+            <xsl:value-of select="."/>
+        </txt>
+        </xsl:if>
+    </xsl:template>
+    <!-- ********************************************************************************************************************************-->
+    <!-- ***                        values Templates                                                                                                             ***-->
+    <!-- ********************************************************************************************************************************-->
+    <xsl:template match="a:values">
+    	<xsl:variable name="iNodes" select="count(a:item)"/>
+    	<xsl:variable name="rNodes" select="count(a:range)"/>
+    	
+    	<xsl:if test="$rNodes > 0 or $iNodes > 0">
+    		<valrng>
+    			<xsl:if test="$rNodes > 0">
+    				<xsl:apply-templates select="a:range" mode="range"/>
+    			</xsl:if>
+    			<xsl:if test="$iNodes > 0">
+    				<xsl:apply-templates select="a:item" mode="items"/>
+    			</xsl:if>
+    			<xsl:if test="$rNodes > 0">
+    				<key><xsl:text>&#xA;</xsl:text>
+    					<xsl:apply-templates select="a:range" mode="rkey"/>
+    				</key>
+    			</xsl:if>
+
+    		</valrng>
+    	</xsl:if>
+    	<xsl:if test="$iNodes > 0">
+    		<xsl:apply-templates select="a:item" mode="categry"/>
+    	</xsl:if>
+    </xsl:template>
+    
+    
+    <xsl:template match="a:range" mode="range">
+            <range>
+                <xsl:attribute name="min"><xsl:value-of select="./@min"/></xsl:attribute>
+                <xsl:attribute name="max"><xsl:value-of select="./@max"/></xsl:attribute>
+                <!-- xsl:value-of select="."/ -->
+            </range>
+            <!-- notes>
+                <xsl:value-of select="normalize-space(.)"/>
+            </notes -->
+    </xsl:template>
+    
+    <xsl:template match="a:item" mode="items">
+        <item>
+                <xsl:attribute name="VALUE"><xsl:value-of select="./@value"/></xsl:attribute>
+        </item>
+    </xsl:template>
+
+
+    <xsl:template match="a:range" mode="rkey">
+                <xsl:text>(</xsl:text><xsl:value-of select="./@min"/><xsl:text>-</xsl:text><xsl:value-of select="./@max"/><xsl:text>) = (</xsl:text><xsl:value-of select="normalize-space(.)"/><xsl:text>)&#xA;</xsl:text>
+    </xsl:template>
+
+    <xsl:template match="a:item" mode="categry">
+        <catgry>
+            <xsl:if test="./@missing">
+                <xsl:attribute name="missing"><xsl:choose><xsl:when test="./@missing = 'Y'">Y</xsl:when><xsl:otherwise>N</xsl:otherwise></xsl:choose></xsl:attribute>
+            </xsl:if>
+            <catValu>
+                <xsl:value-of select="./@value"/>
+            </catValu>
+            <labl>
+            	<xsl:value-of select="normalize-space(.)"/>
+            </labl>
+            <xsl:if test="./@count">
+                <catStat type="freq">
+                    <xsl:value-of select="./@count"/>
+                </catStat>
+            </xsl:if>
+        </catgry>
+    </xsl:template>
+    
+    
+    <!-- ********************************************************************************************************************************-->
+    <!-- ***                       Universe Template                                                                                                              ***-->
+    <!-- ********************************************************************************************************************************-->
+    <xsl:template match="a:universe">
+        <universe>
+        	<xsl:if test=". != ''">
+            <txt>
+                <xsl:value-of select="normalize-space(.)"/>
+            </txt>
+            </xsl:if>
+            <concept>
+                <xsl:value-of select="./@type"/>
+            </concept>
+        </universe>
+    </xsl:template>
+    <!-- ********************************************************************************************************************************-->
+    <!-- ***                       Concept Template                                                                                                               ***-->
+    <!-- ********************************************************************************************************************************-->
+    <xsl:template match="a:concept">
+        <concept>
+            <xsl:attribute name="vocab"><xsl:value-of select="./@type"/></xsl:attribute>
+            <xsl:value-of select="normalize-space(.)"/>
+        </concept>
+    </xsl:template>
+    <!-- ********************************************************************************************************************************-->
+    <!-- ***                       Variable Label Template                                                                                                      ***-->
+    <!-- ********************************************************************************************************************************-->
+    <xsl:template match="a:label">
+        <labl level="variable">
+            <xsl:value-of select="normalize-space(.)"/>
+        </labl>
+    </xsl:template>
+    <!-- ********************************************************************************************************************************-->
+    <!-- ***                       Security Template                                                                                                               ***-->
+    <!-- ********************************************************************************************************************************-->
+    <xsl:template match="a:security">
+        <security>
+            <xsl:attribute name="date"><xsl:value-of select="./@date"/></xsl:attribute>
+            <xsl:value-of select="./@level"/>
+        </security>
+    </xsl:template>
+    <!-- ********************************************************************************************************************************-->
+    <!-- ***                       Variable Period Template                                                                                                    ***-->
+    <!-- ********************************************************************************************************************************-->
+    <xsl:template match="a:period">
+        <notes type="mif/variables/var/period/@start">
+            <xsl:value-of select="./@start"/>
+        </notes>
+        <notes type="mif/variables/var/period/@end">
+            <xsl:value-of select="./@end"/>
+        </notes>
+    </xsl:template>
+
+    <xsl:template match="*|@*">
+        <xsl:comment><xsl:call-template name="full-path"/><xsl:value-of select="concat('=',.)"/></xsl:comment>
+    </xsl:template>
+    <!-- ********************************************************************************************************************************-->
+    <!-- ***                       Title/ handle Template                                                                                                    ***-->
+    <!-- ********************************************************************************************************************************-->
+    <xsl:template name="titlStmt">
+               	   <titlStmt>
+                      <titl>                      
+                           <xsl:value-of select="normalize-space(a:dataSet/a:longName)"/>
+                           <xsl:if test="normalize-space(a:dataSet/a:shortName) != ''">
+                    			<xsl:text>(</xsl:text>
+								<xsl:value-of select="normalize-space(a:dataSet/a:shortName)"/>
+                    			<xsl:text>)</xsl:text>
+                    		</xsl:if>
+                    		<xsl:if test="normalize-space(a:dataSet/a:subsurveyName) != ''">
+                    			<xsl:text>:</xsl:text>
+								<xsl:value-of select="normalize-space(a:dataSet/a:subsurveyName)"/>
+							</xsl:if>
+							<xsl:if test="normalize-space(a:dataSet/a:inheritedComponent) != ''">
+                    			<xsl:text>:</xsl:text>
+								<xsl:value-of select="normalize-space(a:dataSet/a:inheritedComponent)"/>
+							</xsl:if>
+							<xsl:if test="normalize-space(a:dataSet/a:component) != ''">	
+                    			<xsl:text>:</xsl:text>
+ 								<xsl:value-of select="normalize-space(a:dataSet/a:component)"/>
+ 							</xsl:if>
+ 							<xsl:if test="normalize-space(a:dataSet/a:instance) != ''">
+                    			<xsl:text>:</xsl:text>
+ 								<xsl:value-of select="normalize-space(a:dataSet/a:instance)"/>
+ 							</xsl:if>
+ 							
+                       </titl>
+                       <IDNo agency="producer">
+                                <xsl:value-of select="normalize-space(a:dataSet/a:shortName)"/>
+                    		<xsl:text>/</xsl:text>
+				<xsl:value-of select="normalize-space(a:dataSet/a:subsurveyName)"/>
+                    		<xsl:text>/</xsl:text>
+				<xsl:value-of select="normalize-space(a:dataSet/a:component)"/>
+                    		<xsl:text>/</xsl:text>
+				<xsl:value-of select="normalize-space(a:dataSet/a:instance)"/>
+                        </IDNo>
+                    </titlStmt>
+        </xsl:template>
+
+        <xsl:template name="full-path">
+            <xsl:for-each select="ancestor-or-self::*">
+		<xsl:variable name="id" select="generate-id(.)"/>
+		<xsl:variable name="name" select="name()"/>
+		<xsl:value-of select="concat('/',name())"/>
+		<xsl:for-each select="../*[name()=$name]">
+                    <xsl:if test="generate-id(.)=$id">
+			<xsl:text>[</xsl:text>
+			<xsl:value-of select="position()"/>
+			<xsl:text>]</xsl:text>
+                    </xsl:if>
+		</xsl:for-each>
+            </xsl:for-each>
+            <xsl:if test="not(self::*)">
+                <xsl:choose>
+                    <xsl:when test="self::text()">
+                    	<xsl:text>/text()</xsl:text>
+                    	<xsl:text>[</xsl:text>
+							<xsl:value-of select="position()"/>
+						<xsl:text>]</xsl:text>
+                    </xsl:when>
+                    <xsl:when test="self::comment()">      
+                        <xsl:text>/comment()</xsl:text>
+                        <xsl:text>[</xsl:text>
+							<xsl:value-of select="position()"/>
+						<xsl:text>]</xsl:text>
+                    </xsl:when>
+                    <xsl:when test="self::processing-instruction()">   
+                       <xsl:text>/processing-instruction()</xsl:text>
+                       <xsl:text>[</xsl:text>
+							<xsl:value-of select="position()"/>
+						<xsl:text>]</xsl:text>
+                    </xsl:when>
+                    <xsl:otherwise>
+                        <xsl:value-of select="concat('/@',name())"/>
+                    </xsl:otherwise>
+                </xsl:choose>
+            </xsl:if>
+        </xsl:template>
+
+</xsl:stylesheet>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/config/neodb.props	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,5 @@
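+# Memory-mapped buffer sizes for the Neo4j stores backing the DVN network data feature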
+neostore.nodestore.db.mapped_memory=32M
+neostore.relationshipstore.db.mapped_memory=384M
+neostore.propertystore.db.mapped_memory=256M
+neostore.propertystore.db.strings.mapped_memory=1M
+neostore.propertystore.db.arrays.mapped_memory=1M
Binary file DVN-web/installer/dvninstall/config/networkData/lib/collections-generic-4.01.jar has changed
Binary file DVN-web/installer/dvninstall/config/networkData/lib/colt-1.2.0.jar has changed
Binary file DVN-web/installer/dvninstall/config/networkData/lib/concurrent-1.3.4.jar has changed
Binary file DVN-web/installer/dvninstall/config/networkData/lib/geronimo-jta_1.1_spec-1.1.1.jar has changed
Binary file DVN-web/installer/dvninstall/config/networkData/lib/jung-algorithms-2.0.jar has changed
Binary file DVN-web/installer/dvninstall/config/networkData/lib/jung-api-2.0.jar has changed
Binary file DVN-web/installer/dvninstall/config/networkData/lib/jung-visualization-2.0.jar has changed
Binary file DVN-web/installer/dvninstall/config/networkData/lib/junit-3.8.1.jar has changed
Binary file DVN-web/installer/dvninstall/config/networkData/lib/lucene-core-2.9.2.jar has changed
Binary file DVN-web/installer/dvninstall/config/networkData/lib/neo4j-index-1.1.jar has changed
Binary file DVN-web/installer/dvninstall/config/networkData/lib/neo4j-kernel-1.1.jar has changed
Binary file DVN-web/installer/dvninstall/config/networkData/lib/neo4j-utils-1.1.jar has changed
Binary file DVN-web/installer/dvninstall/config/networkData/lib/nestedvm-1.0.jar has changed
Binary file DVN-web/installer/dvninstall/config/networkData/lib/network_utils-1.0-SNAPSHOT.jar has changed
Binary file DVN-web/installer/dvninstall/config/networkData/lib/sqlite-jdbc-3.6.16.jar has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/config/oai_dc2ddi.xsl	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,135 @@
+<xsl:stylesheet version="1.0"
+		xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+		xmlns:dc="http://purl.org/dc/elements/1.1/"
+		exclude-result-prefixes="dc"
+>
+<xsl:output method="xml" version="1.0" encoding="UTF-8"
+		indent="yes" />
+<xsl:template match="/">
+<codeBook 
+          xmlns="http://www.icpsr.umich.edu/DDI" 
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
+          xsi:schemaLocation="http://www.icpsr.umich.edu/DDI 
+          http://www.icpsr.umich.edu/DDI/Version2-0.xsd">
+        <stdyDscr>
+            <citation>
+                <titlStmt>
+		   <titl>
+		   <xsl:for-each select="//dc:title">
+			<xsl:value-of select="."/>
+		   </xsl:for-each>
+		   </titl>			
+		   <xsl:for-each select="//dc:identifier">
+		      <IDNo>
+		        <xsl:attribute name="agency">
+		        <xsl:choose>
+		         <xsl:when test='starts-with(.,"hdl:")'>handle</xsl:when>
+			 <xsl:when test='starts-with(.,"http://hdl.handle.net/")'>handle</xsl:when>
+		        </xsl:choose>
+		        </xsl:attribute>
+		        <xsl:choose>
+			 <xsl:when test='starts-with(.,"http://hdl.handle.net/")'>hdl:<xsl:value-of select='substring(.,23)'/></xsl:when>
+		         <xsl:otherwise><xsl:value-of select="."/></xsl:otherwise>
+		        </xsl:choose>
+		      </IDNo>
+		   </xsl:for-each>
+	        </titlStmt>
+		<rspStmt>
+		   <xsl:for-each select="//dc:creator">
+		   <AuthEnty><xsl:value-of select="."/></AuthEnty>
+		   </xsl:for-each>
+		</rspStmt>
+
+		<prodStmt>
+		<xsl:for-each select="//dc:publisher">
+		<xsl:if test="normalize-space(.)!=''">
+		   <producer>
+		   <xsl:value-of select="."/>
+		   </producer>
+		</xsl:if>		
+		</xsl:for-each>
+
+		<xsl:for-each select="//dc:date">
+		<xsl:if test="normalize-space(.)!=''">
+		   <prodDate>
+		   <xsl:value-of select="normalize-space(.)"/>
+		   </prodDate>
+		</xsl:if>		
+		</xsl:for-each>		
+		</prodStmt>		
+
+	    </citation>
+	    <stdyInfo>
+		<subject>
+		<xsl:for-each select="//dc:subject">
+		   <keyword><xsl:value-of select="."/></keyword>
+		</xsl:for-each>	
+		</subject>
+		<xsl:for-each select="//dc:description">
+		<abstract>
+		   <xsl:value-of select="."/>
+		</abstract>
+		</xsl:for-each>	
+		<sumDscr>
+		<xsl:for-each select="//dc:coverage">
+		<xsl:if test="normalize-space(.)!=''">
+		   <geogCover>
+		      <xsl:value-of select="."/>
+		   </geogCover>
+		</xsl:if>
+		</xsl:for-each>	
+		<xsl:for-each select="//dc:type">
+		<xsl:if test="normalize-space(.)!=''">
+		   <dataKind>
+		      <xsl:value-of select="."/>
+		   </dataKind>
+		</xsl:if>
+		</xsl:for-each>	
+		</sumDscr>
+	    </stdyInfo>
+
+	    <xsl:if test="normalize-space(//dc:source)!=''">
+	    <method>
+	       <dataColl>
+	          <sources>
+		     <xsl:for-each select="//dc:source">
+		     <xsl:if test="normalize-space(.)!=''">
+	             <dataSrc>
+		        <xsl:value-of select="normalize-space(.)"/>
+	             </dataSrc>
+		     </xsl:if>
+		     </xsl:for-each>
+	          </sources>
+	       </dataColl>
+	     </method>
+	     </xsl:if>
+
+
+	    <xsl:for-each select="//dc:rights">
+	    <xsl:if test="normalize-space(.)!=''">
+            <dataAccs>
+	       <useStmt>
+	     	  <restrctn>
+		   	<xsl:value-of select="normalize-space(.)"/>
+		  </restrctn>
+               </useStmt>
+            </dataAccs>
+            </xsl:if>
+	    </xsl:for-each>
+	    <xsl:if test="normalize-space(//dc:relation)!=''">
+	    <othrStdyMat>
+	    <xsl:for-each select="//dc:relation">
+	    <xsl:if test="normalize-space(.)!=''">
+	       <relMat>
+	          <xsl:value-of select="normalize-space(.)"/>
+	       </relMat>
+	    </xsl:if>
+	    </xsl:for-each>
+	    </othrStdyMat>
+	    </xsl:if>
+	</stdyDscr>
+</codeBook>
+</xsl:template>
+</xsl:stylesheet>
+
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/config/oaicat.properties	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,51 @@
+# OAICat Configuration
+
+# This parameter is optional. Comment it out if you want the code to get the baseURL
+# from the request instead. Setting it here is only necessary if your firewall messes
+# with the request in a way that alters the baseURL from your expectations.
+# OAIHandler.baseURL=http://alcme.oclc.org/oaicat/OAIHandler
+
+# Uncomment this line if you want to include an XSL stylesheet reference
+# in the OAI response so they can be rendered into HTML by browsers.
+# OAIHandler.styleSheet=/oaicat/oaicat.xsl
+
+# Uncomment this line if you want OAICat to render the responses into HTML on the server using
+# OAIHandler.styleSheet when the request comes from an old browser.
+# OAIHandler.renderForOldBrowsers=true
+OAIHandler.appBase=../applications/j2ee-modules
+
+AbstractCatalog.oaiCatalogClassName=edu.harvard.iq.dvn.core.web.oai.catalog.DVNOAICatalog
+AbstractCatalog.recordFactoryClassName=edu.harvard.iq.dvn.core.web.oai.catalog.DVNXMLRecordFactory
+AbstractCatalog.secondsToLive=18000
+
+# Choose one of the following two
+#AbstractCatalog.granularity=YYYY-MM-DD
+AbstractCatalog.granularity=YYYY-MM-DDThh:mm:ssZ
+
+# Change to include properties peculiar to your implementation of AbstractCatalog
+DVNOAICatalog.maxListSize=10
+DVNOAICatalog.homeDir=../applications/j2ee-apps/DVN-EAR/DVN-web_war/WEB-INF/
+DVNOAICatalog.hideExtension=true
+
+FileRecordFactory.repositoryIdentifier=oaicat.oclc.org
+
+# Custom Identify response values
+Identify.repositoryName=Dataverse Network repository
+Identify.adminEmail=mailto:dataverse@lists.hmdc.harvard.edu
+Identify.earliestDatestamp=2000-01-01T00:00:00Z
+Identify.deletedRecord=no
+
+# This is the old way to specify <description><oai-identifier> for the Identify verb
+# Identify.repositoryIdentifier=oaicat.oclc.org
+# Identify.sampleIdentifier=oai:oaicat.oclc.org:OCLCNo/ocm00000012
+
+# This is the new way to specify <description> elements in general for the Identify verb
+# Append something unique like .1, .2, etc to 'Identify.description' for each occurrence
+#Identify.description.1=<description><oai-identifier xmlns=\"http://www.openarchives.org/OAI/2.0/oai-identifier\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://www.openarchives.org/OAI/2.0/oai-identifier http://www.openarchives.org/OAI/2.0/oai-identifier.xsd\"><scheme>oai</scheme><repositoryIdentifier>oaicat.oclc.org</repositoryIdentifier><delimiter>:</delimiter><sampleIdentifier>oai:oaicat.oclc.org:OCLCNo/ocm00000012</sampleIdentifier></oai-identifier></description>
+
+# List the supported metadataPrefixes along with the class that performs the associated crosswalk
+Crosswalks.oai_dc=ORG.oclc.oai.server.crosswalk.XML2oai_dc
+# Crosswalks.oai_etdms=ORG.oclc.oai.server.crosswalk.XML2oai_etdms
+Crosswalks.ddi=edu.harvard.iq.dvn.core.web.oai.catalog.DVNXML2ddi
+
+#FileMap2oai_dc.xsltName=../applications/j2ee-modules/oaicat/WEB-INF/etdms2dc.xsl
Binary file DVN-web/installer/dvninstall/doc/guides/_images/application-octet-stream.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_images/application-pdf.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_images/complex_exploration.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_images/complex_graph_screenshot.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_images/displaytabscreenshot.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_images/editfiltersscreenshot.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_images/editmeasuresscreenshot.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_images/edittimevariablescreenshot.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_images/measure_selected.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_images/simple_explore_data.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_images/sourcetabscreenshot.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-R-ingest.txt	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,189 @@
+==========================
+Ingest of R (.RData) files
+==========================
+
+Overview.
+=========
+
+Support for ingesting R data files has been added in version 3.5. R
+has become increasingly popular in the research/academic community,
+because it is free and open-source (unlike SPSS and Stata).
+Consequently, more and more data is becoming available
+exclusively as R data files. This long-awaited feature makes it
+possible to ingest such data into DVN as "subsettable" files.
+
+Requirements.
+=============
+
+R ingest relies on R having been installed, configured and made
+available to the DVN application via RServe (see the Installers
+Guide). This is in contrast to SPSS and Stata ingest, which can be
+performed without R present (though R is still needed to perform
+most subsetting/analysis tasks on the resulting data files).
+
+The data must be formatted as an R dataframe (data.frame()). If an
+.RData file contains multiple dataframes, only the first one will be
+ingested. A minimal example of creating such a file is shown below.
+
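+This sketch assumes nothing beyond base R; the variable and file names
+are invented for the example:
+
+    df <- data.frame(id = 1:3, score = c(1.5, 2.0, 2.5))
+    save(df, file = "mydata.RData")
+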
+Data Types, compared to other supported formats (Stata, SPSS)
+==============================================================
+
+Integers, Doubles, Character strings
+------------------------------------
+
+The handling of these types is intuitive and straightforward. The
+resulting tab file columns, summary statistics and UNF signatures
+should be identical to those produced by ingesting the same vectors
+from SPSS and Stata.
+
+**A couple of things that are unique to R/new in DVN:** 
+
+R explicitly supports Missing Values for all of the types above;
+Missing Values encoded in R vectors will be recognized and preserved
+in the TAB files (as 'NA'), and accounted for in the generated summary
+statistics and in data analysis.
+
+In addition to Missing Values, R recognizes "Not a Number" (NaN) and
+positive and negative infinity for floating point variables. These
+are now properly supported by the DVN.
+
+Also note that, unlike Stata, which distinguishes "float" and "double"
+as distinct data types, all floating point values in R are in fact
+double precision.
+
+R Factors 
+---------
+
+These are ingested as "Categorical Values" in the DVN. 
+
+One thing to keep in mind: in both Stata and SPSS, the actual value of
+a categorical variable can be either character or numeric. In R, all
+factor values are strings, even if they are string representations of
+numbers. So the values of the resulting categoricals in the DVN will
+always be of string type too (see the quick illustration below).
+
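+A minimal illustration in base R (the values are made up):
+
+    f <- factor(c(7, 8, 7))
+    levels(f)   # returns "7" "8" -- character strings, even for numeric data
+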
+| **New:** To properly handle *ordered factors* in R, the DVN now supports the concept of an "Ordered Categorical" - a categorical value where an explicit order is assigned to the list of value labels.
+
+(New!) Boolean values
+---------------------
+
+R Boolean (logical) values are supported. 
+
+
+Limitations of R, as compared to SPSS and STATA. 
+------------------------------------------------
+
+Most noticeably, R lacks a standard mechanism for defining descriptive
+labels for the data frame variables. In the DVN, similarly to
+both Stata and SPSS, variables have distinct names and labels, with
+the latter reserved for longer, descriptive text.
+With variables ingested from R data frames, the variable name will be
+used for both the "name" and the "label".
+
+| *Optional R packages exist for providing descriptive variable labels;
+ in one of the future versions support may be added for such a
+ mechanism. It would of course work only for R files that were
+ created with such optional packages*.
+
+Similarly, R categorical values (factors) lack descriptive labels too.
+**Note:** This is potentially confusing, since R factors do
+actually have "labels". This is a matter of terminology - an R
+factor's label is in fact the same thing as the "value" of a
+categorical variable in SPSS or Stata and DVN; it contains the actual
+meaningful data for the given observation. It is NOT a field reserved
+for explanatory, human-readable text, as is the case with the
+SPSS/Stata "label".
+
+Ingesting an R factor with the level labels "MALE" and "FEMALE" will
+produce a categorical variable with "MALE" and "FEMALE" in both the
+values and the labels.
+
+
+Time values in R
+================
+
+This warrants a dedicated section of its own, because of some unique
+ways in which time values are handled in R.
+
+R makes an effort to treat a time value as a real time instance. This
+is in contrast with either SPSS or Stata, where time value
+representations such as "Sep-23-2013 14:57:21" are allowed; note that
+in the absence of an explicitly defined time zone, this value cannot
+be mapped to an exact point in real time.  R handles times in the
+"Unix-style" way: the value is converted to the
+"seconds-since-the-Epoch" Greenwitch time (GMT or UTC) and the
+resulting numeric value is stored in the data file; time zone
+adjustments are made in real time as needed.
+
+Things still get ambiguous and confusing when R **displays** this time
+value: unless the time zone was explicitly defined, R will adjust the
+value to the current time zone. The resulting behavior is often
+counter-intuitive: if you create a time value, for example:
+
+		   timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS");
+
+on a computer configured for the San Francisco time zone, the value
+will be displayed differently on computers in different time zones;
+for example, as "12:57 PST" while still on the West Coast, but as
+"15:57 EST" in Boston.
+
+If it is important that the values are always displayed the same way,
+regardless of the current time zones, it is recommended that the time
+zone be explicitly defined. For example:
+
+     attr(timevalue,"tzone")<-"PST"
+or 
+   timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS", tz="PST");
+
+Now the value will always be displayed as "12:57 PST", regardless of
+the time zone that is current for the OS ... **BUT ONLY** if the OS
+where R is installed actually understands the time zone "PST", which
+is not by any means guaranteed! Otherwise, it will **quietly adjust**
+the stored GMT value to **the current time zone**, yet it will still
+display it with the "PST" tag attached! One way to rephrase this is
+that R does a fairly decent job **storing** time values in a
+non-ambiguous, platform-independent manner - but gives you no guarantee that
+the values will be displayed in any way that is predictable or intuitive.
+
+In practical terms, it is recommended to use the long/descriptive
+forms of time zones, as they are more likely to be properly recognized
+on most computers. For example, "Japan" instead of "JST".  Another possible
+solution is to explicitly use GMT or UTC (since it is very likely to be
+properly recognized on any system), or the "UTC+<OFFSET>" notation. Still, none of the above
+**guarantees** proper, non-ambiguous handling of time values in R data
+sets. The fact that R **quietly** modifies time values when it doesn't
+recognize the supplied timezone attribute, yet still appends it to the
+**changed** time value does make it quite difficult. (These issues are
+discussed in depth on R-related forums, and no attempt is made to
+summarize it all in any depth here; this is just to make you aware of
+this being a potentially complex issue!)
+
+An important thing to keep in mind, in connection with the DVN ingest
+of R files, is that it will **reject** an R data file with any time
+values that have time zones that we can't recognize. This is done in
+order to avoid some of the potential issues outlined above.
+
+It is also recommended that any vectors containing time values
+ingested into the DVN are reviewed, and the resulting entries in the
+TAB files are compared against the original values in the R data
+frame, to make sure they have been ingested as expected. 
+
+Another **potential issue** here is the **UNF**. The way the UNF
+algorithm works, the same date/time values with and without the
+timezone (e.g. "12:45" vs. "12:45 EST") **produce different
+UNFs**. Considering that time values in Stata/SPSS do not have time
+zones, but ALL time values in R do (yes, they all do - if the timezone
+wasn't defined explicitly, it implicitly becomes a time value in the
+"UTC" zone!), this means that it is **impossible** to have two time
+value vectors, in Stata/SPSS and R, that produce the same UNF.
+
+| **A pro tip:** if it is important to produce SPSS/Stata and R versions of
+ the same data set that result in the same UNF when ingested, you may
+ define the time variables as **strings** in the R data frame, and use
+ the "YYYY-MM-DD HH:mm:ss" formatting notation. This is the formatting used by the UNF
+ algorithm to normalize time values, so doing the above will result in
+ the same UNF as the vector of the same time values in Stata; a short
+ sketch follows below.
+
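+As a minimal sketch of the above (the data frame and column names are
+hypothetical):
+
+    # store the time variable as character strings, normalized to UTC
+    df$time <- format(df$time, "%Y-%m-%d %H:%M:%S", tz = "UTC")
+    save(df, file = "survey.RData")
+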
+Note: date values (dates only, without time) should be handled the
+exact same way as those in SPSS and Stata, and should produce the same
+UNFs.
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-api-main.txt	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,533 @@
+====================================
+APIs Guide
+====================================
+
+.. _api:
+
+**Introduction**
+
+We strongly encourage anyone interested in building tools to
+interoperate with the Dataverse Network to utilize our open source
+APIs. Please visit our `website <http://thedata.org/book/apps>`__  for
+examples of external apps that have been built to work with our APIs.
+
+.. _data-sharing-api:
+
+Data Sharing API
+++++++++++++++++++++++++++
+
+As of version 3.0, a new API for programmatic access to the DVN data and
+metadata has been added. The API allows a remote, non-DVN
+archive/application to search the holdings and download files from a
+Dataverse Network.
+
+The Data Sharing API documentation is available below:
+
+API URLs
+====================
+
+The URLs for the Data Sharing API resources are of the form:
+
+``/dvn/api/{/arg}{?{{arg}&...}}``
+
+Generally, mandatory arguments are embedded in the URL and optional
+arguments are supplied as query parameters, in the ``?param=...`` notation.
+See the documentation for the individual resources below for details.
+
+The API supports basic HTTP Authentication. So that the access
+credentials are not transmitted in the clear, the API verbs (methods)
+below are **only accessible over HTTPS**.
+
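+For example, a search request over HTTPS with basic authentication
+might look like this (the host and credentials are placeholders, in the
+style of the Data Deposit examples later in this guide):
+
+``curl -u $USERNAME:$PASSWORD https://$DVN_SERVER/dvn/api/metadataSearch/title:test``
+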
+Metadata API
+==========================
+
+The API for accessing Dataverse Network metadata is implemented in 4 verbs
+(resources):
+
+| ``metadataSearchFields`` 
+| ``metadataSearch`` 
+| ``metadataFormatsAvailable`` 
+| ``metadata``
+
+metadataSearchFields
+----------------------------------
+
+**Arguments:** 
+
+``none``
+
+**URL example:** 
+
+``/dvn/api/metadataSearchFields/``
+
+**Output:** 
+
+XML record in the format below: 
+
+.. code-block:: guess
+
+	<MetadataSearchFields>
+	<SearchableField>
+	<fieldName>title</fieldName>
+	<fieldDescription>title</fieldDescription>
+	</SearchableField>
+	<SearchableField>
+	<fieldName>authorName</fieldName>
+	<fieldDescription>authorName</fieldDescription>
+	</SearchableField>
+	<SearchableField>
+	<fieldName>otherId</fieldName>
+	<fieldDescription>otherId</fieldDescription>
+	</SearchableField>
+	...
+	</MetadataSearchFields>
+
+metadataSearch
+------------------------------------
+
+**Arguments:**
+
+| ``queryString: mandatory, embedded.``
+| *Standard Lucene-style search queries are supported (the same query format currently used to define OAI sets, etc.)*
+
+**URL examples:**
+
+| ``/dvn/api/metadataSearch/title:test``
+| ``/dvn/api/metadataSearch/title:test AND authorName:leonid``
+
+**Output:**
+
+XML record in the format below:
+
+.. code-block:: guess
+
+	<MetadataSearchResults>
+	<searchQuery>title:test</searchQuery>
+	<searchHits>
+	<study ID="hdl:TEST/10007"/>
+	...
+	</searchHits>
+	</MetadataSearchResults>
+
+**Error Conditions:**
+
+Note that when the query does not produce any results, the resource returns an XML record
+with an empty ``<searchHits>`` list, NOT a 404.
+
+metadataFormatsAvailable
+--------------------------------------
+
+**Arguments:**
+
+| ``objectId: mandatory, embedded.``
+| *Both global and local (database) IDs are supported.*
+
+**URL examples:**
+ 
+| ``/dvn/api/metadataFormatsAvailable/hdl:1902.1/6635``
+| ``/dvn/api/metadataFormatsAvailable/9956``
+
+**Output:** 
+
+XML record in the format below:
+
+.. code-block:: guess
+
+	<MetadataFormatsAvailable studyId="hdl:TEST/10007">
+	<formatAvailable selectSupported="true" excludeSupported="true">
+	<formatName>ddi</formatName>
+	<formatSchema>http://www.icpsr.umich.edu/DDI/Version2-0.xsd</formatSchema>
+	<formatMime>application/xml</formatMime>
+	</formatAvailable>
+	<formatAvailable>
+	<formatName>oai_dc</formatName>
+	<formatSchema>http://www.openarchives.org/OAI/2.0/oai_dc.xsd</formatSchema>
+	<formatMime>application/xml</formatMime>
+	</formatAvailable>
+	</MetadataFormatsAvailable> 
+
+(**Note** the ``selectSupported`` and ``excludeSupported`` attributes above!)
+
+**Error Conditions:**
+
+``404 NOT FOUND`` if study does not exist
+
+metadata
+-------------------------
+
+**Arguments:**
+
+| ``objectId: mandatory, embedded.``
+| *Both global and local (database) IDs are supported.*
+
+| ``formatType: optional, query.`` 
+| *Defaults to DDI if not supplied.*
+
+**URL examples:**
+
+| ``/dvn/api/metadata/hdl:1902.1/6635``
+| ``/dvn/api/metadata/9956``
+| ``/dvn/api/metadata/hdl:1902.1/6635?formatType=ddi``
+
+**Output:**
+
+Metadata record in the format requested, if available. No extra
+headers, etc.
+
+**Partial selection of metadata sections:**
+
+Where partial record requests are supported (see
+``metadataFormatsAvailable`` above for more info), these additional parameters can be supplied:
+
+| ``partialExclude: optional, query.``
+| *Xpath query representing metadata section to drop, where supported.*
+
+| ``partialInclude: optional, query.`` 
+| *Xpath query representing metadata section to include, where supported.*
+
+**Examples:**
+
+| ``/dvn/api/metadata/hdl:1902.1/6635?formatType=ddi&partialExclude=codeBook/dataDscr``
+| will produce a DDI without the dataDscr section. 
+| *[I’m expecting this to be the single most useful and common real-life application of this feature - L.A.]*
+
+| ``/dvn/api/metadata/hdl:1902.1/6635?formatType=ddi&partialInclude=codeBook/stdyDscr``
+| will produce a DDI with the stdyDscr section only. 
+
+(**Note**: for now, only simple top-level Xpath queries like the above are supported).
+
+One other limitation of the current implementation: it does not validate the supplied ``partialExclude`` and ``partialInclude`` arguments; no error messages/diagnostics will be given if the Xpath queries are not part of the metadata schema. For example, if you request ``partialInclude=foobar``, it will quietly produce an empty DDI, and ``partialExclude=foobar`` will not exclude anything (and you will get a complete DDI).
+
+**Error Conditions:**
+
+| ``404 NOT FOUND``
+| if study does not exist
+
+| ``503 SERVICE UNAVAILABLE``
+| if study exists, but the format requested is not available; 
+| also, when partial exclude or include is requested, if it’s not supported by the service (see the documentation for metadataFormatsAvailable above).
+
+**Notes:**
+
+A real-life workflow scenario may go as follows: 
+
+a. Find the searchable index fields on this DVN (metadataSearchFields)
+b. Run a search (metadataSearch) 
+c. For [select] studies returned, find what metadata formats are available (metadataFormatsAvailable) 
+d. Retrieve the metadata in the desired format (metadata)
+
+File Access API
+=====================
+
+The Dataverse Network API for downloading digital objects (files) is implemented in 2
+verbs (resources): 
+
+| ``downloadInfo`` 
+| ``download``
+
+downloadInfo
+-----------------------------
+
+**Arguments:**
+
+| ``objectId: mandatory, embedded.``
+| Database ID of the Dataverse Network Study File.
+
+**URL example:**
+
+``/dvn/api/downloadInfo/9956``
+
+**Output:**
+
+XML record in the format below: 
+
+*(Note: the record below is only an example; we will provide full schema/documentation of the FileDownloadInfo record format below)*
+
+.. code-block:: guess
+
+	<FileDownloadInfo>
+	<studyFile fileId="9956">
+
+	<fileName>prettypicture.jpg</fileName>
+	<fileMimeType>image/jpeg</fileMimeType>
+	<fileSize>52825</fileSize>
+
+	<Authentication>
+		<authUser>testUser</authUser>
+		<authMethod>password</authMethod>
+	</Authentication>
+
+	<Authorization directAccess="true"/>
+
+	<accessPermissions accessGranted="true">Authorized Access only</accessPermissions>
+
+	<accessRestrictions accessGranted="true">Terms of Use</accessRestrictions>
+
+	<accessServicesSupported>
+
+		<accessService>
+			<serviceName>thumbnail</serviceName>
+			<serviceArgs>imageThumb=true</serviceArgs>
+			<contentType>image/png</contentType>
+			<serviceDesc>Image Thumbnail</serviceDesc>
+		</accessService>
+
+	</accessServicesSupported>
+	</studyFile>
+	</FileDownloadInfo>
+
+**Error Conditions:**
+
+| ``404 NOT FOUND`` 
+| Study file does not exist.
+
+download
+---------------------------------
+
+**Arguments:**
+
+| ``objectId: mandatory, embedded.`` 
+| Database ID of the DVN Study File.
+
+| ``Optional Query args:``
+| As specified in the output of downloadInfo, above.
+
+**URL examples:**
+ 
+| ``/dvn/api/download/9956``
+| ``/dvn/api/download/9956?imageThumb=true``
+| ``/dvn/api/download/9957?fileFormat=stata``
+
+**Output:**
+
+Byte Stream (with proper HTTP headers specifying the content
+type, file name and such)
+
+**Error Conditions:**
+
+| ``404 NOT FOUND`` 
+| Study file does not exist.
+
+| ``401 AUTHORIZATION REQUIRED``
+| Access to restricted object attempted without HTTP Authorization header supplied.
+
+| ``403 PERMISSION DENIED HTTP``
+| Authorization header supplied, but the authenticated user is not
+| authorized to directly access the object protected by Access
+| Permissions and/or Access Restrictions (“Terms of Use”).
+
+.. _data-deposit-api:
+
+Data Deposit API
+++++++++++++++++
+
+As of version 3.6, a new API for programmatic deposit of data and metadata to the Dataverse Network has been added. The API allows a remote, non-Dataverse Network archive/application to deposit files and metadata to a Dataverse Network installation.
+
+Overview of Data Deposit API
+============================
+
+"v1" of the DVN Data Deposit API is a partial implementation of the SWORDv2 protocol, the specification for which available at http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html
+
+Please reference the SWORDv2 specification for expected HTTP status codes (e.g. 201, 204, 404), headers (e.g. "Location"), and so on.
+
+Data Deposit API v1 `curl` examples
+-----------------------------------
+
+The following `curl` commands demonstrate supported operations:
+
+Retrieve SWORD service document
+*******************************
+
+The service document enumerates the dataverses ("collections" from a SWORD perspective) the user can deposit data into. The "collectionPolicy" element for each dataverse contains the deposit terms of use for the network and dataverse.
+
+``curl https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/service-document``
+
+Create a study with an Atom entry (XML file)
+********************************************
+
+``curl --data-binary "@atom-entry-study.xml" -H "Content-Type: application/atom+xml" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS``
+
+.. code-block:: guess
+
+        <?xml version="1.0"?>
+        <!--
+        modified from http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html#protocoloperations_editingcontent_metadata
+        -->
+        <entry xmlns="http://www.w3.org/2005/Atom"
+               xmlns:dcterms="http://purl.org/dc/terms/">
+           <!-- some embedded metadata -->
+           <dcterms:title>Roasting at Home</dcterms:title>
+           <dcterms:creator>Peets, John</dcterms:creator>
+           <dcterms:creator>Stumptown, Jane</dcterms:creator>
+           <!-- Producer with financial or admin responsibility of the data -->
+           <dcterms:publisher>Coffee Bean State University</dcterms:publisher>
+           <!-- related publications --> 
+           <dcterms:isReferencedBy holdingsURI="http://dx.doi.org/10.1038/dvn333" agency="DOI"
+               IDNo="10.1038/dvn333">Peets, J., &amp; Stumptown, J. (2013). Roasting at Home. New England Journal of Coffee, 3(1), 22-34.</dcterms:isReferencedBy>
+           <!-- production date -->
+           <dcterms:date>2013-07-11</dcterms:date>
+           <!-- Other Identifier for the data in this study (or potentially global id if unused) -->
+           <!--
+           <dcterms:identifier>hdl:1XXZY.1/XYXZ</dcterms:identifier>
+           -->
+           <dcterms:description>Considerations before you start roasting your own coffee at home.</dcterms:description>
+           <!-- keywords -->
+           <dcterms:subject>coffee</dcterms:subject>
+           <dcterms:subject>beverage</dcterms:subject>
+           <dcterms:subject>caffeine</dcterms:subject>
+           <!-- geographic coverage -->
+           <dcterms:coverage>United States</dcterms:coverage>
+           <dcterms:coverage>Canada</dcterms:coverage>
+           <!-- kind of data -->
+           <dcterms:type>aggregate data</dcterms:type>
+           <!-- List of sources of the data collection-->
+           <dcterms:source>Stumptown, Jane. 2011. Home Roasting. Coffeemill Press.</dcterms:source>
+           <!-- restrictions -->
+           <dcterms:rights>Creative Commons CC-BY 3.0 (unported) http://creativecommons.org/licenses/by/3.0/</dcterms:rights>
+           <!-- related materials -->
+           <dcterms:relation>Peets, John. 2010. Roasting Coffee at the Coffee Shop. Coffeemill Press</dcterms:relation>
+        </entry>
+        
+Dublin Core (DC) Qualified Mapping - DDI - Dataverse Network DB Element Crosswalk
+***********************************************************************************
+
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|DC (terms: namespace)        |                DVN DB Element                |        DDI Element 2.x         |                                                                    Note                                                                    |
++=============================+==============================================+================================+============================================================================================================================================+
+|dcterms:title                |                    title                     |         2.1.1.1 title          |                                                                                                                                            |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:creator              |         author (LastName, FirstName)         |        2.1.2.1 AuthEnty        |                                                                                                                                            |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:subject              |                   keyword                    |        2.2.1.1. keyword        |                                                                                                                                            |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:description          |                   abstract                   |         2.2.2 abstract         |                                     Describing the purpose, scope or nature of the data collection...                                      |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:publisher            |                   producer                   |        2.1.3.1 producer        |                                person or agency financially or administratively responsible for the dataset                                |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:contributor          |                     n/a                      |              n/a               |                                                         see dcterms:creator above                                                          |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:date                 |productionDate (YYYY-MM-DD or YYYY-MM or YYYY)|        2.1.3.3 prodDate        |                                                  production or published date of dataset                                                   |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:type                 |                  kindOfData                  |       2.2.3.10 dataKind        |                     Type of data included in the file: survey data, census/enumeration data, aggregate data, clinical                      |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:format               |                     n/a                      |              n/a               |                                                                                                                                            |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:identifier           |                   otherID                    |          2.1.1.5 IDNo          |                        Don't use this field to map a journal article ID. Only ID's that directly belong to dataset                         |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:source               |                 dataSources                  |       2.3.1.8.1 dataSrc        |                       List of books, articles, data files if any that served as the sources for the data collection                        |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:language             |                     n/a                      |              n/a               |                                                                                                                                            |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:relation             |               relatedMaterial                |          2.5.1 relMat          |                      any related material (journal article is not included here - see: dcterms:isReferencedBy below)                       |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:coverage             |              geographicCoverage              |       2.2.3.4 geogCover        |                                                Info on the geographic coverage of the data                                                 |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:rights               |                 restrictions                 |        2.4.2.3 restrctn        |                                            any restrictions on the access or use of the dataset                                            |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:bibliographicCitation|                 dataCitation                 |       ? (2.1.7 biblCit)        |                                            data citation for the study in the Dataverse Network                                            |
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+|dcterms:isReferencedBy       |             studyRelPublications             |? (not set by DDI community yet)|the publication (journal article, book, other work) that uses this dataset (include citation, permanent identifier (DOI), and permanent URL)|
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+
+
+
+Add files to a study with a zip file
+************************************
+
+``curl --data-binary @example.zip -H "Content-Disposition: filename=example.zip" -H "Content-Type: application/zip" -H "Packaging: http://purl.org/net/sword/package/SimpleZip" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/study/hdl:TEST/12345``
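+
+The zip file itself can be created with the standard ``zip`` utility before uploading; for example (the data file names here are hypothetical):
+
+.. code-block:: guess
+
+    # bundle the files to be deposited into a single SimpleZip package
+    zip example.zip data1.csv data2.csv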
+
+Display a study atom entry
+**************************
+
+Contains the data citation (bibliographicCitation), the alternate URI (the persistent URI of the study), the edit URI, the edit media URI, and the statement URI.
+
+``curl https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345``
+
+Display a study statement
+*************************
+
+Contains a feed of file entries, the latestVersionState, and a locked boolean.
+
+``curl https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/statement/study/hdl:TEST/12345``
+
+Delete a file by database id
+****************************
+
+``curl -i -X DELETE https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/file/2325541``
+
+Replacing cataloging information (title, author, etc.) for a study
+******************************************************************
+
+Please note that all cataloging information will be replaced, including fields that cannot be expressed with "dcterms" fields.
+
+``curl --upload-file "atom-entry-study2.xml" -H "Content-Type: application/atom+xml" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345``
+
+.. code-block:: guess
+
+        <?xml version="1.0"?>
+        <!--
+        for modifying a study created with atom-entry-study.xml
+        -->
+        <entry xmlns="http://www.w3.org/2005/Atom"
+               xmlns:dcterms="http://purl.org/dc/terms/">
+           <!-- some embedded metadata -->
+           <dcterms:title>The Levels of Caffeine in Cold Brew Coffee</dcterms:title>
+           <dcterms:creator>Peets, John L.</dcterms:creator>
+           <dcterms:creator>Stumptown Research Institute</dcterms:creator>
+           <dcterms:isReferencedBy holdingsURI="http://dx.doi.org/10.1038/dvn333" agency="DOI"
+               IDNo="10.1038/dvn333">Peets, J., &amp; Stumptown, J. (2013). Roasting at Home. New England Journal of Coffee, 3(1), 22-34.</dcterms:isReferencedBy>
+           <dcterms:date>2013-08-11</dcterms:date>
+           <dcterms:description>This study evaluates the caffeine levels of a cold brewed coffee.</dcterms:description>
+           <dcterms:subject>coffee bean</dcterms:subject>
+           <dcterms:subject>caffeine</dcterms:subject>
+           <dcterms:subject>cold brew process</dcterms:subject>
+           <dcterms:subject>Stumptown Coffee Company</dcterms:subject>
+           <dcterms:rights>Creative Commons CC-BY 3.0 (unported) http://creativecommons.org/licenses/by/3.0/</dcterms:rights>
+        </entry>
+
+List studies in a dataverse
+***************************
+
+``curl https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS``
+
+Delete a study (non-released studies only)
+******************************************
+
+``curl -i -X DELETE https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345``
+
+Deaccession a study (released studies only)
+****************************************************
+
+``curl -i -X DELETE https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345``
+
+Release a study
+***************
+
+``curl -X POST -H "In-Progress: false" --upload-file zero-length-file.txt https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345``
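+
+The zero-length file exists only to satisfy the upload; it can be created with, for example:
+
+``touch zero-length-file.txt``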
+
+Determine if a dataverse has been released 
+******************************************
+
+Look for a `dataverseHasBeenReleased` boolean.
+
+``curl https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS``
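+
+For a quick check from the shell, one option (a sketch) is to filter the Atom response for that element:
+
+``curl -s https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS | grep dataverseHasBeenReleased``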
+
+`curl` reference
+----------------
+
+Per http://curl.haxx.se/docs/manpage.html 
+
+* `--upload-file` is an HTTP `PUT`
+* `--data-binary` is an HTTP `POST`
+
+DVN Data Deposit API v1 client sample code (Python)
+===================================================
+
+https://github.com/dvn/swordpoc/tree/master/dvn_client contains sample Python code for writing a DVN Data Deposit API v1 client. It makes use of a Python client library which conforms to the SWORDv2 specification: https://github.com/swordapp/python-client-sword2
+
+SWORDv2 client libraries
+========================
+
+* Python: https://github.com/swordapp/python-client-sword2
+* Java: https://github.com/swordapp/JavaClient2.0
+* Ruby: https://github.com/swordapp/sword2ruby
+* PHP: https://github.com/swordapp/swordappv2-php-library
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-developer-main.txt	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,688 @@
+====================================
+DVN Developers Guide
+====================================
+
+Please note: This guide was updated in October 2013 to reflect the switch
+from Ant to Maven in DVN 3.6.1.
+
+Build Environment (Configuring NetBeans)
+++++++++++++++++++++++++++++++++++++++++
+
+This chapter describes setting up the build environment that you will
+need to build the DVN application from source code. 
+
+Install NetBeans and GlassFish
+==============================
+
+As of DVN version 3.6.1 and the switch to Maven, a DVN development
+environment should not have any dependency on a particular IDE, but use
+of NetBeans 7.2.1 is encouraged because it's the version used by most of
+the current developers (on Mac OS X).
+
+The NetBeans project is currently offering an installer bundle that
+contains both NetBeans 7.2.1 and a supported version of GlassFish
+(3.1.2.2). If they choose to discontinue the bundle, you will have to
+download and install the two packages separately. Note that you can have
+multiple versions of both NetBeans and GlassFish on your system.
+
+Please note: While we intend to investigate NetBeans 7.4 and GlassFish
+4, these are not yet known to provide a suitable development
+environment.
+
+We strongly recommend that you run both installs **as a regular user**. There's no reason to run your development environment as root.
+
+Install NetBeans bundle
+-----------------------
+
+Download NetBeans 7.2.1 Java EE + GlassFish Open Source Edition 3.1.2.2
+bundle from https://netbeans.org/downloads/7.2.1
+
+For Mac OS X, you will download a .dmg disk image that will open
+automatically and start the installer for you. Choose the typical
+installation but be sure to install GlassFish and JUnit when prompted.
+
+Note that you don't have to uninstall your existing NetBeans version.
+You can have as many versions installed as you need in parallel.
+
+When you start NetBeans 7.2.1 for the first time, you will be asked if
+you want to import the settings from the previous installations. If you
+have an existing, pre-DVN 3.\* development environment on your system, 
+**answer "no" -- we want to create the new configuration from scratch.**
+
+[If you have to] Install GlassFish 3.1.2.2
+------------------------------------------
+
+We **strongly** recommend that you install GlassFish Server 3.1.2.2,
+Open Source Edition, **Full Platform**. If you have to install it
+separately from NetBeans, it can be obtained from
+http://glassfish.java.net/downloads/3.1.2.2-final.html
+
+The page above contains a link to the installation instructions, but the
+process is very straightforward - just download and run the installer.
+
+It is strongly recommended that you use Sun/Oracle Java JDK version 1.6.
+Please make sure you have the newest (or at least a recent) build number
+available for your platform. (On Mac OS X 10.8, since the JDK can be
+installed as part of the OS distribution, the version currently provided by
+Apple should be sufficient.) In other words, we do not recommend
+building DVN under JDK 1.7 until the ticket regarding the move from Java
+6 to 7 has been closed: https://redmine.hmdc.harvard.edu/issues/3306
+
+Note that you don't have to uninstall older versions of GlassFish you
+may still have around. It's ok to have multiple versions installed. But
+make sure you have the 3.1.2.2 installation selected as the active
+server in NetBeans.
+
+**Important:** During the installation, leave the admin password fields
+blank. This is not a security risk since out of the box, GlassFish
+3.1.2.2 will only be accepting admin connections on the localhost
+interface. Choosing a password at this stage, however, will complicate
+the installation process unnecessarily. Since this is a development
+system, you can probably keep this configuration unchanged (admin on
+localhost only). If you need to be able to connect to the admin console
+remotely, please see the note in the Appendix section of the main
+Installers Guide.
+
+Install JUnit (if you haven't already)
+--------------------------------------
+
+Depending on how you installed NetBeans, you might already have JUnit
+installed. If not, JUnit can be installed from Tools -> Plugins.
+
+Check out a new copy of the DVN source tree
+===========================================
+
+Create a GitHub account [if you don't have one already]
+-------------------------------------------------------
+
+Sign up at https://github.com
+
+Please note that the primary audience of this guide (for now) is people who
+have push access to https://github.com/IQSS/dvn . If you do not have
+push access and want to contribute (and we hope you do!), please fork the
+repo per https://help.github.com/articles/fork-a-repo and make the
+corresponding adjustments below when cloning the repo.
+
+Set up an ssh keypair (if you haven't already)
+-----------------------------------------------------
+
+You *can* use git with passwords over HTTPS but it's much nicer to set
+up SSH keys.
+
+https://github.com/settings/ssh is the place to manage the ssh keys
+GitHub knows about for you. That page also links to a nice howto:
+https://help.github.com/articles/generating-ssh-keys
+
+From the terminal, ``ssh-keygen`` will create new ssh keys for you:
+
+-  private key: ``~/.ssh/id_rsa``
+
+   -  It is **very important to protect your private key**. If someone
+      else acquires it, they can access private repositories on GitHub
+      and make commits as you! Ideally, you'll store your ssh keys on an
+      encrypted volume and protect your private key with a password when
+      prompted for one by ``ssh-keygen``. See also "Why do passphrases
+      matter" at https://help.github.com/articles/generating-ssh-keys
+
+-  public key: ``~/.ssh/id_rsa.pub``
+
+After you've created your ssh keys, add the public key to your GitHub
+account.
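+
+For example, a typical invocation that also labels the key with your email address (the address shown is a placeholder):
+
+::
+
+    # -t rsa selects an RSA keypair; -C attaches a comment (conventionally your email)
+    ssh-keygen -t rsa -C "you@example.edu"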
+
+Clone the repo
+--------------
+
+Please see `branches <#branches>`__ for detail, but in short, the
+"develop" branch is where new commits go. Below we will assume you want
+to make commits to "develop".
+
+In NetBeans, click Team, then Git, then Clone.
+
+Remote Repository
+*****************
+
+-  Repository URL: ``github.com:IQSS/dvn.git``
+-  Username: ``git``
+-  Private/Public Key
+
+   -  Private Key File: ``/Users/[YOUR_USERNAME]/.ssh/id_rsa``
+
+-  Passphrase: (the passphrase you chose while running ``ssh-keygen``)
+
+Click Next.
+
+If you are prompted about the authenticity of github.com's RSA key fingerprint, answer "Yes" to continue connecting. GitHub's RSA key fingerprint is listed at https://help.github.com/articles/generating-ssh-keys
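+
+If you prefer the command line to the NetBeans wizard, the equivalent clone is roughly (a sketch; adjust the destination path to taste):
+
+::
+
+    # clones only the develop branch into ~/NetBeansProjects/dvn
+    cd ~/NetBeansProjects
+    git clone -b develop git@github.com:IQSS/dvn.git dvn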
+
+Remote Branches
+***************
+
+Under Select Remote Branches check the "develop" branch.
+
+Please note: You may see other branches listed, such as "master", but
+there is no need to check them out at this time.
+
+Click Next.
+
+Destination Directory
+*********************
+
+The defaults should be fine:
+
+-  Parent Directory: ``/Users/[YOUR_USERNAME]/NetBeansProjects``
+-  Clone Name: ``dvn``
+-  Checkout Branch: ``develop*``
+-  Remote Name: ``origin``
+
+Click Finish.
+
+You should see a message that 3 projects were cloned. Click "Open
+Project".
+
+Open Projects
+=============
+
+In the "Open Projects" dialog you should see three projects, DVN-lockss,
+DVN-root, and DVN-web (a child of DVN-root).
+
+Highlight DVN-root and check "Open Required" (to include DVN-web) and click "Open".
+
+At this point, you should have two (and only two) projects open in
+NetBeans: DVN-root and DVN-web. If you hover over the projects, it's
+normal at this point to see warnings such as "Some dependency artifacts
+are not in the local repository" or "Cannot find application server:
+GlassFish Server 3+". We'll correct these next.
+
+Build for the first time
+========================
+
+In NetBeans, right-click DVN-root and click "Build". This will download
+many dependencies via Maven and may take several minutes.
+
+When this process has completed, right-click DVN-web and click "Build".
+You should expect to see "BUILD SUCCESS". This means you have
+successfully built the .war application package, but do not attempt to
+deploy the application just yet! We need to configure the server
+environment first, which consists of GlassFish and PostgreSQL.
+
+Application Environment (Configuring GlassFish and PostgreSQL)
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+In this chapter, we describe the process of setting up your own local
+application environment into which you will deploy the DVN application. 
+
+Install PostgreSQL database server 
+==================================
+
+For Mac OS X (our default development OS), you can get the installer
+from http://www.postgresql.org/download/macosx
+
+The installation is very straightforward; just make sure you answer
+"yes" when asked if Postgres should be accepting network connections.
+(The application will be accessing the database at the "localhost"
+address). 
+
+Once installed, we recommend that you also allow connections
+over local Unix sockets. This way the installer won't have to ask you
+for the Postgres password every time it needs to talk to the database.
+To do so, modify the "local all all" line in the data/pg\_hba.conf file
+to look like this:
+
+| local all all trust
+
+**Note** that this only opens Postgres to the local socket connections,
+and should not be considered a security risk. But if you are extra
+cautious, you may use instead:
+
+| local all all ident sameuser
+
+Restart Postgres for the changes to take effect!
+
+Please note: if you have any problems with the PostgreSQL setup, please
+ensure the right ``psql`` is in your ``$PATH``.
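+
+One quick way to verify which ``psql`` is being picked up, and its version:
+
+::
+
+    which psql
+    psql --version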
+
+You can check the instructions in the main Installers Guide for more info:
+:ref:`PostgreSQL section<postgresql>`;
+but the above should be sufficient to get your environment set up.
+
+Run the install-dev script
+==========================
+
+The installer is supplied with the DVN source in the tools directory.
+You must run it as root (for direct access to Postgres).
+
+| To run the script:
+| ``sudo su -``
+| ``cd /Users/[YOUR_USERNAME]/NetBeansProjects/dvn/tools/installer/dvninstall``
+
+| then execute
+| ``./install-dev``
+
+When prompted for various settings, you will likely be able to accept
+all the default values (in a development environment, they are for the
+most part the same for everybody).
+
+Testing login
+=============
+
+Once the ``install-dev`` script has completed successfully, you will
+have a fully functional Dataverse Network server. After making sure
+GlassFish has been started per the output of the script, you should be
+able to log in to the DVN with these credentials:
+
+- http://localhost:8080/dvn/
+- username: networkAdmin
+- password: networkAdmin
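+
+To confirm the application is answering before trying the browser, a quick header check (a sketch) is:
+
+::
+
+    curl -I http://localhost:8080/dvn/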
+
+Please note that when deploying from NetBeans for the first time, you
+will be prompted to select a deployment server. From the drop down,
+select "GlassFish Server 3.1.2", click "Remember in Current IDE Session"
+and click "OK". 
+
+Developing with Git
++++++++++++++++++++
+
+
+.. _commit:
+
+Commit
+==================
+
+**Committing Changes**
+
+By following the instructions in the :ref:`build <build>` step, you
+should be in the "develop" branch, which is where we want to make
+commits as we work toward the next release.
+
+You can verify which branch you are on by clicking Team then "Repository
+Browser".
+
+You should see ``dvn [develop]`` at the root of the tree and **develop**
+in bold under Branches -> Local
+
+Click Team, then "Show Changes". Select the desired files and
+right-click to commit.
+
+To publish your changes on GitHub, you'll need to follow the next step:
+:ref:`push <push>`.
+
+.. _push:
+
+Push
+===========
+
+**Pushing your commits to GitHub**
+
+After making your :ref:`commit <commit>`, push it to GitHub by clicking
+Team -> Remote -> Push, then Next (to use your configured remote
+repository), then checking **develop** and Finish.
+
+Your commit should now appear on GitHub in the develop branch:
+https://github.com/IQSS/dvn/commits/develop
+
+Your commit should **not** appear in the master branch on GitHub:
+https://github.com/IQSS/dvn/commits/master . Not yet, anyway. We only
+merge commits into master when we are ready to release. Please see the
+`branches <#branches>`__ section for more detail.
+
+
+Release
+============
+
+Merge develop into master
+--------------------------------------
+
+Tag the release
+***************************
+
+Here is an example of how the 3.4 tag (
+`https://github.com/IQSS/dvn/tree/3.4 <https://github.com/IQSS/dvn/tree/3.4>`__) was created and pushed to GitHub:
+
+.. code-block:: guess
+
+    murphy:dvn pdurbin$ git branch
+    * develop
+      master
+    murphy:dvn pdurbin$ git pull
+    Already up-to-date.
+    murphy:dvn pdurbin$ git checkout master
+    Switched to branch 'master'
+    murphy:dvn pdurbin$ git merge develop
+    Updating fdbfe57..6ceb24f
+    (snip)
+     create mode 100644 tools/installer/dvninstall/readme.md
+    murphy:dvn pdurbin$ git tag
+    3.3
+    murphy:dvn pdurbin$ git tag -a 3.4 -m 'merged develop, tagging master as 3.4'
+    murphy:dvn pdurbin$ git tag
+    3.3
+    3.4
+    murphy:dvn pdurbin$ git push origin 3.4
+    Counting objects: 1, done.
+    Writing objects: 100% (1/1), 182 bytes, done.
+    Total 1 (delta 0), reused 0 (delta 0)
+    To git@github.com:IQSS/dvn.git
+     * [new tag]         3.4 -> 3.4
+    murphy:dvn pdurbin$ 
+    murphy:dvn pdurbin$ git push origin master
+    Total 0 (delta 0), reused 0 (delta 0)
+    To git@github.com:IQSS/dvn.git
+       fdbfe57..6ceb24f  master -> master
+    murphy:dvn pdurbin$ 
+
+Make release available for download
+******************************************************
+
+On dvn-build:
+
+.. code-block:: guess
+
+    cd tools/installer
+    make installer
+
+Rename the resulting "dvninstall.zip" to include the release number
+(e.g. "dvninstall\_v3\_4.zip") and upload it, the separate war file, a
+readme, and a buildupdate script (all these files should include the
+release number) to SourceForge (e.g.
+`http://sourceforge.net/projects/dvn/files/dvn/3.4/ <http://sourceforge.net/projects/dvn/files/dvn/3.4/>`__).
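+
+For example (the version number shown is illustrative):
+
+.. code-block:: guess
+
+    mv dvninstall.zip dvninstall_v3_4.zip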
+
+Increment the version number
+*******************************************************
+
+The file to edit is:
+
+| `https://github.com/IQSS/dvn/blob/develop/src/DVN-web/src/VersionNumber.properties <https://github.com/IQSS/dvn/blob/develop/src/DVN-web/src/VersionNumber.properties>`__
+
+Branches
+===========
+
+Current list of branches
+-------------------------------------
+
+`https://github.com/IQSS/dvn/branches <https://github.com/IQSS/dvn/branches>`__
+
+New branching model: develop vs. master
+-------------------------------------------------
+
+Please note that with the move to git, we are adopting the branching
+model described at
+`http://nvie.com/posts/a-successful-git-branching-model/ <http://nvie.com/posts/a-successful-git-branching-model/>`__
+
+In this branching model there are two persistent branches:
+
+-  develop: where all new commits go
+-  master: where code gets merged and tagged as a release
+
+That is to say, **please make your commits on the develop branch, not
+the master branch**.
+
+Feature branches
+------------------------
+
+    "The essence of a feature branch is that it exists as long as the
+    feature is in development, but will eventually be merged back into
+    develop (to definitely add the new feature to the upcoming release)
+    or discarded (in case of a disappointing experiment)." --
+    `http://nvie.com/posts/a-successful-git-branching-model/ <http://nvie.com/posts/a-successful-git-branching-model/>`__
+
+Example feature branch: 2656-lucene
+---------------------------------------------------
+
+First, we create the branch and check it out:
+
+::
+
+    murphy:dvn pdurbin$ git branch
+      2656-solr
+    * develop
+    murphy:dvn pdurbin$ git branch 2656-lucene
+    murphy:dvn pdurbin$ 
+    murphy:dvn pdurbin$ git branch
+      2656-lucene
+      2656-solr
+    * develop
+    murphy:dvn pdurbin$ git checkout 2656-lucene
+    Switched to branch '2656-lucene'
+    murphy:dvn pdurbin$ 
+    murphy:dvn pdurbin$ git status
+    # On branch 2656-lucene
+    nothing to commit (working directory clean)
+    murphy:dvn pdurbin$ 
+
+Then, we make a change and a commit, and push it to
+`https://github.com/iqss/dvn/tree/2656-lucene <https://github.com/iqss/dvn/tree/2656-lucene>`__ (creating a new remote branch):
+
+::
+
+    murphy:dvn pdurbin$ vim src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java
+    murphy:dvn pdurbin$ 
+    murphy:dvn pdurbin$ git commit -m 'start lucene faceting branch' src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java
+    [2656-lucene 3b82f88] start lucene faceting branch
+     1 file changed, 73 insertions(+), 2 deletions(-)
+    murphy:dvn pdurbin$ 
+    murphy:dvn pdurbin$ git push origin 2656-lucene
+    Counting objects: 25, done.
+    Delta compression using up to 8 threads.
+    Compressing objects: 100% (10/10), done.
+    Writing objects: 100% (13/13), 2.23 KiB, done.
+    Total 13 (delta 6), reused 0 (delta 0)
+    To git@github.com:IQSS/dvn.git
+     * [new branch]      2656-lucene -> 2656-lucene
+    murphy:dvn pdurbin$ 
+
+| 
+
+As we work on the feature branch, we merge the latest changes from
+"develop". We want to resolve conflicts in the feature branch itself so
+that the feature branch will merge cleanly into "develop" when we're
+ready. In the example below, we use ``git mergetool`` and ``opendiff``
+to resolve conflicts and save the merge. Then we push the newly-merged
+2656-lucene feature branch to GitHub:
+
+| 
+
+::
+
+    murphy:dvn pdurbin$ git branch
+    * 2656-lucene
+      2656-solr
+      develop
+    murphy:dvn pdurbin$ git checkout develop
+    murphy:dvn pdurbin$ git branch
+      2656-lucene
+      2656-solr
+    * develop
+    murphy:dvn pdurbin$ git pull
+    remote: Counting objects: 206, done.
+    remote: Compressing objects: 100% (43/43), done.
+    remote: Total 120 (delta 70), reused 96 (delta 46)
+    Receiving objects: 100% (120/120), 17.65 KiB, done.
+    Resolving deltas: 100% (70/70), completed with 40 local objects.
+    From github.com:IQSS/dvn
+       8fd223d..9967413  develop    -> origin/develop
+    Updating 8fd223d..9967413
+    Fast-forward
+     .../admin/EditNetworkPrivilegesServiceBean.java  |    5 +-
+    (snip)
+     src/DVN-web/web/study/StudyFilesFragment.xhtml   |    2 +-
+     12 files changed, 203 insertions(+), 118 deletions(-)
+    murphy:dvn pdurbin$ 
+    murphy:dvn pdurbin$ git checkout 2656-lucene
+    Switched to branch '2656-lucene'
+    murphy:dvn pdurbin$ 
+    murphy:dvn pdurbin$ 
+    murphy:dvn pdurbin$ git merge develop
+    Auto-merging src/DVN-web/web/BasicSearchFragment.xhtml
+    CONFLICT (content): Merge conflict in src/DVN-web/web/BasicSearchFragment.xhtml
+    Auto-merging src/DVN-web/src/edu/harvard/iq/dvn/core/web/BasicSearchFragment.java
+    Auto-merging src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java
+    Automatic merge failed; fix conflicts and then commit the result.
+    murphy:dvn pdurbin$ 
+    murphy:dvn pdurbin$ git status
+    # On branch 2656-lucene
+    # Changes to be committed:
+    #
+    #       modified:   src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/admin/EditNetworkPrivilegesServiceBean.java
+    (snip)
+    #       new file:   src/DVN-web/web/admin/ChooseDataverseForCreateStudy.xhtml
+    #       modified:   src/DVN-web/web/study/StudyFilesFragment.xhtml
+    #
+    # Unmerged paths:
+    #   (use "git add/rm <file>..." as appropriate to mark resolution)
+    #
+    #       both modified:      src/DVN-web/web/BasicSearchFragment.xhtml
+    #
+    murphy:dvn pdurbin$ git mergetool
+    merge tool candidates: opendiff kdiff3 tkdiff xxdiff meld tortoisemerge gvimdiff diffuse ecmerge p4merge araxis bc3 emerge vimdiff
+    Merging:
+    src/DVN-web/web/BasicSearchFragment.xhtml
+
+    Normal merge conflict for 'src/DVN-web/web/BasicSearchFragment.xhtml':
+      {local}: modified file
+      {remote}: modified file
+    Hit return to start merge resolution tool (opendiff):
+    murphy:dvn pdurbin$ 
+    murphy:dvn pdurbin$ git add .
+    murphy:dvn pdurbin$ 
+    murphy:dvn pdurbin$ git commit -m "Merge branch 'develop' into 2656-lucene"
+    [2656-lucene 519cd8c] Merge branch 'develop' into 2656-lucene
+    murphy:dvn pdurbin$ 
+    murphy:dvn pdurbin$ git push origin 2656-lucene
+    (snip)
+    murphy:dvn pdurbin$ 
+
+
+| When we are ready to merge the feature branch back into the develop branch, we can do so.
+
+| Here's an example of merging the 2656-lucene branch back into develop:
+
+::
+
+    murphy:dvn pdurbin$ git checkout 2656-lucene
+    Switched to branch '2656-lucene'
+    murphy:dvn pdurbin$ git pull
+    Already up-to-date.
+    murphy:dvn pdurbin$ git checkout develop
+    Switched to branch 'develop'
+    murphy:dvn pdurbin$ git pull
+    Already up-to-date.
+    murphy:dvn pdurbin$ git merge 2656-lucene
+    Removing lib/dvn-lib-EJB/lucene-core-3.0.0.jar
+    Merge made by the 'recursive' strategy.
+     lib/dvn-lib-EJB/lucene-core-3.0.0.jar                                     |  Bin 1021623 -> 0 bytes
+     lib/dvn-lib-EJB/lucene-core-3.5.0.jar                                     |  Bin 0 -> 1466301 bytes
+     lib/dvn-lib-EJB/lucene-facet-3.5.0.jar                                    |  Bin 0 -> 293582 bytes
+     src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/DvnQuery.java          |  160 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+     src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/IndexServiceBean.java  |   56 ++++++++++++++++++++
+     src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/IndexServiceLocal.java |   16 +++++-
+     src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java           |  432 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++--
+     src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/ResultsWithFacets.java |   71 +++++++++++++++++++++++++
+     src/DVN-web/src/SearchFieldBundle.properties                              |    4 +-
+     src/DVN-web/src/edu/harvard/iq/dvn/core/web/AdvSearchPage.java            |   86 +++++++++++++++++++++++++++++++
+     src/DVN-web/src/edu/harvard/iq/dvn/core/web/BasicSearchFragment.java      |  102 +++++++++++++++++++++++++++++++++++-
+     src/DVN-web/src/edu/harvard/iq/dvn/core/web/StudyListing.java             |   11 ++++
+     src/DVN-web/src/edu/harvard/iq/dvn/core/web/StudyListingPage.java         |  428 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++-
+     src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetResultUI.java      |   42 +++++++++++++++
+     src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetUI.java            |   62 ++++++++++++++++++++++
+     src/DVN-web/web/AdvSearchPage.xhtml                                       |    3 +-
+     src/DVN-web/web/BasicSearchFragment.xhtml                                 |    9 ++--
+     src/DVN-web/web/StudyListingPage.xhtml                                    |   43 +++++++++++-----
+     18 files changed, 1500 insertions(+), 25 deletions(-)
+     delete mode 100644 lib/dvn-lib-EJB/lucene-core-3.0.0.jar
+     create mode 100644 lib/dvn-lib-EJB/lucene-core-3.5.0.jar
+     create mode 100644 lib/dvn-lib-EJB/lucene-facet-3.5.0.jar
+     create mode 100644 src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/DvnQuery.java
+     create mode 100644 src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/ResultsWithFacets.java
+     create mode 100644 src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetResultUI.java
+     create mode 100644 src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetUI.java
+    murphy:dvn pdurbin$ 
+    murphy:dvn pdurbin$ git status
+    # On branch develop
+    # Your branch is ahead of 'origin/develop' by 68 commits.
+    #
+    nothing to commit (working directory clean)
+    murphy:dvn pdurbin$ 
+    murphy:dvn pdurbin$ git push
+    Counting objects: 51, done.
+    Delta compression using up to 8 threads.
+    Compressing objects: 100% (12/12), done.
+    Writing objects: 100% (19/19), 1.41 KiB, done.
+    Total 19 (delta 7), reused 0 (delta 0)
+    To git@github.com:IQSS/dvn.git
+       b7fae01..2b88b68  develop -> develop
+    murphy:dvn pdurbin$ 
+
+Switching to the master branch to merge commits from the develop branch
+-------------------------------------------------------------------------------------------------------
+
+We should really only need to switch from the develop branch to the
+master branch as we prepare for a release.
+
+First, we check out the master branch by clicking Team -> Git -> Branch
+-> Switch to Branch.
+
+Change Branch to "origin/master" and check the box for "Checkout as New
+Branch" and fill in "master" as the "Branch Name" to match the name of
+the branch we're switching to. Then click "Switch".
+
+Now, in the Git Repository Browser (from Team -> Repository Browser) the
+root of the tree should say ``dvn [master]`` and you should see two
+branches under Branches -> Local. **master** should be in bold and
+develop should not.
+
+Tips
+=========
+
+Previewing changes before a pull
+--------------------------------
+
+If the build fails overnight you may want to hold off on doing a pull
+until the problem is resolved. To preview what has changed since your
+last pull, you can do a ``git fetch`` (the first part of a pull) then
+``git log HEAD..origin/develop`` to see the commit messages.
+``git log -p`` or ``git diff`` will allow you to see the contents of the
+changes:
+
+::
+
+    git checkout develop
+    git fetch
+    git log HEAD..origin/develop
+    git log -p HEAD..origin/develop
+    git diff HEAD..origin/develop
+
+After the build is working again, you can simply do a pull as normal.
+
+Errors
+===========
+
+Duplicate class
+---------------
+
+The error "duplicate class" can result whenever you resolve a merge
+conflict in git.
+
+The fix is to close NetBeans and delete (or move aside) the cache like
+this:
+
+::
+
+    cd ~/Library/Caches/NetBeans
+    mv 7.2.1 7.2.1.moved
+
+According to https://netbeans.org/bugzilla/show_bug.cgi?id=197983 this might be fixed in NetBeans 7.3.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-installer-main.txt	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,1090 @@
+====================================
+Installers Guide
+====================================
+
+.. _introduction:
+
+**Introduction**
+
+This is our "new and improved" installation guide, it was first
+released with the Dataverse Network application versions 2.2.4, when we
+introduced the new, automated and much simplified installation process.
+As of February 2012, it has been updated to reflect the changes made in
+the newly released version 3.0 of the software. (Our existing users will
+notice however, that the changes in the installation process have been
+fairly minimal).
+
+The guide is intended for anyone who needs to install the DVN app,
+developers and Dataverse Network administrators alike.
+
+The chapters and sections are organized top-down, in order of
+increasing complexity. First a very basic, simple installation scenario
+is presented. The instructions are straightforward and only the required
+components are discussed. This use case will in fact be sufficient for
+most DVN developers and many Dataverse Network administrators. Chances
+are you are one of those users, so if you are brave by nature, you may stop
+reading this section and go straight to the :ref:`“Quick Install” <quick-install>` chapter.
+
+The “basic” installation process described in the first chapter is
+fully automated; everything is performed by a single interactive script.
+This process has its limitations: it will likely work only on the
+supported platforms, and optional components need to be configured outside
+of the Installer (these are described in the "Optional Components"
+section).
+
+For advanced users, we provide detailed explanations of all the
+steps performed by the Installer, allowing you to experiment with
+individual configuration options with maximum flexibility and control
+over the process. We have tried to organize this advanced information
+so that those who only need the most basic instructions do
+not have to read through it unnecessarily; instead, we give them
+an easy way to get a bare-bones configuration of the DVN up and running.
+
+If you are interested in practicing a DVN installation in a Vagrant
+environment you can later throw away, please follow the instructions at
+https://github.com/dvn/dvn-install-demo to spin up a Linux virtual
+machine on your laptop with ``vagrant up``. When you are finished with
+this temporary DVN installation, you can delete the virtual machine with
+``vagrant destroy``.
+
+If you encounter any problems during installation, please contact the
+development team
+at `support@thedata.org <mailto:support@thedata.org>`__
+or our `Dataverse Users
+Community <https://groups.google.com/forum/?fromgroups#!forum/dataverse-community>`__.
+
+.. _quick-install:
+
+Quick Install
+++++++++++++++++++++++
+
+For experienced and/or rather bold users, this is a one-paragraph
+version of the installation instructions:
+
+This should work on RedHat and its derivatives, and MacOS X. If this
+does not describe your case, you will very likely have to install and
+configure at least some of the components manually, which means you should
+consider reading through the chapters that follow! Still here? Great.
+Prerequisites: Sun/Oracle Java JDK 1.6\_31+ and a “virgin” installation
+of Glassfish v3.1.2; PostgreSQL v8.3+, configured to listen to network
+connections and support password authentication on the localhost
+interface; you may need R as well. See the corresponding sections under
+“2. Prerequisites”, if necessary. Download the installer package from
+SourceForge:
+
+`http://sourceforge.net/projects/dvn/files/dvn <http://sourceforge.net/projects/dvn/files/dvn>`__
+
+Choose the latest version and download the dvninstall zip file.
+
+Unzip the package in a temp location of your choice (this will create
+the directory ``dvninstall``). Run the installer, as root:
+
+| ``cd dvninstall``
+| ``./install``
+
+Follow the installation prompts. If it all works as it should, you
+will have a working DVN instance running in about a minute from now.
+
+Has it worked? Awesome! Now you may read the rest of the guide
+chapters at your leisure, to see if you need any of the
+optional components described there, and/or if you want to understand
+what exactly has just been done to your system.
+
+SYSTEM REQUIREMENTS
+++++++++++++++++++++++++++++++++++
+
+Or rather, recommendations. The closer your configuration is to what’s
+outlined below, the easier it will be for the DVN team to provide
+support and answer your questions.
+
+-  Operating system - The production version of the Dataverse Network at
+   IQSS (dvn.iq.harvard.edu) runs on RedHat Linux 5. Most of the DVN
+   development is currently done on MacOS X. Because of our experience
+   with RedHat and MacOS X, these are the recommended platforms. You
+   should be able to deploy the application .ear file on any other
+   platform that supports Java. However, the automated installer we
+   provide will likely work on RedHat and MacOS only. Some information
+   provided in this guide is specific to these two operating systems. (Any
+   OS-specific instructions/examples will be clearly marked, for
+   example:\ ``[MacOS-specific:]``)
+
+-  CPU - The production IQSS Dataverse Network runs on generic,
+   multi-core 64-bit processors. 
+
+-  Memory - The application servers currently in production at the IQSS
+   have 64 GB of memory each. Development and testing systems require a
+   minimum of 2 gigabytes of memory.
+
+-  Disk space - How much disk space is required depends on the amount of
+   data that you expect to serve. The IQSS Dataverse Network file system
+   is a standalone NetApp with 2 TB volume dedicated to the DVN data.
+
+-  Multiple servers – All the DVN components can run on the same server.
+   On a busy, hard-working production network the load can be split
+   across multiple servers. The 3 main components, the application
+   server (Glassfish), the database (Postgres) and R can each run on its
+   own host. Furthermore, multiple application servers sharing the same
+   database and R server(s)  can be set up behind a load balancer.
+   Developers would normally run Glassfish and Postgres on their
+   workstations locally and use a shared R server.
+
+-  If it actually becomes a practical necessity to bring up more servers
+   to handle your production load, there are no universal instructions
+   on how to best spread it across extra CPUs. It will depend on the
+   specifics of your site, the nature of the data you serve and the
+   needs of your users, whether you’ll benefit most from dedicating
+   another server to run the database, or to serve R requests. Please
+   see the discussion in the corresponding sections of the Prerequisites
+   chapter.
+
+.. _prerequisites:
+
+PREREQUISITES
+++++++++++++++++++++++++++
+
+In this chapter, the emphasis is on clearly identifying those
+components that are absolutely required for every installation, and on
+marking any advanced, optional instructions as such.
+
+Glassfish
+=======================
+
+Version 3.1.2 is required.
+
+Make sure you have **Sun/Oracle Java JDK version 1.6, build 31**
+or newer. It is available from
+`http://www.oracle.com/technetwork/java/javase/downloads/index.html <http://www.oracle.com/technetwork/java/javase/downloads/index.html>`__.
+
+
+**[note for developers:]**
+
+If you are doing this installation as part of your DVN software
+development setup: The version of NetBeans currently in use by the DVN
+team is 7.0.1, and it is recommended that you use this same version if
+you want to participate in the development. As of the writing of this
+manual, the NetBeans 7.0.1 installer bundle comes with an older version of
+Glassfish, so you will have to install Glassfish version 3.1.2
+separately, and then select it as the default server for your NetBeans
+project.
+
+**[/note for developers]**
+
+We **strongly** recommend that you install GlassFish Server 3.1.2,
+Open Source Edition, **Full Platform**. You are very likely to run into
+installation issues if you attempt to run the installer and get the
+application to work with a different version! Simply transitioning from
+3.1.1 to 3.1.2 turned out to be a surprisingly complex undertaking,
+hence this recommendation to all other installers and developers to stay
+with the same version.
+
+It can be obtained from
+
+`http://glassfish.java.net/downloads/3.1.2-final.html <http://glassfish.java.net/downloads/3.1.2-final.html>`__
+
+The page contains a link to the installation instructions. However,
+the process is completely straightforward. You are given two options for
+the format of the installer package. We recommend that you choose to
+download it as a shell archive; you will need to make it executable,
+with ``chmod +x``, and then run it, as root:
+
+| ``./installer-filename.sh``
+
+**[Important:]**
+
+Leave the admin password fields blank. This is not a security risk,
+since out of the box, Glassfish will only be accepting admin connections
+on the localhost interface. Choosing a password at this stage, however, will
+complicate the installation process unnecessarily. If this is a
+developer's installation, you can probably keep this configuration
+unchanged (admin on localhost only). If you need to be able to connect
+to the admin console remotely, please see the note in the Appendix
+section of the manual.
+
+**[/Important]**
+
+| **[Advanced:]**
+| **[Unix-specific:]**
+
+The installer shell script will normally attempt to run in graphical
+mode. If you are installing this on a remote Unix server, this will
+require X Windows support on your local workstation. If for whatever
+reason that is not available, you have the option of running it in *silent
+mode* - check the download page, above, for more information.
+
+| **[/Unix-specific]**
+| **[/Advanced]**
+
+.. _postgresql:
+
+PostgreSQL
+=======================
+
+**Version 8.3 or higher is required.**
+
+Installation instructions specific to RedHat Linux and MacOS X are
+provided below. Once the database server is installed, you'll need to
+configure access control to suit your installation. Note that any
+modifications to the configuration files below require you to restart
+Postgres:
+
+| ``service postgresql restart`` (RedHat)
+| or
+| "Restart Server" under Applications -> PostgreSQL (MacOS X)
+
+By default, most Postgres distributions are configured to listen to network connections on the localhost interface only, and to only support ident for authentication. (The MacOS installer may ask you if network connections should be allowed - answer "yes"). At a minimum, if GlassFish is running on the same host, Postgres will also need to allow password authentication on localhost. So you will need to modify the "``host all all 127.0.0.1``\ " line in your ``/var/lib/pgsql/data/pg_hba.conf`` so that it looks like this:
+
+|         ``host all all 127.0.0.1/32 password``
+
+Also, the installer script needs to have direct access to the local PostgreSQL server via Unix domain sockets, so this needs to be set to either "trust" or "ident". I.e., your **pg\_hba.conf** must contain one of the two lines below:
+
+| **local   all  all   ident    sameuser**
+| or
+| **local   all  all  trust**
+
+("ident" is the default setting; but if it has been changed to
+"password" or "md5", etc. on your system, Postgres will keep prompting
+you for the master password throughout the installation)
+
+**[optional:]**
+
+If GlassFish will be accessing the database remotely, add or modify the following line in your ``<POSTGRES DIR>/data/postgresql.conf``:
+
+| ``listen_addresses='*'``
+
+to enable network connections on all interfaces; and add the following
+line to ``pg_hba.conf``:
+
+| ``host all all [ADDRESS] 255.255.255.255 password``
+
+where ``[ADDRESS]`` is the numeric IP address of the GlassFish server.
+Using the subnet notation above, you can enable authorization for multiple hosts on your network. For example,
+
+| ``host all all 140.247.115.0 255.255.255.0 password``
+
+will permit password-authenticated connections from all hosts on the ``140.247.115.*`` subnet.
+| **[/optional]**
+
+| 
+| **[RedHat-specific:]**
+| **[Advanced:]**
+
+Please note that the instructions below are meant for users who have some experience with basic RedHat admin tasks. You should be safe to proceed if an instruction such as “uninstall the postgres rpms” makes sense to you immediately. I.e., if you already know how to install or uninstall an rpm package. Otherwise we recommend that you contact your systems administrator.
+
+For RedHat (and relatives), version 8.4 is now part of the distribution. As of RedHat 5, the default ``postgresql`` rpm is still version 8.1. So you may have to un-install the ``postgresql`` rpms, then get the ones for version 8.4:
+
+|         ``yum install postgresql84 postgresql84-server``
+
+Before you start the server for the first time with
+
+| ``service postgresql start``
+
+you will need to populate the initial database with
+
+| ``service postgresql initdb``
+
+
+| **[/advanced]**
+| **[/RedHat-specific]**
+
+
+**[MacOS-specific:]**
+
+
+The Postgres Project provides a one-click installer for Mac OS X 10.4 and
+above at
+`http://www.postgresql.org/download/macosx <http://www.postgresql.org/download/macosx>`__.
+Fink and MacPorts packages are also available.
+
+
+**[/MacOS-specific]**
+
+
+| **[advanced:]**
+| **[optional:]**
+
+See the section :ref:`PostgreSQL setup <postgresql-setup>` in the Appendix for the description of the steps that the automated installer takes to set up PostgreSQL for use with the DVN.
+
+| **[/optional]**
+| **[/advanced]**
+
+.. _r-and-rserve:
+
+R and RServe
+=======================
+
+Strictly speaking, R is an optional component. You can bring up a
+running DVN instance without it. The automated installer will allow such
+an installation, with a warning. Users of this Dataverse Network will be
+able to upload and share some data. Only the advanced modes of serving
+quantitative data to the users require R. Please consult
+the :ref:`"Do you need R?" <do-you-need-r>` section in the Appendix for an extended discussion of this.
+
+
+| **Installation instructions:** 
+
+Install the latest version of R from your favorite CRAN mirror (refer to `http://cran.r-project.org/ <http://cran.r-project.org/>`__ for more information). Depending on your OS distribution, this may be as simple as typing
+
+| **[RedHat/Linux-specific:]**
+
+``yum install R R-devel``
+
+(for example, the above line will work in CentOS out of the box; in RedHat, you will have to add support for the EPEL repository -- see
+`http://fedoraproject.org/wiki/EPEL <http://fedoraproject.org/wiki/EPEL>`__
+-- then run the ``yum install`` command)
+
+| **[/RedHat/Linux-specific]**
+
+Please make sure to install the "devel" package too! You will need it
+to build the extra R modules.
+
+Once you have R installed, download the package ``dvnextra.tar`` from this location:
+
+`http://dvn.iq.harvard.edu/dist/R/dvnextra.tar <http://dvn.iq.harvard.edu/dist/R/dvnextra.tar>`__
+
+Unpack the archive:
+
+``tar xvf dvnextra.tar``
+
+then run the supplied installation shell script as root:
+
+| ``cd dvnextra``
+| ``./installModules.sh``
+
+This will install a number of R modules needed by the DVN to run statistics and analysis, some from CRAN and some supplied in the bundle; it will also configure Rserve to run locally on your system and install some startup files that the DVN will need.
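+
+Once the script completes, a quick way to verify that Rserve is up and listening on its default port, 6311 (assumes the ``net-tools`` package is installed):
+
+.. code-block:: sh
+
+    # an Rserve process bound to 6311 should show up here
+    netstat -lnt | grep 6311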
+
+**Please note that the DVN application requires specific versions of the 3rd-party R packages. For example, if you obtain and install the version of the Zelig package currently available from CRAN, it will not work with the application. This is why we distribute the sources of the correct versions in this tar package.**
+
+
+| **[advanced:]**
+| We haven’t had much experience with R on any platforms other than RedHat-and-the-like. Our developers use MacOS X, but point their DVN instances to a shared server running Rserve under RedHat.
+
+The R project ports their distribution to a wide range of platforms. However, the installer shell script above will only run on Unix, and it is not really guaranteed to work on anything other than RedHat. If you have some experience with either R or system administration, you should be able to use the script as a guide to re-create the configuration steps on any other platform quite easily. You will, however, be entirely on your own while embarking on that adventure.
+**[/advanced]**
+
+
+
+System Configuration
+================================
+
+**[Advanced/optional:]**
+
+Many modern OS distributions come pre-configured so that all the
+network ports are firewalled off by default.
+
+Depending on the configuration of your server, you may need to open some
+of the following ports.
+
+On a developer's personal workstation, the user would normally access his
+or her DVN instance on the localhost interface. So no open ports are
+required unless you want to give access to your DVN to another
+user/developer.
+
+When running a DVN that is meant to be accessible by network users: At a
+minimum, if all the components are running on the same server, the HTTP
+port 80 needs to be open. You may also want to open TCP port 443, to be able
+to access the Glassfish admin console remotely.
+
+If the DVN is running its own HANDLE.NET server (see Chapter 4,
+"Optional Components"), TCP port 8000 and TCP/UDP port 2641 are
+also needed.
+
+If the DVN application needs to talk to PostgreSQL and/or Rserve running
+on remote hosts, the TCP ports 5432 and 6311, respectively, need to be
+open there.
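+
+For example, on a RedHat-era system using ``iptables``, opening the HTTP port might look like the sketch below; adjust to your distribution's firewall tooling:
+
+.. code-block:: sh
+
+    # open TCP port 80 and persist the rule across reboots
+    iptables -I INPUT -p tcp --dport 80 -j ACCEPT
+    service iptables save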
+
+**[/Advanced/optional]**
+
+
+
+RUNNING THE INSTALLER
++++++++++++++++++++++++++++++++++++++++++
+
+Once the :ref:`Prerequisites <prerequisites>` have been taken care of, the DVN application can be installed.
+
+The installer package can be downloaded from our repository on SourceForge at
+
+`http://sourceforge.net/projects/dvn/files/dvn/3.0/dvninstall\_v3\_0.zip <http://sourceforge.net/projects/dvn/files/dvn/3.0/dvninstall_v3_0.zip>`_
+
+| Unzip the package in a temp location of your choice (this will create the directory ``dvninstall``). Run the installer, as root:
+|         ``cd dvninstall``
+|         ``./install``
+
+Follow the installation prompts. The installer will first verify the contents of the package and check if the required components
+(in :ref:`Prerequisites <prerequisites>`) are present on the system. Then it will lead you through the application setup.
+
+| **[Advanced:]**
+
+The limitations of the installer package:
+
+Some extra configuration steps will be required if the PostgreSQL database is being set up on a remote server.
+
+It will most likely only work on the supported platforms, RedHat and Mac OS X.
+
+It is only guaranteed to work on a fresh Glassfish installation. If you already have more than one Glassfish domain created and/or have applications other than the DVN running under Glassfish, please consult the :ref:`"What does the Installer do?" <what-does-the-intstaller-do>` section.
+
+It does not install any of the optional components (:ref:`see Chapter 4<optional-components>`.) 
+
+For the detailed explanation of the tasks performed by the Installer, see the :ref:`"What does the Installer do?" <what-does-the-intstaller-do>` section.
+
+| **[/Advanced]**
+
+.. _optional-components:
+
+Optional Components
+++++++++++++++++++++++++++
+
+
+reCAPTCHA bot blocker
+=================================
+
+We found that our “email us” feature can be abused to send spam
+messages. You can choose to use the reCAPTCHA filter to help prevent
+this. Configure the filter as follows:
+
+#. | Go to the reCAPTCHA web site at
+   | `http://recaptcha.net/ <http://recaptcha.net/>`_ 
+   | and sign up for an account.
+   | Register your website domain to acquire a public/private CAPTCHA key pair.
+   | Record this information in a secure location.
+#. Insert the public/private key pair and domain for your reCAPTCHA
+   account into the ``captcha`` table of the DVN PostgreSQL database.
+   Use ``psql``, ``pgadmin`` or any other database utility; the SQL
+   query will look like this (see also the sketch after this list):
+   ``INSERT INTO captcha (publickey, domainname, privatekey) VALUES ('sample', 'sample.edu', 'sample')``
+#. Verify that the Report Issue page is now showing the reCAPTCHA
+   challenge.
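+
+A sketch of step 2 from the command line; the database name ``dvnDb`` and user ``dvnApp`` are examples, and the key values are placeholders:
+
+.. code-block:: sh
+
+    psql -U dvnApp -d dvnDb -c "INSERT INTO captcha (publickey, domainname, privatekey) \
+        VALUES ('sample', 'sample.edu', 'sample');"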
+
+Google Analytics
+================================
+
+Network Admins can use the Google Analytics tools to view Dataverse Network website usage statistics.
+
+Note: It takes about 24 hours for Google Analytics to start monitoring
+your website after the registration.
+
+To enable the use of Google Analytics:
+
+#. Go to the Google Analytics homepage at
+   `http://www.google.com/analytics/indexu.html <http://www.google.com/analytics/indexu.html>`__.
+#. Set up a Google Analytics account and obtain a tracking code for your Dataverse Network installation.
+#. Use the Google Analytics Help Center to find how to add the tracking code to the content you serve.
+#. Configure the DVN to use the tracking key (obtained in Step 2,
+   above) by setting the ``dvn.googleanalytics.key`` JVM option in
+   Glassfish.
+
+   This can be done by adding the following directly to the
+   ``domain.xml`` config file (for example: ``/usr/local/glassfish/domains/domain1/config/domain.xml``):
+   ``<jvm-options>-Ddvn.googleanalytics.key=XX-YYY</jvm-options>`` (this will require a Glassfish restart)
+
+   Or by using the Glassfish Admin Console configuration GUI; consult the “Glassfish Configuration” section in the Appendix. An ``asadmin`` alternative is sketched after this list.
+
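+A minimal ``asadmin`` sketch of the same configuration (assumes Glassfish's ``bin`` directory is on your PATH; the key value is a placeholder):
+
+.. code-block:: sh
+
+    # add the JVM option and restart the domain to pick it up
+    asadmin create-jvm-options '-Ddvn.googleanalytics.key=XX-YYY'
+    asadmin restart-domain domain1
+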
+Once installed and activated, the usage statistics can be accessed from
+the Network Options of the DVN.
+
+ImageMagick
+=======================
+
+When image files are ingested into a DVN, the application
+automatically creates small "thumbnail" versions to display on the
+Files View page. These thumbnails are generated once, then cached for
+future use.
+
+Normally, the standard Java image manipulation libraries are used to
+do the scaling. If you have studies with large numbers of large
+images, generating the thumbnails may become a time-consuming task. If
+you notice that the Files view takes a long time to load for the first
+time because of the images, it is possible to improve the
+performance by installing the ``ImageMagick`` package. If it is
+installed, the application will automatically use its
+``/usr/bin/convert`` utility to do the resizing, which appears to be
+significantly faster than the Java code.
+
+``ImageMagick`` is available for, or even comes with, most popular OS distributions.
+
+ 
+| **[RedHat-specific:]**
+
+It is part of the full RedHat Linux distribution, although it is not
+included in the default "server" configuration. It can be installed on a
+RedHat server with the ``yum install ImageMagick`` command.
+
+**[/RedHat-specific]**
+
+Handle System
+===========================
+
+DVN administrators may choose to set up a `HANDLE.NET <http://www.handle.net/>`_ server to issue and register persistent, global identifiers for their studies. The DVN app can be modified to support other naming services, but as of now it comes
+pre-configured to use Handles.
+
+To install and set up a local HANDLE.NET server:
+
+#. Download HANDLE.NET.
+   Refer to the HANDLE.NET software download page at
+   `http://handle.net/download.html <http://handle.net/download.html>`__.
+#. Install the server on the same host as GlassFish.
+   Complete the installation and setup process as described in the
+   HANDLE.NET Technical Manual:
+   `http://www.handle.net/tech_manual/Handle_Technical_Manual.pdf <http://www.handle.net/tech_manual/Handle_Technical_Manual.pdf>`__.
+#. Accept the default settings during installation, **with one
+   exception:** do not encrypt private keys (this will make it easier to
+   manage the service). **Note** that this means answering 'n' when
+   prompted "Would you like to encrypt your private key? (y/n) [y]:". If
+   you accept the default 'y' and then hit return when prompted for a
+   passphrase, the key **will** be encrypted, with a blank passphrase!
+#. During the installation you will be issued an "authority prefix".
+   This is the equivalent of a domain name. For example, the prefix
+   registered to the IQSS DVN is "1902.1". The IDs issued to IQSS
+   studies are of the form "1902.1/XXXX", where "XXXX" is some unique
+   identifier.
+#. Use ``psql`` or ``pgAdmin`` to execute the following SQL command:
+   ``insert into handleprefix (prefix) values ('<your HANDLE.NET prefix>');``
+#. ``(Optional/advanced)`` If you are going to be assigning HANDLE.NET
+   ids in more than one authority prefix (to register studies harvested
+   from remote sources): once you obtain the additional HANDLE.NET
+   prefixes, add each to the ``handleprefix`` table, using the SQL
+   command from the previous step.
+#. Use ``psql`` or ``pgAdmin`` to execute the following SQL
+   command (see the combined sketch after this list): ``update vdcnetwork set handleregistration=true, authority='<your HANDLE.NET prefix>';``
+
+ 
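+A combined sketch of the two SQL steps above, run from the shell via ``psql`` (the database name ``dvnDb`` is an example, and "1902.1" stands in for your own prefix):
+
+.. code-block:: sh
+
+    # run as a user allowed to modify the DVN database
+    psql -d dvnDb -c "insert into handleprefix (prefix) values ('1902.1');"
+    psql -d dvnDb -c "update vdcnetwork set handleregistration=true, authority='1902.1';"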
+
+Note: The DVN app comes bundled with the HANDLE.NET client libraries.
+You do not need to install these separately.
+
+Twitter setup
+======================
+
+To set up the ability for users to enable Automatic Tweets in your
+Dataverse Network:
+
+#. You will first need to tell Twitter about your Dataverse Network application. Go to `https://dev.twitter.com/apps <https://dev.twitter.com/apps>`_ and log in (or create a new Twitter account).
+#. Click "Create a new application".
+#. Fill out all the fields. For callback URL, use your Dataverse Network Home Page URL.
+#. Once created, go to the Settings tab and set Application Type to "Read and Write". You can optionally also upload an Application
+   Icon and fill out Organization details (the end user will see these).
+#. Click Details again. You will need both the Consumer key and secret as JVM options. Add them via the Glassfish console:
+
+   ``-Dtwitter4j.oauth.consumerKey=***``
+
+   ``-Dtwitter4j.oauth.consumerSecret=***``
+#. Restart Glassfish.
+#. To verify that Automatic Tweets are now properly set up, you can go to the Dataverse Network Options page or any Dataverse Options page and see that there is a new option, "Enable Twitter".
+
+Digital Object Identifiers
+==========================
+
+Beginning with version 3.6, DVN supports the use of Digital Object Identifiers (DOIs). Like the currently supported Handle System, DOIs enable a permanent link to studies in a DVN network.
+
+DVN uses the EZID API (`www.n2t.net/ezid <http://www.n2t.net/ezid>`__) to facilitate the creation and maintenance of DOIs.  Network administrators will have to arrange for their own account with EZID in order to enable the creation of DOIs.  Once an account has been set up, the following settings must be made in your DVN setup:
+
+Update your database: use ``psql`` or ``pgAdmin`` to execute the following SQL command:
+
+``update vdcnetwork set handleregistration=true, protocol='doi', authority='<the namespace associated with your EZID account>' where id = 0;``
+
+Add the following JVM options:
+
+``-Ddoi.username=<username of your EZID account>``
+
+``-Ddoi.password=<password of your EZID account>``
+
+``-Ddoi.baseurlstring=https://ezid.cdlib.org``
+
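+A combined command-line sketch of the above; the database name ``dvnDb``, the EZID namespace, and the credentials are placeholders (note the escaped colon in the ``asadmin`` option value):
+
+.. code-block:: sh
+
+    psql -d dvnDb -c "update vdcnetwork set handleregistration=true, protocol='doi', authority='<EZID namespace>' where id = 0;"
+    asadmin create-jvm-options '-Ddoi.username=<EZID username>'
+    asadmin create-jvm-options '-Ddoi.password=<EZID password>'
+    asadmin create-jvm-options '-Ddoi.baseurlstring=https\://ezid.cdlib.org'
+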
+Note: The DVN app comes bundled with the EZID API client libraries. You do not need to install these separately.
+
+Appendix
++++++++++++++++++++++++
+
+.. _do-you-need-r:
+
+Do you need R?
+==========================
+
+This is a more detailed explanation of the statement made earlier in the "Prerequisites" section: "Only the advanced modes of serving quantitative data to the users require R."
+
+In this context, by “quantitative data” we mean data sets for which
+machine-readable, variable-level metadata has been defined in the DVN
+database. “Subsettable data” is another frequently used term, in the
+DVN parlance. The currently supported sources of subsettable data are
+SPSS and STATA files, as well as raw tabulated or CSV files, with
+extra control cards defining the data structure and variable
+metadata. (See the full documentation in the User Guide under :ref:`Finding and Using Data <finding-and-using-data>`.)
+
+Once a “subsettable” data set is created, users can run online statistics and analysis on it. That’s where R is used. In our experience, most of the institutions that have installed the DVN did so primarily in order to share and process quantitative data. When this is the case, R must be considered a required component. But for a DVN built to serve a collection of strictly human-readable (text, image, etc.) data, R will not be necessary at all.
+
+.. _what-does-the-intstaller-do:
+
+What does the Installer do?
+===================================
+
+The Installer script (see the Quick Install and Running the Installer chapters) automates the following tasks:
+
+#. Checks the system for required components;
+#. Prompts the user for the following information:
+
+   a) Location of the Glassfish directory;
+
+   b) Access information (host, port, database name, username, password) for PostgreSQL;
+
+   c) Access information (host, port, username, password) for Rserve;
+
+#. Attempts to create the PostgreSQL user (role) and database, from the :ref:`prerequisite PostgreSQL setup step <postgresql>` above; see the :ref:`"PostgreSQL configuration"<postgresql-setup>` Appendix section for details.
+#. Using the :ref:`Glassfish configuration template <glassfish-configuration-template>` (see the Appendix section) and the information collected in step 2.b above, creates the config file ``domain.xml`` and installs it in the Glassfish domain directory.
+#. Copies additional configuration files (supplied in the dvninstall/config directory of the Installer package) into the config directory of the Glassfish domain.
+#. Installs the Glassfish Postgres driver (supplied in the dvninstall/pgdriver directory of the Installer package) into the ``lib`` directory of the Glassfish installation tree.
+#. Attempts to start Glassfish. The config file at this point contains the configuration settings that the DVN will need to run (see section :ref:`Glassfish Configuration, individual settings section<glassfish-configuration-individual-settings>` of the Appendix), but otherwise it is a "virgin", fresh config. Glassfish will perform some initialization tasks on this first startup and deploy some internal apps.
+#. If the previous step succeeds, the Installer attempts to deploy the DVN application (the Java archive DVN-EAR.ear supplied with the installer).
+#. Stops Glassfish, populates the DVN database with the initial content (section :ref:`"PostgreSQL configuration"<postgresql-setup>` of the Appendix), then starts Glassfish again.
+#. Attempts to establish connection to Rserve, using the access information obtained during step 2.c. If this fails, prints a warning message and points the user to the Prerequisites section of this guide where R installation is discussed.
+#. Finally, prints a message informing the user that their new DVN should be up and running, provides them with the server URL and suggests that they visit it, to change the default passwords and perhaps start setting up their Dataverse Network.
+
+Throughout the steps above, the Installer attempts to diagnose any
+potential issues and give the user clear error messages when things go
+wrong ("version of Postgres too old", "you must run this as root",
+etc.).
+
+Enough information is supplied in this manual to enable a user (a
+skilled and rather patient user, we may add) to perform all the steps
+above without the use of the script.
+
+.. _glassfish-configuration-template:
+
+Glassfish configuration template
+====================================
+
+The configuration template (``domain.xml.TEMPLATE``) is part of the
+installer zip package. The installer replaces the placeholder
+configuration tokens (for example, ``%POSTGRES_DATABASE%``) with the
+real values provided by the user to create the Glassfish configuration
+file ``domain.xml``.
+
+(The full configuration template is about 30K of XML, so it is not
+reproduced here; as explained above, it can be found in the installer
+zip package, if anyone wants to look at it for reference.)
+
+.. _glassfish-configuration-individual-settings:
+
+Glassfish Configuration, individual settings
+=====================================================
+
+As explained earlier in the Appendix, the Installer configures Glassfish
+by cooking a complete domain configuration file (``domain.xml``) and
+installing it in the domain directory.
+
+All of the settings and options however can be configured individually
+by an operator, using the Glassfish Admin Console.
+
+The Console can be accessed at the network port 4848 when Glassfish is
+running, by pointing a browser at
+
+     ``http://[your host name]:4848/``
+
+and logging in as ``admin``. The initial password is ``adminadmin``. It
+is of course strongly recommended to log in and change it first thing
+after you run the Installer.
+
+The sections below describe all the configuration settings that would
+need to be done through the GUI in order to replicate the configuration
+file produced by the Installer. This information is provided for the
+benefit of an advanced user who may want to experiment with individual
+options, or to attempt to install the DVN on a platform not supported by our
+installer; although we sincerely hope that nobody is ever driven to such
+desperate measures.
+
+.. _jvm-options:
+
+JVM options
+-----------------------
+
+Under Application Server->JVM Settings->JVM Options:
+
+If you are installing Glassfish in a production environment, follow
+these steps:
+
+#. | Delete the following options: -Dsun.rmi.dgc.server.gcInterval=3600000
+   | -Dsun.rmi.dgc.client.gcInterval=3600000
+#. | Add the following options (steps 1 and 2 are sketched with ``asadmin`` after this list):
+   | -XX:MaxPermSize=192m
+   | -XX:+AggressiveHeap
+   | -Xss128k
+   | -XX:+DisableExplicitGC
+   | -Dcom.sun.enterprise.ss.ASQuickStartup=false
+#. | To install on a multi-processor machine, add the following:
+   | ``-XX:+UseParallelOldGC``
+#. | To enable the optional HANDLE.NET installation and provide access to
+   | study ID registration, add the following (see the "Handle System"
+   | section in "Optional Components" for
+   | details):
+   | ``-Ddvn.handle.baseUrl=<Dataverse Network host URL>/dvn/study?globalId=hdl:``
+   | ``-Ddvn.handle.auth=<authority>``
+   | ``-Ddvn.handle.admcredfile=/hs/svr_1/admpriv.bin``
+#. | To enable the optional Google Analytics option on the Network Options
+   | page and provide access to site usage reports, add the following (see
+   | the "Google Analytics" section in the "Optional Components" for
+   | details):
+   |  ``-Ddvn.googleanalytics.key=<googleAnalyticsTrackingCode>``
+#. | Configure the following option only if you run multiple instances
+   | of the GlassFish server for load balancing. This option controls
+   | which GlassFish instance runs scheduled jobs, such as harvest or
+   | export.
+   | For the server instance that will run scheduled jobs, include the
+   | following JVM option:
+   | ``-Ddvn.timerServer=true``
+   | For all other server instances, include this JVM option:
+   | ``-Ddvn.timerServer=false``
+   | If you are installing Glassfish in either a production or development
+   | environment, follow these steps:
+
+   -  | Change the following options’ settings:
+      | Change ``-client`` to ``-server``.
+      | Change ``-Xmx512m`` to whatever size you can allot for the maximum
+      | Java heap space.
+      | Set ``-Xms512m`` to the same value to which you set ``-Xmx512m``.
+   -  | To configure permanent file storage (data and documentation files
+      | uploaded to studies) set the following:
+      | ``-Dvdc.study.file.dir=${com.sun.aas.instanceRoot}/config/files/studies``
+   -  | To configure the temporary location used in file uploads add the
+      | following:
+      | ``-Dvdc.temp.file.dir=${com.sun.aas.instanceRoot}/config/files/temp``
+   -  | To configure export and import logs (harvesting and importing),
+      | add the following:
+      | ``-Dvdc.export.log.dir=${com.sun.aas.instanceRoot}/logs/export``
+      | ``-Dvdc.import.log.dir=${com.sun.aas.instanceRoot}/logs/import``
+   -  | Add the following:
+      | ``-Djhove.conf.dir=${com.sun.aas.instanceRoot}/config``
+      | ``-Ddvn.inetAddress=<host or fully qualified domain name of server on which Dataverse Network runs>``
+      | ``-Ddvn.networkData.libPath=${com.sun.aas.instanceRoot}/applications/j2ee-apps/DVN-EAR``
+   -  | To manage calls to RServe and the R host (analysis and file upload), add 
+      | the following:
+      | ``-Dvdc.dsb.host=<RServe server hostname>``
+      | ``-Dvdc.dsb.rserve.user=<account>``
+      | ``-Dvdc.dsb.rserve.pwrd=<password>``
+      | ``-Dvdc.dsb.rserve.port=<port number>``
+
+      | For installing R, and for information about configuring these
+      | values in the ``Rserv.conf`` file, see :ref:`R and RServe <r-and-rserve>`.
+      | These settings must be configured for subsetting and analysis to
+      | work.
+   -  | To configure search index files set the following:
+      | ``-Ddvn.index.location=${com.sun.aas.instanceRoot}/config``
+   -  | To use the optional customized error logging and add more information
+      | to your log files, set the following:
+      | ``-Djava.util.logging.config.file=${com.sun.aas.instanceRoot}/config/logging.properties``
+      | **Note**: To customize the logging, edit the ``logging.properties`` file.
+   -  | The default size limit for file downloads is 100MB.  To override this
+      | default add the following JVM option:
+      | ``-Ddvn.batchdownload.limit=<max download bytes>``
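+
+For those who prefer the command line to the Admin Console GUI, steps 1 and 2 above can be performed with ``asadmin``; a sketch (note that literal colons in an option must be escaped as ``\:``):
+
+.. code-block:: sh
+
+    # inspect the current options first
+    asadmin list-jvm-options
+    # step 1: delete the RMI gc-interval options
+    asadmin delete-jvm-options '-Dsun.rmi.dgc.server.gcInterval=3600000'
+    asadmin delete-jvm-options '-Dsun.rmi.dgc.client.gcInterval=3600000'
+    # step 2: add the production options (escaped colons)
+    asadmin create-jvm-options '-XX\:MaxPermSize=192m'
+    asadmin create-jvm-options '-XX\:+AggressiveHeap'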
+
+EJB Container
+-----------------------------
+
+Under Configuration->EJB Container->EJB Timer Service:
+
+#. | Set the Timer Datasource to the following:
+   | ``jdbc/VDCNetDS``
+#. | Save the configuration.
+
+HTTP Service
+-----------------------------
+
+The HTTP Service configuration settings described in this section are suggested defaults. These settings are very important. There are no right values to define; the values depend on the specifics of your web traffic, how many requests you get, how long they take to process on average, and your hardware. For details, refer to the
+Sun Microsystems documentation web site at the following URL:
+
+`http://docs.sun.com/ <http://docs.sun.com/>`_
+
+
+| **Note**: If your server becomes so busy that it drops connections,
+| adjust the Thread Counts to improve performance.
+
+#. Under Configuration->HTTP Service->HTTP
+   Listeners->\ ``http-listener-1``:
+
+   -  Listener Port: 80
+   -  Acceptor Threads: The number of CPUs (cores) on your server
+
+#. Under Configuration->HTTP Service, in the RequestProcessing tab:
+
+   -  Thread Count: Four times the number of CPUs (cores) on your server
+   -  Initial Thread Count: The number of CPUs (cores)
+
+#. Under Configuration->HTTP Service->Virtual Servers->server: add a new property ``allowLinking`` with the value ``true``.
+
+#. Under Configuration->HTTP Service, configure Access Logging:
+
+   ::
+
+       format=%client.name% %auth-user-name% %datetime% %request% %status% %response.length%
+       rotation-enabled=true
+       rotation-interval-in-minutes=15
+       rotation-policy=time
+       rotation-suffix=yyyy-MM-dd
+
+JavaMail Session
+------------------------------------
+
+Under Resources->JavaMail Sessions->``mail/notifyMailSession``:
+
+-  | Mail Host: ``<your mail server>``
+   | **Note**: The Project recommends that you install a mail server on the same machine as GlassFish and use ``localhost`` for this entry. Since email notification is used for workflow events such as creating a dataverse or study, these functions may not work properly if a valid mail server is not configured.
+-  Default User: ``dataversenotify``
+   This does not need to be a real mail account.
+-  Default Return Address: ``do-not-reply@<your mail server>``
+
+JDBC Resources
+------------------------------------
+
+**Under Resources->JDBC->Connection Pools:**
+
+
+| Add a new Connection Pool entry (an ``asadmin`` sketch of this configuration follows the lists below):
+
+-  entryName: ``dvnDbPool``
+-  Resource Type: ``javax.sql.DataSource``
+-  Database Vendor: ``PostgreSQL``
+-  DataSource ClassName: ``org.postgresql.ds.PGPoolingDataSource``
+-  Additional Properties:
+
+   -  ConnectionAttributes: ``;create=true``
+   -  User: ``dvnApp``
+   -  PortNumber: ``5432`` (Port 5432 is the PostgreSQL default port.)
+   -  Password: ``<Dataverse Network application database password>``
+   -  DatabaseName: ``<your database name>``
+   -  ServerName: ``<your database host>``
+   -  JDBC30DataSource: ``true``
+
+| 
+
+**Under Resources->JDBC->JDBC Resources:**
+
+| Add a new JDBC Resources entry:
+
+-  JNDI Name: ``jdbc/VDCNetDS``
+-  Pool Name: ``dvnDbPool``
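+
+An ``asadmin`` sketch of the same pool and resource setup (the password, database name, and host are placeholders; ``asadmin`` separates properties with colons):
+
+.. code-block:: sh
+
+    asadmin create-jdbc-connection-pool \
+        --datasourceclassname org.postgresql.ds.PGPoolingDataSource \
+        --restype javax.sql.DataSource \
+        --property 'User=dvnApp:Password=<password>:DatabaseName=<your database name>:ServerName=<your database host>:PortNumber=5432' \
+        dvnDbPool
+    asadmin create-jdbc-resource --connectionpoolid dvnDbPool jdbc/VDCNetDS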
+
+JMS Resources
+-----------------------------------------
+
+Under Resources->JMS Resources (an ``asadmin`` sketch follows this list):
+
+#. Add a new Connection Factory for the DSB Queue:
+
+   -  JNDI Name: ``jms/DSBQueueConnectionFactory``
+   -  Resource Type: ``javax.jms.QueueConnectionFactory``
+
+#. Add a new Connection Factory for the Index Message:
+
+   -  JNDI Name: ``jms/IndexMessageFactory``
+   -  Resource Type: ``javax.jms.QueueConnectionFactory``
+
+#. Add a new Destination Resource for the DSB Queue:
+
+   -  JNDI Name: ``jms/DSBIngest``
+   -  Physical Destination Name: ``DSBIngest``
+   -  Resource Type: ``javax.jms.Queue``
+
+#. Add a new Destination Resource for the Index Message:
+
+   -  JNDI Name: ``jms/IndexMessage``
+   -  Physical Destination Name: ``IndexMessage``
+   -  Resource Type: ``javax.jms.Queue``
+
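+The same resources can be created from the command line; a sketch using ``asadmin`` (the ``Name`` property sets the physical destination name):
+
+.. code-block:: sh
+
+    asadmin create-jms-resource --restype javax.jms.QueueConnectionFactory jms/DSBQueueConnectionFactory
+    asadmin create-jms-resource --restype javax.jms.QueueConnectionFactory jms/IndexMessageFactory
+    asadmin create-jms-resource --restype javax.jms.Queue --property Name=DSBIngest jms/DSBIngest
+    asadmin create-jms-resource --restype javax.jms.Queue --property Name=IndexMessage jms/IndexMessage
+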
+.. _postgresql-setup:
+
+PostgreSQL setup
+=======================
+
+The following actions are normally performed by the automated installer
+script. These steps are explained here for reference, and/or in case
+you need to perform them manually:
+
+1. Start as root, then change to user postgres:
+   
+   ``su postgres``
+
+  Create the DVN database user (role):
+
+  ``createuser -SrdPE [DB_USERNAME]``
+
+  (you will be prompted to choose a user password).
+
+  Create DVN database:
+
+  ``createdb [DB_NAME] --owner=[DB_USERNAME]``
+  
+  ``[DB_NAME]`` and ``[DB_USERNAME]`` are the names you choose for your DVN database and database user. These, together with the password you have assigned, will be used in the Glassfish configuration so that the application can talk to the database.
+
+2. Before Glassfish can be configured for the DVN app, the Postgres driver needs to be installed in the <GLASSFISH ROOT>/lib directory. We supply a version of the driver known to work with the DVN in the dvninstall/pgdriver directory of the Installer bundle. (See the :ref:`"What does the Installer do?" <what-does-the-intstaller-do>` section of this appendix.) An example of the installed location of the driver:
+
+  ``/usr/local/glassfish/lib/postgresql-8.3-603.jdbc4.jar``
+
+3. Finally, after the DVN application is deployed under Glassfish for the first time, the database needs to be populated with the initial content:
+
+  | ``su postgres``
+  | ``psql -d [DB_NAME] -f referenceData.sql``
+  
+  The file referenceData.sql is provided as part of the installer zip package.
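+
+A quick way to confirm that the new role, database, and password authentication all work (the bracketed values are the names you chose above):
+
+.. code-block:: sh
+
+    # should connect over TCP and return a single row
+    psql -h 127.0.0.1 -U [DB_USERNAME] -d [DB_NAME] -c 'select 1;'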
+
+RedHat startup file for glassfish, example
+====================================================
+
+Below is an example of a glassfish startup file that you may want to
+install on your RedHat (or similar) system to have glassfish start
+automatically on boot.
+
+| Install the file as ``/etc/init.d/glassfish``, then run ``chkconfig glassfish on``
+
+Note the extra configuration steps before the domain start line,
+for increasing the file limit and allowing "memory overcommit"; these
+are useful settings to have on a production server.
+
+| You may of course add extra custom configuration specific to your
+  setup.
+
+.. code-block:: sh
+
+	#! /bin/sh 
+	# chkconfig: 2345 99 01 
+	# description: GlassFish App Server 
+	set -e 
+	ASADMIN=/usr/local/glassfish/bin/asadmin 
+	case "$1" in 
+	  start) 
+		echo -n "Starting GlassFish server: glassfish" 
+		# Increase file descriptor limit: 
+		ulimit -n 32768 
+		# Allow "memory overcommit": 
+		# (basically, this allows to run exec() calls from inside the 
+		# app, without the Unix fork() call physically hogging 2X 
+		# the amount of memory glassfish is already using) 
+		echo 1 > /proc/sys/vm/overcommit_memory 
+		$ASADMIN start-domain domain1 
+		echo "." 
+		;; 
+	  stop) 
+		echo -n "Stopping GlassFish server: glassfish" 
+		$ASADMIN stop-domain domain1 
+		echo "."
+		 ;; 
+	  *) 
+		echo "Usage: /etc/init.d/glassfish {start|stop}" 
+		exit 1 
+	esac 
+	exit 0
+
+
+Enabling secure remote access to Asadmin
+========================================
+
+As was mentioned in the Glassfish section of the manual, in version
+3.1.2 the admin interface (asadmin) is configured to be accessible on the
+localhost interface only. If you need to be able to access the admin
+console remotely, you will have to enable secure access to it. (It will
+be accessible over https only, at ``https://<YOUR HOST>:4848``; connections
+to ``http://<YOUR HOST>:4848`` will be automatically redirected to the https
+interface.)
+
+The following must be done as root:
+
+#. First you need to configure the admin password: 
+
+   ``<GF LOCATION>/glassfish3/bin/asadmin change-admin-password`` 
+
+   (since you didn't create one when you were installing Glassfish, leave the "current password" blank, i.e., hit ENTER)
+    
+#. Enable the secure access: 
+
+  ``<GF LOCATION>/glassfish3/bin/asadmin enable-secure-admin`` 
+
+  (Note that you will need to restart Glassfish after step 2 above.)
+
+.. _using-lockss-with-dvn:
+
+Using LOCKSS with DVN
+=======================================
+
+DVN holdings can be crawled by LOCKSS servers (`www.lockss.org <http://www.lockss.org>`__). This is made possible by a special plugin, developed and maintained by the DVN project, which a LOCKSS daemon uses to crawl and access materials served by a Dataverse Network.
+
+The current stable version of the plugin is available at the following location:
+
+`http://lockss.hmdc.harvard.edu/lockss/plugin/DVNOAIPlugin.jar <http://lockss.hmdc.harvard.edu/lockss/plugin/DVNOAIPlugin.jar>`__
+
+
+As of January 2013 and DVN version 3.3, the plugin is compatible with the LOCKSS daemon version 1.55. The plugin sources can be found in the main DVN source tree in `https://dvn.svn.sourceforge.net/svnroot/dvn/dvn-app/trunk/src/DVN-lockss <https://dvn.svn.sourceforge.net/svnroot/dvn/dvn-app/trunk/src/DVN-lockss>`_ (please note that the DVN project is currently **in the process of moving to GitHub!** The preserved copy of the 3.3 source will be left at the URL above, together with the information on the current location of the source repository).
+
+In order to crawl a DVN, the following steps need to be performed:
+
+#. Point your LOCKSS daemon to the plugin repository above. (Refer to the LOCKSS documentation for details);
+#. Create a LOCKSS Archival Unit for your target DVN:
+
+   In the LOCKSS Admin Console, go to **Journal Configuration** -> **Manual Add/Edit** and click on **Add Archival Unit**.
+
+   On the next form, select **DVNOAI** in the pull down menu under **Choose a publisher plugin** and click **Continue**.
+
+   Next, configure the parameters that define your DVN Archival Unit. The LOCKSS daemon can be configured to crawl either the entire holdings of a DVN (no OAI set specified) or a select Dataverse.
+
+Note that LOCKSS crawling must be authorized on the DVN side. Refer to
+the :ref:`"Edit LOCKSS Settings" <edit-lockss-harvest-settings>`
+section of the DVN Network Administrator Guide for the instructions on
+enabling LOCKSS crawling on the network level, and/or to the
+:ref:`Enabling LOCKSS access to the Dataverse <enabling-lockss-access-to-the-dataverse>`
+of the Dataverse Administration Guide. Once you allow LOCKSS crawling of
+your Dataverse(s), you will need to enter the URL of the "LOCKSS
+Manifest" page provided by the DVN in the configuration above. For the
+network-wide archival unit this URL will be
+``http://<YOUR SERVER>/dvn/faces/ManifestPage.xhtml``; for an
+individual dataverse it is
+``http://<YOUR SERVER>/dvn/dv/<DV ALIAS>/faces/ManifestPage.xhtml``.
+
+| The URL of the DVN OAI server is ``http://<YOUR DVN HOST>/dvn/OAIHandler``.
+
+Read Only Mode
+===================
+
+A Read Only Mode has been established in DVN to allow the application to remain available while deploying new versions or patches.  Users will be able to view data and metadata, but will not be able to add or edit anything.  Currently there is no way to switch to Read Only Mode through the application. 
+In order to change the application mode you must apply the following queries through ``psql`` or ``pgAdmin``:
+
+To set to Read Only Mode:
+
+      | ``BEGIN;``
+      | ``SET TRANSACTION READ WRITE;``
+      | ``-- Note: database and user names may have to be modified for your particular installation;``
+      | ``-- You may also customize the status notice, which will appear on all pages of the application;``
+      | ``update vdcnetwork set statusnotice = 'This network is currently in Read Only state. No saving of data will be allowed.';``
+      | ``ALTER DATABASE "dvnDb" set default_transaction_read_only=on;``
+      | ``ALTER USER "dvnApp" set default_transaction_read_only=on;``
+      | ``END;``
+
+To return to regular service:
+
+      | ``BEGIN;``
+      | ``SET TRANSACTION READ WRITE;``
+      | ``-- Note: database and user names may have to be modified for your particular installation;``
+      | ``ALTER DATABASE "dvnDb" set default_transaction_read_only=off;``
+      | ``ALTER USER "dvnApp" set default_transaction_read_only=off;``
+      | ``update vdcnetwork set statusnotice = '';``
+      | ``END;``
+
+Backup and Restore
+================================
+
+**Backup**
+
+| The PostgreSQL database and study files (contained within the Glassfish directory by default, but this is :ref:`configurable via JVM options <jvm-options>`) are the most critical components to back up. The use of standard PostgreSQL tools (e.g., ``pg_dump``) is recommended.
+
+Glassfish configuration files (e.g., domain.xml, robots.txt) and local
+customizations (e.g., images in the docroot) should be backed up as well.
+In practice, it is best to simply back up the entire Glassfish directory
+as other files such as logs may be of interest.
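+
+A minimal backup sketch (the database name, user, and paths are examples; adjust for your installation):
+
+.. code-block:: sh
+
+    # dump the DVN database and archive the entire Glassfish directory
+    pg_dump -U dvnApp dvnDb | gzip > /backup/dvnDb-$(date +%F).sql.gz
+    tar czf /backup/glassfish-$(date +%F).tar.gz /usr/local/glassfish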
+
+| **Restore**
+
+Restoring DVN consists of restoring the PostgreSQL database and the
+Glassfish directory.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-user-main.txt	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,4744 @@
+====================================
+User Guide
+====================================
+
+Common Tasks
+++++++++++++++++++
+
+Here is a list of the most common ways people use the Dataverse Network.
+Activities can be grouped into finding and using data or publishing
+data. A brief description of each activity follows with more detailed
+information available in the Users Guide.
+
+Finding Data
+===============
+
+Visitors to the site can browse dataverses looking for data of
+interest or they can search by keywords. There are Basic and Advanced
+Searches.
+
+**Browsing the Site**
+
+
+The Network Homepage presents a list of recently released dataverses on the left side of the page.  
+A dataverse is a container for studies that can be managed as a group by the dataverse administrator. 
+Most often a dataverse represents a single organization or scholar and so their studies are often related. 
+On the right side of the page there are lists of both recently released studies and studies that have been 
+downloaded most often.  At the bottom of these lists, the View More link brings the user to a complete list 
+of released dataverses or studies as applicable.  The home page also includes a scrolling list of dataverse 
+collections called subnetworks, if applicable.
+
+Clicking on the name of a dataverse, study or subnetwork displays its home page.
+
+**Browsing Dataverses**
+
+If you click the View More link under the recently released dataverse list on the Network Homepage you'll be brought to 
+the Browse Dataverses page.  Here you can sort the dataverses by Name, Affiliation, Release Date and Download Count.  You 
+may also filter the dataverses by typing a filter term in the "filter" text box.  The filter will only display those 
+dataverses whose name or affiliation matches the filter term.  Clicking on the name of a dataverse displays its home page.
+
+**Search**
+
+For many purposes, Basic Search is sufficient. On the center top of the network homepage enter keywords or
+complete sentences and click **Search**. A resulting list of studies is
+displayed. Further refinement can be made by clicking facets such as
+"Original Dataverse" or "Author" under "Refine Results" on the left side
+of the page. After a facet has been clicked, it will appear at the top
+of the page under "Search Results for" and clicking the selected facet
+will remove it, restoring the previous results. In addition to the
+network homepage, Basic Search can be found on the upper right of the
+dataverse home pages as well as on the search results and Advanced
+Search pages.  Be aware that searching from a dataverse limits the scope
+of search to studies within that dataverse while searching from the
+network home page searches all released studies.
+
+When a more specific search is needed, use Advanced Search. Advanced
+Search allows searching on keywords found in specific cataloging
+information fields, in particular collections in a dataverse where
+available, or by variable name. The link to Advanced Search is next to
+the Basic Search feature on the network and dataverse home pages and the
+search results page.
+
+Using Data
+==============
+
+Data in the Dataverse Network is stored in files. Files of any
+type are allowed but some types of tabular and network data files are
+supported by additional functionality, including downloading in
+different formats, downloading subsets of variables, and analytical
+tools.
+
+**Download Files**
+
+To download files, click on a study of interest, then select the
+data tab. Individual files can be downloaded or groups of files by
+checking files of interest or entire file categories and clicking
+Download All Selected Files. Groups of files are packaged into a single
+``.zip`` file. Group downloads have a download size limit and any selected
+files not downloaded will be indicated in the ``.zip`` file.
+
+Downloading individual files in an alternate format where available is
+straightforward. Choose the format from the Download As select box next
+to the file and the file will download.
+
+**Subset or Analyze Files**
+
+Tabular and Network data files of recognized formats (Stata, SPSS, RData,
+Graphml) can be further manipulated through downloading subsets of
+variables and by performing various statistical analyses. Where
+available these options appear as an additional link, Access
+Subset/Analysis, below the Download As format select box next to each
+file. The functionality is quite different for tabular versus network
+data files so refer to the Users Guide for additional information.
+
+Publishing Data
+====================
+
+Publishing data through the Dataverse Network is straightforward:
+create an account and a place to store your data, organize your data,
+upload files, and release your data for public access.
+
+**Create a Dataverse and Account**
+
+The first step to publishing your data is to create a place to
+store it that can be managed by you. To do this you need an account.
+Create a dataverse and account by clicking on the Create a Dataverse
+link on the upper right side of the network homepage. This leads you
+through a series of steps at the end of which you will have a dataverse
+and user account to manage it.
+
+Newly created dataverses are unreleased and not available for
+browsing. Make note of the link to your dataverse at the end of the
+process so you can return to it until it becomes released. Another way
+to access your unreleased dataverse is to log in, click on your user
+name in the upper right of the page, dataverses tab, then the name of
+your dataverse.
+
+**Create Studies**
+
+Once you have a user account and a place to store your data, you
+need to take the first step toward organizing your data into studies.
+Many data have been or will be used to publish a study so this step may
+be clear. If not, a study should represent a particular thesis or
+inquiry with accompanying data. First, log in with your new user account
+and navigate to your dataverse home page. Next, click Options in the
+upper right of the page. From there click Create a Study and complete
+the form. Most of the fields on the study form are optional; only the
+title is required. If you are unsure of what these values should be,
+enter a title and these fields can be completed later before releasing
+the study.
+
+Be aware that a newly created study is unreleased and not available
+for browsing. To access an unreleased study for further editing, click
+on Options->Manage Studies and click on your study's name. You can also
+click on your username, studies tab, then the study name.
+
+**Upload Files**
+
+
+Now that you have a place to store and manage your data and a
+study to associate it with, you can upload your data and documentation
+files. Files are uploaded to a study. Navigate to the study you want to
+upload particular files to and click on Add Files on the upper right
+side of the page. The add files page requires you to first select a file
+type, then browse for the file on your local system. Some file types
+undergo additional processing to support extended functionality but if
+you are unsure which type to choose, select Other. At this time you can
+enter a descriptive Category which can be used to group related files
+and a file description. If you are unsure of these values they can be
+added later.
+
+Though files are selected individually, several files can be added
+to this page at one time. It is recommended to upload only a few files
+at a time since this can take some time to complete, depending on file
+type.
+
+An alternative to selecting files individually is to first create an
+archive of files in ``.zip`` or ``.tar`` format and then select the
+appropriate "multiple files" Data Type when uploading your archive. The
+zip file or tarball will be unpacked so that the individual files will
+be added to the page.
+
+If you upload an SPSS (``.por``, ``.sav``), Stata (``.dta``) or R
+(``.RData``) file, your study will be temporarily unavailable for
+editing until the additional processing on the file is completed. This
+can be brief or take some time depending on the size and complexity of
+the file. A message at the top of the file indicates it is unavailable
+for editing and an email will be sent when finished to the address you
+indicate on the add files page.
+
+**Release Studies**
+
+Once your study is in a state where it's ready to be published or
+shared with others, it should be released. This is done either by
+clicking Release on the upper right of the study page or by navigating
+to your dataverse, clicking Options, Manage Studies, then clicking
+release next to the study you want released. Note that releasing a study
+fixes the version number. Additional changes to the study will create a
+new draft version. The draft can be repeatedly edited without changing
+the version number until it is released. At this point your study is
+visible within your dataverse. If your dataverse is also released it
+will be searchable and viewable by others. If your dataverse is not yet
+released, it will only be visible to people with access to your
+dataverse.
+
+**Release Dataverse**
+
+Releasing a dataverse makes it appear in the list of dataverses on
+the network home page and makes it viewable by others. This may require
+adding a study or other details to your dataverse depending on site
+policy. By default, releasing a dataverse requires nothing but changing
+the Dataverse Release Settings to Released on the Manage Permissions
+page. To release your dataverse, navigate to the dataverse home page,
+choose Options from the upper right of the page, click on Dataverse
+Settings, then Manage Permissions. At the top of the page, change
+Dataverse Release Settings to Released and click Save Changes.
+
+Any studies that are released are now visible to others. Those
+that are unreleased do not appear in the list of studies on the
+dataverse home page.
+
+At this point you have published one or more studies and their data and
+made them available for browsing or searching.
+
+Things to Consider, Next Steps
+===============================
+
+The above tasks are fundamental activities and may be all that is
+needed for most users. Some situations are more complex and require
+additional consideration. These include publishing and organizing data
+for large organizations, shared research between scholars, and enabling
+contributions by a geographically diverse team while keeping data
+private until ready for publication.
+
+For **large organizations**, a single dataverse may suffice. Collections
+within a dataverse can further organize studies by sub unit or topic.
+The dataverse itself can be **customized** with the organization's own
+website header and footer. In some cases, sub units or organizations
+want to maintain their own distinct branding. In such cases each can
+create and maintain their own dataverse and the parent dataverse can
+link to their studies through a link collection.
+
+For **shared research**, the model is similar: a single dataverse based
+on the research project can be created to which both researchers have
+administration rights. Additionally, researchers can maintain their own
+dataverses for other work and link back to the studies in the shared
+project dataverse.
+
+**Allowing a diverse team to contribute** to an unreleased dataverse is
+simply a matter of granting the appropriate level of **permissions** to
+each team member. At minimum, each team member would need to be added as
+a contributor to the dataverse. By default, they can only contribute to
+studies they themselves have created. However, this can be expanded from
+the dataverse Manage Permissions page to allow contributors to edit all
+studies in the dataverse. Changes made by contributors need to be
+approved by a curator or admin before a study can be released.
+
+How the Guides Are Organized
+============================
+
+The guides are reference documents that explain how to use
+the Dataverse Network functionality: Installers Guide, Developers Guide, APIs Guide, and Users
+Guide. The Users Guide is further divided into primary activities: using
+data, creating studies, administering dataverses or the network. Details
+on all of the above tasks can be found in the Users Guide. The
+Installers Guide is for people or organizations who want to host their
+own Dataverse Network. The Developers Guide contains instructions for
+people who want to contribute to the Open Source Dataverse Network
+project or who want to modify the code to suit their own needs. Finally, the 
+APIs Guide is for people who would like to use our APIs in order to build apps that 
+can work with the Dataverse Network web application. This `page <http://thedata.org/book/apps>`__ lists some current apps
+which have been developed with our APIs.
+
+Other Resources
+=========================
+
+**Dataverse Network Project Site**
+
+
+Additional information about the Dataverse Network project itself
+including presentations, information about upcoming releases, data
+management and citation, and announcements can be found at
+`http://thedata.org <http://thedata.org/>`__
+
+**User Group**
+
+As the user community grows we encourage people to share ideas, ask
+questions, or offer suggestions for improvement. Go to
+`https://groups.google.com/group/dataverse-community <https://groups.google.com/group/dataverse-community>`__ to register for our dataverse community group.
+
+**Follow Us on Twitter**
+
+For up-to-date news, information and developments, follow our Twitter account: `https://twitter.com/thedataorg <https://twitter.com/thedataorg>`__
+
+**Support**
+
+We maintain an email-based support service that's free of charge. We
+attempt to respond within one business day to all questions, and if an
+issue cannot be resolved immediately, we'll let you know what to expect.
+
+Contact Us
+=======================
+
+The support email address is
+`support@thedata.org <mailto:support@thedata.org>`__.
+
+This is the same address as the Report Issue link. We try to respond
+within one business day.
+
+.. _finding-and-using-data:
+
+Finding and Using Data
++++++++++++++++++++++++
+
+End users, without needing to log in to the Dataverse Network, can browse
+dataverses, search studies, view study descriptions and data files for
+public studies, and subset, analyze and visualize data for public data
+files. If entire studies or individual data files are restricted, end
+users need to be given permission by the dataverse administrator to
+access the data.
+
+
+Search
+=======
+
+To find a study or data set, you can search or browse studies offered
+in any released dataverse on the Network homepage. Each dataverse offers
+a hierarchical organization comprising one or more collections of data
+sets with a particular theme. Most dataverses allow you to search for
+data within their files, or you can start browsing through the dataverse
+classifications that are closest to your substantive interests.
+
+**Browse Collections**
+
+You can browse all public dataverses from the Network homepage. Click
+the title of a dataverse to browse that dataverse's collections and
+studies. Click the title of a collection to view a list of studies and
+subcollections for that selection. Click the title of a study to view
+the Cataloging Information and study files for that selection.
+
+When you select a dataverse to view its contents, the homepage opens to
+the \ *root collection*, and the dataverse's studies are displayed
+directly under the root collection name. If the root collection contains
+other collections, then those collections are listed and not the studies
+within them. You must select a collection title to view the studies
+contained within it.
+
+Note: If a dataverse includes links to collections from another
+dataverse and the root collection does not contain other collections,
+the homepage opens to a list of the root and linked collections.
+
+**Search - Basic**
+
+You can search for studies across the entire Dataverse Network from the
+Network homepage, or search within a dataverse from the dataverse
+homepage. When you search across the Network, studies from restricted
+dataverses are not included in the search. Restricted studies are
+included in search results, and a lock icon appears beside those studies
+in the results list. After your search is complete, you can further
+narrow your list of data by searching again in the results. See Search
+Tips for search examples and guidelines.
+
+When you enter more than one term in the search text field, the results
+list contains studies that have these terms near each other within the
+study fields searched. For example, if you enter ``United Nations``,
+the results include studies where the words *United* and *Nations* are
+separated by no more than four words in the same study field, such as
+abstract or title.
+
+It supports a search in any field of the studies' Cataloging
+Information, which includes citation information, abstract and other
+scope-related information, methodology, and Terms of Use. In addition,
+file descriptions also are searched.
+
+**Search - Advanced**
+
+In an advanced search, you can refine your criteria by choosing which
+Cataloging Information fields to search. You also can apply logic to the
+field search. For text fields, you can specify that the field searched
+either *contains* or *does not contain* the text that you enter. For
+date fields, you can specify that the field searched is either *later
+than* or *earlier than* the date that you enter. Refer to
+the `Documentation <http://lucene.apache.org/java/docs/>`__  page for
+the latest version at the Lucene website and look for *Query Syntax* for full details.
+
+To perform an advanced search, click the Advanced Search link at the
+top-right of the Search panel. You can search the following study
+metadata fields by using the Search Scope drop-down list:
+
+-  Title - Title field of studies' Cataloging Information.
+-  Author - Author fields of studies' Cataloging Information.
+-  (Study) Global ID - ID assigned to studies.
+-  Other ID - A different ID previously given to the study by another
+   archive.
+-  Abstract - Any words in the abstract of the study.
+-  Keyword - A term that defines the nature or scope of a study. For
+   example, ``elections``.
+-  Keyword Vocabulary - Reference to the standard used to define the
+   keywords.
+-  Topic Classification - One or more words that help to categorize the
+   study.
+-  Topic Classification Vocabulary - Reference used to define the Topic
+   Classifications.
+-  Producer - Institution, group, or person who produced the study.
+-  Distributor - Institution that is responsible for distributing the
+   study.
+-  Funding Agency - Agency that funded the study.
+-  Production Date - Date on which the study was created or completed.
+-  Distribution Date - Date on which the study was distributed to the
+   public.
+-  Date of Deposit - Date on which the study was uploaded to the
+   Network.
+-  Time Period Cover Start - The beginning of the period covered by the
+   study.
+-  Time Period Cover End - The end of the period covered by the study.
+-  Country/Nation - The country or countries where the study took place.
+-  Geographic Coverage - The geographical area covered by the study. For
+   example, ``North America``.
+-  Geographic Unit - The smallest geographic unit in which the study
+   took place, such as ``state``.
+-  Universe - Universe of interest, population of interest, or target
+   population.
+-  Kind of Data - The type of data included in the file, such
+   as ``survey data``, ``census/enumeration data``,
+   or ``aggregate data``.
+-  Variable Information - The variable name and description in the
+   studies' data files, given that the data file is subsettable and
+   contains tabular data. It returns the studies that contain the file
+   and the variable name where the search term was found.
+
+**Sort Results**
+
+When your search is complete, the results page lists studies that met
+the search criteria in order of relevance. For example, a study that
+includes your search term within the Cataloging Information in ten
+places appears before a study that includes your search term in the
+Cataloging Information in only one place.
+
+You can sort search results by title, study ID, last updated, or number
+of downloads (that is, the number of times users downloaded any file
+belonging to that study). Click the Sort By drop-down list to choose
+your sort order.
+
+**Search Tips**
+
+Use the following guidelines to search effectively within a Network or a
+dataverse:
+
+-  The default search syntax uses ``AND`` logic within individual
+   fields. That is, if you enter more than one term, the search engine
+   looks for all terms within a single field, such as title or abstract.
+   For example, if you enter ``United Nations report``, the results
+   list any studies that include the terms *United*, *Nations*,
+   and *report* within a single metadata field.
+-  The search logic looks for multiple terms within a specific proximity
+   to one another, and in the same field. The current proximity
+   criterion is four words. That is, if you enter two search terms, both
+   terms must be within four words of each other in the same field to be
+   returned as a result.
+   For example, you might enter ``10 year`` in a basic search. If a
+   study includes the string *10 million deaths per year* within a
+   metadata field, such as abstract, that study is not included in the
+   search results. A study that contains the string *10 per year* within
+   the abstract field is included in the search results.
+-  During the indexing process that supports searches, periods are
+   removed from strings and each term between periods is indexed
+   individually. If you perform a basic search for a term that contains
+   one or more periods, the search works because the analyzer applies
+   the *AND* logic. If you search on a specific field, though, you
+   should specify each component of the string between periods
+   individually to return your results.
+-  You can enter one term in the search field, and then search within
+   those results for another term to narrow the results further. This
+   might be more effective than searching for both terms at one time, if
+   those terms do not meet the proximity and field limits specified
+   previously.
+   You could search first for an author's name, and then search those
+   results for a specific term in the title. If you try searching for
+   both terms in the author and title fields together, you might not
+   find the study for which you are looking.
+   For example, you can search the Harvard Dataverse Network for the
+   following study:
+
+       *Gary King; Will Lowe, 2003, "10 Million International Dyadic
+       Events", hdl:1902.1/FYXLAWZRIA UNF:3:um06qkr/1tAwpS4roUqAiw==
+       Murray Research Archive [Distributor]*
+
+   If you type ``King, 10 Million`` in the Search field and click
+   Search, you see ``0 matches were found`` in the Results field. If
+   you type ``10`` in the Search field and click Search, you see
+   something like ``1621 matches were found`` in the Results field.
+   But if you first type ``King`` in the Search field and click
+   Search, then type ``10 Million`` in the Search field and click
+   Search again, you see something like ``4 matches were found`` in the
+   Results field.
+
+
+View Studies / Download Data
+============================
+
+**Cataloging Information**
+
+When a study is created, a set of *metadata* is associated with that
+study. This metadata is called the *Cataloging Information* for the
+study. When you select a study to view it, you first see the Cataloging
+Information tab listing the metadata associated with that study. This is
+the default view of a study.
+
+Cataloging Information contains numerous fields that help to describe
+the study. The amount of information you find for each study varies,
+based on what was entered by the author (Contributor) or Curator of that
+study. For example, one study might display the distributor, related
+material, and geographic coverage. Another study might display only the
+authors and the abstract. Every study includes the *Citation Information* fields in the Cataloging Information.
+
+Note: A comprehensive list of all Cataloging Information fields is
+provided in the :ref:`List of Metadata References <metadata-references>`.
+
+Cataloging Information is divided into four sections. These sections and
+their details are displayed only when the author (Contributor) or
+Curator provides the information when creating the study. Sections
+consist of the following:
+
+-  Citation Information - These fields comprise
+   the `citation <http://thedata.org/citation>`__ for the study,
+   consisting of a global identifier for all studies and a UNF, or
+   Universal Numerical Fingerprint, for studies that contain subsettable
+   data files. It also can include information about authors, producers
+   and distributors, and references to related studies or papers.
+-  Abstract and Scope - This section describes the research study, lists
+   the study's data sets, and defines the study's geographical scope.
+-  Data Collection/Methodology - This section includes the technical
+   details of how the author obtained the data.
+-  Terms of Use - This information explains that the study requires
+   users to accept a set of conditions or agreements before downloading
+   or analyzing the data. If any *Terms of Use* text is displayed in
+   the Cataloging Information section, you are prompted to accept the
+   conditions when you click the download or analyze icons in the Files
+   page.
+   Note: A study might not contain Terms of Use, but in some cases the
+   original parent dataverse might have set conditions for all studies
+   owned by that dataverse. In that case, the conditions are inherited
+   by the study and you must accept these conditions before downloading
+   files or analyzing the data.
+   
+Study metadata can be downloaded in XML format using a link at the bottom 
+of the study Cataloging Information tab:  `DDI (without variables) <https://thedata.harvard.edu/dvn/api/metadata/91148?partialExclude=codeBook/dataDscr>`__
+/ `DDI (full) <https://thedata.harvard.edu/dvn/api/metadata/91148>`__. 
+These links appear for released studies whose metadata has been exported. 
+Studies are typically exported on a daily basis.
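+
+As an illustrative sketch (assuming R is available locally), the same
+exported DDI can also be fetched programmatically. The host and the
+numeric study ID ``91148`` below come from the example links above;
+substitute the ID of the study you are viewing::
+
+    # Fetch the exported DDI metadata for a released study.
+    base <- "https://thedata.harvard.edu/dvn/api/metadata/91148"
+
+    # Full DDI, including the variable-level (codeBook/dataDscr) section:
+    download.file(base, destfile = "study_91148_ddi_full.xml")
+
+    # DDI with the variable-level section excluded:
+    download.file(paste0(base, "?partialExclude=codeBook/dataDscr"),
+                  destfile = "study_91148_ddi_novars.xml")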
+
+**List of Study Files**
+
+When you view a study, click the Documentation, Data and Analysis tab to
+view a list of all electronic files associated with the study that were
+provided by the author or Curator.
+
+A study might contain documentation, data, or other files. When the
+study contributor uploads data files of the type ``.dta``, ``.sav``, or ``.por`` to the Network, those files are converted
+to ``.tab`` tab-delimited files. These ``.tab`` files
+are subsettable, and can be subsetted and analyzed online by using the Dataverse Network
+application.
+
+Data files of the type ``.xml`` also are considered to be subsettable,
+and can be subsetted and analyzed to a minimal degree online.
+An ``.xml`` type file indicates social network data that complies with
+the `GraphML <http://graphml.graphdrawing.org/>`__ file format.
+
+You can identify a subsettable data file by the *Subsetting* label and
+the number of cases and variables listed next to the file name. Other
+files that also contain data might be associated with a study, but the
+Dataverse Network application does not recognize them as data (or
+subsettable) files.
+
+**Download Study Files**
+
+You can download any of the following within a study:
+
+-  All or selected data files within a *study* or a *category* (type
+   of files)
+-  Individual *data files*
+-  Individual subsets within a data file (see :ref:`Subset and Analyze
+   Tabular Data Sets <tabular-data>`
+   or :ref:`Subset and Analyze Network Data Sets <network-data>` for details)
+
+The default format for subsettable tabular data file downloads
+is *tab-delimited*. When you download one or more subsettable files in
+tab-delimited format, the file contains a header row. When you download
+one subsettable file, you can select from the following formats in
+addition to tab-delimited:
+
+-  Original file
+-  Splus
+-  Stata
+-  R
+
+The default format for subsettable network data file downloads
+is *Original file*. In addition, you can choose to download network
+data files in *GraphML* format.
+
+If you select any other format for a tabular data file, the file is
+downloaded in a zipped archive. You must unzip the archive to view or
+use the individual data file.
+
+If you download all or a selection of data files within a study, the
+files are downloaded in a zipped archive, and the individual files are
+in tab-delimited or network format. You must unzip the archive to view
+or use the individual data files.
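+
+A minimal sketch of opening such a download in R (the archive name
+``zipFile_1.zip`` is hypothetical; your browser shows the actual name)::
+
+    # Extract the archive and read one tab-delimited file.
+    # Downloaded .tab files include a header row, hence header = TRUE.
+    files <- unzip("zipFile_1.zip", exdir = "study_files")
+    dat <- read.delim(files[1], header = TRUE, sep = "\t")
+    str(dat)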
+
+Note: Studies and data files often have user restrictions applied. If
+prompted to accept Terms of Use for a study or file, check the *I Accept* box and then click the Continue button to view or download the
+file.
+
+**User Comments**
+
+If the User Comment feature is enabled within a dataverse, users are
+able to add comments about a study within that dataverse.
+
+When you view a study, click the User Comments tab to view all comments
+associated with the study. Comments can be monitored and abuse reported
+to the Network admin, who has permission to remove any comments deemed
+inappropriate. Note that the dataverse admin does not have permission to
+remove comments, to prevent bias.
+
+If you choose, you also can add your own comments to a study from the
+User Comments tab. See :ref:`Comment on Studies or Data <edit-study-comments-settings>` for
+detailed information.
+
+Note: To add a comment to a study, you must register and create an
+account in the dataverse that owns the study about which you choose to
+comment. This helps to prevent abuse and SPAM issues.
+
+**Versions**
+
+When a study is created, a version is created with it. This version
+archives the *metadata* and *data files* associated with the study
+citation or UNF.
+
+**View Citations**
+
+You can view a formatted citation for any of the following entities
+within the Dataverse Network application:
+
+-  Studies - For every study, you can view a citation for that study.
+   Go to the Cataloging Information tab for a study and view the *How
+   to Cite* field.
+-  Data sets - For any data set, you can view a citation for that set.
+   Go to the Documentation, Data and Analysis tab for a study to see the
+   list of study files. To view the citation for any data set, click
+   the *View Data Citation* link associated with that subsettable
+   file.
+-  Data subsets - If you subset and analyze a data set, you can view a
+   citation for each subset. 
+   See :ref:`Apply Descriptive Statistics <apply-descriptive-statistics>` or :ref:`Perform Advanced Analysis <perform-advanced-analysis>` for
+   detailed information.
+   Also, when you download a workspace file, a copy
+   of the citation information for that subset is provided in the
+   download.
+
+Note: For individual variables within a subsettable data subset, you can
+view the `UNF <http://thedata.org/citation/tech>`__ for that variable.
+This is not a full citation for the variable, but it is one component of
+that citation. Note also that this does not apply to ``.xml`` data.
+
+Subset and Analysis
+====================
+
+Subsetting and analysis can be performed on tabular and network data
+files. Refer to the appropriate section for more details.
+
+.. _tabular-data:
+
+Tabular Data
+--------------
+
+Tabular data files (subsettable files) can be subsetted and analyzed
+online by using the Dataverse Network application. For analysis, the
+Dataverse Network offers a user interface to Zelig, a powerful, R-based
+statistical computing tool. A comprehensive set of Statistical Analysis
+Models is provided.
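+
+For orientation only, the following minimal sketch shows the kind of
+classic Zelig call such an analysis runs in R. The model name ``"ls"``
+(least squares) matches one entry in the model list below, while the
+variable and data frame names are hypothetical; the exact code used in
+your session is included in the replication workspace described later::
+
+    library(Zelig)
+
+    # Least squares regression of y on x1 and x2 over a data frame
+    # named mydata (illustrative names only):
+    z <- zelig(y ~ x1 + x2, model = "ls", data = mydata)
+    summary(z)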
+
+After you find the tabular data set that you want, access the Subset
+and Analysis options to use the online tools. Then, you can *subset
+data by variables or observations*, translate it into a convenient
+format, download subsets, and apply statistics and analysis.
+
+Network data files (also subsettable) can be subsetted online, and then
+downloaded as a subset. Note that network data files cannot be analyzed
+online.
+
+Review the Tabular Data Recode and Subset Tips before you start.
+
+**Access Subset and Analysis Options**
+
+You can subset and analyze tabular data files before you download the
+file or your subsets.
+
+To access the Subset and Analysis options for a data set:
+
+#. Click the title of the study from which you choose to analyze or
+   download a file or subset.
+#. Click the Documentation, Data and Analysis tab for the study.
+#. In the list of study files, locate the data file that you choose to
+   download, subset, or analyze.
+   You can download data sets for a file only if the file entry includes
+   the subset icon.
+#. Click the *Access Subset/Analysis* link associated with the
+   selected file.
+   If prompted, check the *I accept* box and click Continue to accept
+   the Terms of Use.
+   You see the Data File page listing data for the file that you choose
+   to subset or analyze.
+
+**View Variable Quick Summary**
+
+When a subsettable data file is uploaded for a study, the Dataverse
+Network code calculates summary statistics for each variable within that
+data file. On any tab of the Data File page, you can view the summary
+statistics for each variable in the data file. Information listed
+comprises the following:
+
+-  For continuous variables, the application calculates summary
+   statistics that are listed in the DDI schema.
+-  For discrete variables, the application tabulates values and their
+   labels as a frequency table.
+   Note, however, that if the number of categories is more than 50, the
+   values are not tabulated.
+-  The UNF value for each variable is included.
+
+To view summary statistics for a variable:
+
+#. In the Data File page, click any tab.
+#. In the variable list at the bottom of the page, locate a variable
+   for which you choose to view summary statistics; the right column is
+   labeled *Quick Summary*.
+   Then, click the Quick Summary icon for that variable to toggle the
+   statistics information on and off.
+   You see a small chart that lists information about that variable. The
+   information provided depends upon the variable selected.
+
+**Download Tabular Subsets**
+
+You can download a subset of variables within a tabular-data study file.
+You also can recode a subset of those variables and download the recoded
+subset, if you choose.
+
+To download a subset of variables in tabular data:
+
+#. In the Data File page, click the Download Subset tab.
+#. Click the radio button for the appropriate File Format in which to
+   download the variables: Text, R Data, S plus, or Stata.
+#. On the right side of the tab, use the Show drop-down list to select
+   the quantities of variables to list at one time: 10, 20, 50, or All.
+#. Scroll down the screen and click the check boxes to select variables
+   from the table of available values. When you select a variable, it is
+   added to the Selected Variables box at the top of the tab.
+   To remove a variable from this box, deselect it from the Variable
+   Type list at the bottom of the screen.
+   To select all variables, click the check box beside the column name,
+   Variable Type.
+#. Click the *Create Zip File* button.
+   The *Create Zip File* button label changes to the following
+   format: ``zipFile_<number>.zip``.
+#. Click the ``zipFile_<number>.zip`` button and follow your browser's
+   prompts to open or save the data file to your computer's disk drive.
+
+.. _apply-descriptive-statistics:
+
+**Apply Descriptive Statistics**
+
+When you run descriptive statistics for data, you can do any of the
+following with the analysis results:
+
+-  Open the results in a new window to save or print the results.
+-  Download the R workspace in which the statistics were analyzed, for
+   replication of the analysis. See Replicate Analysis for more
+   information.
+-  View citation information for the data analyzed, and for the full
+   data set from which you selected variables to analyze. See View
+   Citations for more information.
+
+To apply descriptive statistics to a data set or subset:
+
+#. In the Data File page, click the Descriptive Statistics tab.
+#. Click one or both of the Descriptive Statistics options: Univariate
+   Numeric Summaries and Univariate Graphic Summaries.
+#. On the right side of the tab, use the Show drop-down list to select
+   one of the following options to show variables in predefined
+   quantities: 10, 20, 50, or All.
+#. Scroll down the screen and click the check boxes to select variables
+   from the table of available values. When you select a variable, it is
+   added to the Selected Variables box at the top of the tab.
+   To remove a variable from this box, deselect it from the Variable
+   Type list at the bottom of the screen.
+   To select all variables, click the check box beside the column name,
+   Variable Type.
+#. Click the Run Statistics button.
+   You see the Dataverse Analysis page.
+#. To save or print the results, scroll to the Descriptive Statistics
+   section and click the link *Open results in a new window*. You then
+   can print or save the window contents.
+   To save the analysis, scroll to the Replication section and click the
+   button ``zipFile_<number>.zip``.
+   Review the Citation Information for the data set and for the subset
+   that you analyzed.
+#. Click the link *Back to Analysis and Subsetting* to return to the
+   previous page and continue analysis of the data.
+
+**Recode and Case-Subset Tabular Data**
+
+Review the Tabular Data Recode and Subset Tips before you start work
+with a study's files.
+
+To recode and subset variables within a tabular data set:
+
+#. In the Data File page, click the Recode and Case-Subsetting tab.
+#. On the right side of the variable list, use the Show drop-down list
+   and select one of the following options to show variables in
+   predefined quantities: 10, 20, 50, or All.
+#. Scroll down the screen and click the check boxes to select variables
+   from the table of available values. When you select a variable, it is
+   added to the Selected Variables box at the top of the tab.
+   To remove a variable from this box, deselect it from the Variable
+   Type list at the bottom of the screen.
+   To select all variables, click the check box beside the column name,
+   Variable Type.
+#. Select one variable in the Selected Variables box, and then
+   click *Start*.
+   The existing name and label of the variable appear in the New
+   Variable Name and New Variable Label boxes.
+#. In the New Variable Name field, change the variable name to a unique
+   value that is not used in the data file.
+   The new variable label is optional.
+#. In the table below the Variable Name fields, you can check one or
+   more values to drop them from the subset, or enter new values,
+   labels, or ranges (as a condition) as needed. Click the Add
+   Value/Range button to create more entries in the value table.
+   Note: Click the ``?`` Info buttons to view tips on how to use the
+   Recode and Subset table. Also, See Tabular Data Recode and Subset
+   Tips for more information about adding values and ranges.
+#. Click the Apply Recodes button.
+   Your renamed variables appear at the bottom of the page in the List
+   of Recode Variables.
+#. Select another variable in the Selected Variables box, click the
+   Start button, and repeat the recode action.
+   Repeat this process for each variable that you choose to recode.
+#. To remove a recoded variable, scroll to the List of Recode Variables
+   at the bottom of the page and click the Remove link for the recoded
+   variable that you choose to delete from your subset.
+
+.. _perform-advanced-analysis:
+
+**Perform Advanced Analysis**
+
+When you run advanced statistical analysis for data, you can do any of
+the following with the analysis results:
+
+-  Open the results in a new window to save or print the results.
+-  Download the R workspace in which the statistics were analyzed, for
+   replication of the analysis. See Replicate Analysis for more
+   information.
+-  View citation information for the data analyzed, and for the full
+   data set from which you selected variables to analyze. See View
+   Citations for more information.
+
+To run statistical models for selected variables:
+
+#. In the Data File page, click the Advanced Statistical Analysis tab.
+#. Scroll down the screen and click the check boxes to select variables
+   from the table of available values. When you select a variable, it is
+   added to the Selected Variables box at the top of the tab.
+   To remove a variable from this box, deselect it from the Variable
+   Type list at the bottom of the screen.
+   To select all variables, click the check box beside the column name,
+   Variable Type.
+#. Select a model from the Choose a Statistical Model drop-down list.
+#. Select one variable in the Selected Variables box, and then click the
+   applicable arrow button to assign a function to that variable from
+   within the analysis model.
+   You see the name of the variables in the appropriate function box.
+   Note: Some functions allow a specific type of variable only, while
+   other functions allow multiple variable types. Types include
+   Character, Continuous, and Discrete. If you assign an incorrect
+   variable type to a function, you see an ``Incompatible type`` error
+   message.
+#. Repeat the variable and function assignments until your model is
+   complete.
+#. Select your Output options.
+#. Click the Run Model button.
+   If the statistical model that you defined is incomplete, you first
+   are prompted to correct the definition. Correct your model, and then
+   click Run Model again.
+   You see the Dataverse Analysis page.
+#. To save or print the results, scroll to the Advanced Statistical
+   Analysis section and click the link *Open results in a new window*.
+   You then can print or save the window contents.
+   To save the analysis, scroll to the Replication section and click the
+   button ``zipFile_<number>.zip``.
+   Review the Citation Information for the data set and for the subset
+   that you analyzed.
+#. Click the link *Back to Analysis and Subsetting* to return to the
+   previous page and continue analysis of the data.
+
+**Replicate Analysis**
+
+You can save the R workspace in which the Dataverse Network performed an
+analysis. You can download the workspace as a zipped archive that
+contains four files. Together, these files enable you to recreate the
+subset analysis in another R environment:
+
+-  ``citationFile.<identifier>.txt`` - The citation for the subset that you analyzed.
+-  ``rhistoryFile.<identifier>.R`` - The R code used to perform the analysis.
+-  ``tempsubsetfile.<identifier>.tab`` - The tab-delimited subset data that you analyzed.
+-  ``tmpRWSfile.<identifier>.RData`` - The R workspace object used to perform the analysis.
+
+To download this workspace for your analysis:
+
+#. For any subset, Apply Descriptive Statistics or Perform Advanced
+   Analysis.
+#. On the Dataverse Analysis or Advanced Statistical Analysis page,
+   scroll to the Replication section and click the
+   button ``zipFile_<number>.zip``.
+#. Follow your browser's prompts to save the zipped archive.
+   When the archive file is saved to your local storage, extract the
+   contents to use the four files that compose the R workspace.
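+
+A minimal sketch of recreating the analysis in R from the directory
+holding the extracted files (replace ``<identifier>`` with the
+identifier embedded in your actual file names)::
+
+    # Restore the saved workspace, then replay the recorded analysis.
+    load("tmpRWSfile.<identifier>.RData")
+    source("rhistoryFile.<identifier>.R", echo = TRUE)
+
+    # The accompanying citation file documents the subset analyzed:
+    file.show("citationFile.<identifier>.txt")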
+
+**Statistical Analysis Models**
+
+You can apply any of the following advanced statistical models to all or
+some variables in a tabular data set:
+
+Categorical data analysis: Cross tabulation
+
+Ecological inference model: Hierarchical multinomial-Dirichlet
+ecological inference for R x C tables
+
+Event count models, for event count dependent variables:
+
+-  Negative binomial regression
+-  Poisson regression
+
+Models for continuous bounded dependent variables:
+
+-  Exponential regression for duration
+-  Gamma regression for continuous positives
+-  Log-normal regression for duration
+-  Weibull regression for duration
+
+Models for continuous dependent variables:
+
+-  Least squares regression
+-  Linear regression for left-censoreds
+
+Models for dichotomous dependent variables:
+
+-  Logistic regression for binaries
+-  Probit regression for binaries
+-  Rare events logistic regression for binaries
+
+Models for ordinal dependent variables:
+
+-  Ordinal logistic regression for ordered categoricals
+-  Ordinal probit regression for ordered categoricals
+
+**Tabular Data Recode and Subset Tips**
+
+Use the following guidelines when working with tabular data files:
+
+-  Recoding:
+
+   -  You must fill at least the first (new value) and last (condition)
+      columns of the table; the second column is optional and for a new
+      value label.
+   -  If the old variable you chose for recoding has information about
+      its value labels, you can prefill the table with these data for
+      convenience, and then modify these prefilled data.
+   -  To exclude a value from your recoding scheme, click the Drop check
+      box in the row for that value.
+
+-  Subsetting:
+
+   -  If the variable you chose for subsetting has information about its
+      value labels, you can prefill the table with these data for
+      convenience.
+   -  To exclude a value in the last column of the table, click the Drop
+      check box in the row for that value.
+   -  To include a particular value or range, enter it in the last
+      column whose header shows the name of the variable for subsetting.
+
+-  Entering a value or range as a condition for subsetting or recoding:
+
+   -  Suppose the variable you chose for recoding is x.
+      If your condition is x==3, enter ``3``.
+      If your condition is x < -3, enter ``(--3``.
+      If your condition is x > -3, enter ``-3-)``.
+      If your condition is -3 < x < 3, enter ``(-3, 3)``.
+   -  Use square brackets (``[]``) for closed ranges.
+   -  You can enter non-overlapping values and ranges separated by a
+      comma, such as ``0,[7-9]``.
+
+.. _network-data:
+
+Network Data
+--------------
+
+Network data files (subsettable files) can be subsetted and analyzed
+online by using the Dataverse Network application. For analysis, the
+Dataverse Network offers generic network data analysis. A list of
+Network Analysis Models is provided.
+
+Note: All subsetting and analysis options for network data assume a
+network with undirected edges.
+
+After you find the network data set that you want, access the Subset and
+Analysis options to use the online tools. Then, you can subset data
+by *vertices* or *edges*, download subsets, and apply network
+measures.
+
+**Access Network Subset and Analyze Options**
+
+You can subset and analyze network data files before you download the
+file or your subsets. To access the Subset and Analysis options for a
+network data set:
+
+#. Click the title of the study from which you choose to analyze or
+   download a file or subset.
+#. Click the Documentation, Data and Analysis tab for the study.
+#. In the list of study files, locate the network data file that you
+   choose to download, subset, or analyze. You can download data sets
+   for a file only if the file entry includes the subset icon.
+#. Click the \ *Access Subset/Analysis* link associated with the
+   selected file. If prompted, check the \ *I accept* box and click
+   Continue to accept the Terms of Use.
+   You see the Data File page listing data for the file that you choose
+   to subset or analyze.
+
+**Subset Network Data**
+
+There are two ways in which you can subset network data. First, you can
+run a manual query, and build a query of specific values for edge or
+vertex data with which to subset the data. Or, you can select from among
+the automatically generated queries with which to subset the data:
+
+-  Largest graph - Subset the <nth> largest connected component of the
+   network. That is, the largest group of nodes that can reach one
+   another by walking across edges.
+-  Neighborhood - Subset the <nth> neighborhood of the selected
+   vertices. That is, generate a subgraph of the original network
+   composed of all vertices that are positioned at most <n> steps away
+   from the currently selected vertices in the original network, plus
+   all of the edges that connect them.
+
+You also can successively subset data to isolate specific values
+progressively.
+
+Continue to the next topics for detailed information about subsetting a
+network data set.
+
+**Subset Manually**
+
+Perform a manual query to slice a graph based on the attributes of its
+vertices or edges. You choose whether to subset the graph based on
+vertices or edges, then use the Manual Query Builder or free-text Query
+Workspace fields to construct a query based on that element's
+attributes. A single query can pertain only to vertices or only to
+edges, never both. You can perform separate, sequential vertex or edge
+queries.
+
+When you perform a vertex query, all vertices whose attributes do not
+satisfy the query are dropped from the graph, in addition to all edges
+that touch them. When you perform an edge query, all edges whose
+attributes do not satisfy the criteria are dropped, but all vertices
+remain *unless* you enable the *Eliminate disconnected vertices* check box. Note that enabling this option drops all
+disconnected vertices whether or not they were disconnected before the
+edge query.
+
+Review the Network Data Tips before you start work with a study's files.
+
+To subset variables within a network data set by using a manually
+defined query:
+
+#. In the Data File page, click the Manual Query radio button near the
+   top of the page.
+#. Use the Attribute Set drop-down list and select Vertex to subset by
+   node or vertex values.
+   Select Edge to subset by edge values.
+#. Build the first attribute selection value in the Manual Query Builder
+   panel:
+
+   #. Select a value in the Attributes list to assign values on which to
+      subset.
+   #. Use the Operators drop-down list to choose the function by which
+      to define attributes for selection in this query.
+   #. In the Values field, type the specific values to use for selection
+      of the attribute.
+   #. Click *Add to Query* to complete the attribute definition for
+      selection.
+      You see the query string for this attribute in the Query Workspace
+      field.
+
+   Alternatively, you can enter your query directly by typing it into
+   the Query Workspace field.
+
+#. Continue to add selection values to your query by using the Manual
+   Query Builder tools.
+#. To remove any vertices that do not connect with other data in the
+   set, check the \ *Eliminate disconnected vertices* check box.
+#. When you complete construction of your query string, click \ *Run* to
+   perform the query.
+#. Scroll to the bottom of the window, and when the query is processed
+   you see a new entry in the Subset History panel that defines your
+   query.
+
+Continue to build a successive subset or download a subset.
+
+**Subset Automatically**
+
+Perform an Automatic Query to select a subgraph of the network based on
+structural properties of the network. Remember to review the Network
+Data Tips before you start work with a study's files.
+
+To subset variables within a network data set by using an automatically
+generated query:
+
+#. In the Data File page, click the Automatic Query radio button near
+   the middle of the page.
+#. Use the Function drop-down list and select the type of function with
+   which to select your subset:
+
+   -  Largest graph - Subset the <nth> largest group of nodes that can
+      reach one another by walking across edges.
+   -  Neighborhood - Generate a subgraph of the original network
+      composed of all vertices that are positioned at most <n> steps
+      away from the currently selected vertices in the original network,
+      plus all of the edges that connect them. This is the only query
+      that can (and generally does) increase the number of vertices and
+      edges selected.
+
+#. In the Nth field, enter the <nth> degree with which to select data
+   using that function.
+#. Click \ *Run* to perform the query.
+#. Scroll to the bottom of the window, and when the query is processed
+   you see a new entry in the Subset History panel that defines your
+   query.
+
+Continue to build a successive subset or download a subset.
+
+**Build or Restart Subsets**
+
+**Build a Subset**
+
+To build successive subsets and narrow your data selection
+progressively:
+
+#. Perform a manual or automatic subset query on a selected data set.
+#. Perform a second query to further narrow the results of your previous
+   subset activity.
+#. When you arrive at the subset with which you choose to work, continue
+   to analyze or download that subset.
+
+**Undo Previous Subset**
+
+You can reset, or undo, the most recent subsetting action for a data
+set. Note that you can do this only one time, and only to the most
+recent subset.
+
+Scroll to the Subset History panel at the bottom of the page and
+click \ *Undo* in the last row of the list of successive subsets.
+The last subset is removed, and the previous subset is available for
+downloading, further subsetting, or analysis.
+
+**Restart Subsetting**
+
+You can remove all subsetting activity and restore data to the original
+set.
+
+Scroll to the Subset History panel at the bottom of the page and
+click \ *Restart* in the row labeled \ *Initial State*.
+The data set is restored to the original condition, and is available
+for downloading, subsetting, or analysis.
+
+**Run Network Measures**
+
+When you finish selecting the specific data that you choose to analyze,
+run a Network Measure analysis on that data. Review the Network Data
+Tips before you start your analysis.
+
+#. In the Data File page, click the Network Measure radio button near
+   the bottom of the page.
+#. Use the Attributes drop-down list and select the type of analysis to
+   perform:
+
+   -  Page Rank - Determine how much influence comes from a specific
+      actor or node.
+   -  Degree - Determine the number of relationships or collaborations
+      that exist within a network data set.
+   -  Unique Degree - Determine the number of collaborators that exist.
+   -  In Largest Component - Determine the largest component of a
+      network.
+   -  Bonacich Centrality - Determine the importance of a main actor or
+      node.
+
+#. In the Parameters field, enter the specific value with which to
+   subset data using that function:
+
+   -  Page Rank - Enter a value for the parameter <d>, a proportion,
+      between 0 and 1.
+   -  Degree - Enter the number of relationships to extract from a
+      network data set.
+   -  Unique Degree - Enter the number of unique relationships to
+      extract.
+   -  In Largest Component - Enter the number of components to extract
+      from a network data set, starting with the largest.
+
+#. Click *Run* to perform the analysis.
+#. Scroll to the bottom of the window, and when the analysis is
+   processed you see a new entry in the Subset History panel that
+   contains your analyzed data.
+
+Continue to download the analyzed subset.
+
+**Download Network Subsets or Measures**
+
+When you complete subsetting and analysis of a network data set, you can
+download the final set of data. Network data subsets are downloaded in a
+zip archive, which has the name ``subset_<original file name>.zip``.
+This archive contains three files:
+
+-  ``subset.xml`` - A GraphML formatted file that contains the final
+   subsetted or analyzed data.
+-  ``verticies.tab`` - A tabular file that contains all node data for
+   the final set.
+-  ``edges.tab`` - A tabular file that contains all relationship data
+   for the final set.
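+
+As a local-inspection sketch (assuming the R ``igraph`` package is
+installed), the extracted GraphML file can be loaded like this::
+
+    library(igraph)
+
+    # Read the GraphML subset and report its size.
+    g <- read_graph("subset.xml", format = "graphml")
+    vcount(g)  # number of vertices
+    ecount(g)  # number of edges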
+
+Note: Each time you download a subset of a specific network data set, a
+zip archive is downloaded that has the same name. All three zipped files
+within that archive also have the same names. Be careful not to
+overwrite a downloaded data set that you choose to keep when you perform
+successive downloads.
+
+To download a final set of data:
+
+#. Scroll to the Subset History panel on the Data File page.
+#. Click *Download Latest Results* at the bottom of the history list.
+#. Follow your browser's prompts to open or save the data file to your
+   computer's disk drive. Be sure to save the file in a unique location
+   to prevent overwriting an existing downloaded data file.
+
+**Network Data Tips**
+
+Use these guidelines when subsetting or analyzing network data:
+
+-  For a Page rank network measure, the value for the parameter <d> is a
+   proportion and must be between 0 and 1. Higher values of <d> increase
+   dispersion, while values of <d> closer to zero produce a more uniform
+   distribution. PageRank is normalized so that all of the PageRanks sum
+   to 1.
+-  For a Bonacich Centrality network measure, the alpha parameter is a
+   proportion that must be between -1 and +1. It is normalized so that
+   all alpha centralities sum to 1.
+-  For a Bonacich Centrality network measure, the exo parameter must be
+   greater than 0. A higher value of exo produces a more uniform
+   distribution of centrality, while a lower value allows more
+   variation.
+-  For a Bonacich Centrality network measure, the original alpha
+   parameter of alpha centrality takes values only from -1/lambda to
+   1/lambda, where lambda is the largest eigenvalue of the adjacency
+   matrix. In this Dataverse Network implementation, the alpha parameter
+   is rescaled to be between -1 and 1 and represents the proportion of
+   1/lambda to be used in the calculation. Thus, entering alpha=1 sets
+   alpha to be 1/lambda. Entering alpha=0.5 sets alpha to be
+   1/(2\*lambda).
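+
+For orientation, <d> plays the role of the damping factor in the
+standard PageRank recurrence, sketched below for an undirected network
+(the exact implementation may differ in details such as the handling
+of isolated vertices):
+
+.. math::
+
+   PR(v_i) = \frac{1 - d}{N} + d \sum_{v_j \in N(v_i)} \frac{PR(v_j)}{\deg(v_j)}
+
+Here :math:`N` is the number of vertices, :math:`N(v_i)` is the set of
+neighbors of :math:`v_i`, and :math:`\deg(v_j)` is the degree of
+:math:`v_j`; with this normalization the PageRanks sum to 1.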
+
+Data Visualization
+==================
+
+Data Visualization allows contributors to make time series
+visualizations available to end users. These visualizations may be
+viewable and downloadable as graphs or data tables. Please see the
+appropriate guide for more information on setting up a visualization or
+viewing one.
+
+Explore Data
+--------------
+
+The study owner may make a data visualization interface available to
+those who can view a study.  This will allow you to select various data
+variables and see a time series graph or data table.  You will also be
+able to download your custom graph for use in your own reports or
+articles.
+
+The study owner will at least provide a list of data measures from which
+to choose.  These measures may be divided into types; if they are, you
+will be able to narrow the list of measures by first selecting a measure
+type.  Once you have selected a measure, if there are multiple variables
+associated with the measure you will be able to select one or more
+filters to uniquely identify a variable.  By default, any filter assigned
+to a variable will become the label associated with the variable in the
+graph or table.  Pressing the Add Line button adds the selected variable
+to your custom graph.
+
+  |image0|
+
+Once you have added data to your graph you will be able to customize it
+further.  You will be given a choice of display options made available
+by the study owner.  These may include an interactive Flash graph, a
+static image graph, and a numerical data table.  You will also be
+allowed to edit the graph title, which by default is the name of the
+measure or measures selected.  You may also edit the Source Label.
+Other customizable features are the height and the legend location of
+the image graph.  You may also select a subset of the data by selecting
+the start and end points of the time series.  Finally, on the Display
+tab you may opt to display the series as indices, in which case a single
+data point known as the reference period will be designated as 100 and
+all other points of the series will be calculated relative to the
+reference period.  If you select data points that do not have units in
+common (e.g., one is in percent while the other is in dollars), then the
+display will automatically be set to indices with the earliest common
+data point as the default reference period.
+
+|image1| 
+
+On the Line Details tab you will see additional information on the data
+you have selected.  This may include links to outside web pages that
+further explain the data.  On this tab you will also be able to edit the
+label or delete the line from your custom graph.
+
+On the Export tab you will be given the opportunity to export your
+custom graph and/or data table.   If you select multiple files for
+download they will be bound together in a single zip file. 
+
+The Refresh button clears any data that you have added to your custom
+graph and resets all of the display options to their default values.
+
+Set Up
+--------
+
+This feature allows you to make time series visualizations available to
+your end users.   These visualizations may be viewable and downloadable
+as graphs or data tables.  In the current beta version of the feature,
+your data file must be subsettable and must contain at least one date
+field and one or more measures.  You will be able to associate data
+fields from your file to a time variable and multiple measures and
+filters. 
+
+When you select Set Up Exploration from within a study, you must first
+select the file for which you would like to set up the exploration.  The
+list of files will include all subsettable data files within the study.
+
+Once you have selected a file you will go to a screen that has 5 tabs to
+guide you through the data visualization set-up. (In general, changes
+made to a visualization on the individual tabs are not saved to the
+database until the form’s Save button is pressed.  When you are in add
+or edit mode on a tab, the tab will have an update or cancel button to
+update the “working copy” of a visualization or cancel the current
+update.)
+
+If you have previously set up an exploration for a data file, you may copy that exploration to a new file.
+When you select a file for set up you will be asked if you want to copy an exploration from another data file 
+and will be presented a list of files from which to choose.  Please note that the data variable names must 
+be identical in both files for this migration to work properly. 
+
+**Time Variable**
+
+On the first tab you select the time variable of your data file.  The
+variable list will only include those variables that are date or time
+variables.  These variables must contain a date in each row.  You may
+also enter a label in the box labeled Units.  This label will be
+displayed under the x-axis of the graph created by the end user.
+
+|image2|
+
+**Measures**
+
+On the Measures tab you may assign measures to the variables in your
+data file.  First you may customize the label that the end user will see
+for measures.  Next you may add measures by clicking the “Add Measure”
+link.  Once you click that link you must give your measure a unique
+name.  Then you may assign Units to it.  Units will be displayed as the
+y-axis label of any graph produced containing that measure.  To help
+organize the measures, you may create measure types and assign your
+measures to one or more measure types.  Finally, the
+list of variables for measures will include all those variables that are
+entered as numeric in your data file.  If you assign multiple variables
+to the same measure you will have to distinguish between them by
+assigning appropriate filters.   For the end user, the measure will be
+the default graph name.
+
+|image3|  
+
+**Filters**
+
+On the filters tab you may assign filters to the variables in your data
+file.  Generally filters contain demographic, geographic or other
+identifying information about the variables.  For a given group of
+filters only one filter may be assigned to a single variable.  The
+filters assigned to a variable must be sufficient to distinguish among
+the variables assigned to a single measure.   Similar to measures,
+filters may be assigned to one or more types.   For the end user the
+filter name will be the default label of the line of data added to a
+graph.
+
+|image4|
+
+
+**Sources**
+
+On the Sources tab you can indicate the source of each of the variables
+in your data file.  By default, the source will be displayed as a note
+below the x-axis labels.  You may assign a single source to any or all
+of your data variables.  You may also assign multiple sources to any of
+your data variables.
+
+|image5|
+
+
+**Display**
+
+On the Display tab you may customize what the end user sees in the Data
+Visualization interface.  Options include the data visualization formats
+made available to the end user and default view, the Measure Type label,
+and the Variable Info Label.
+
+|image6|
+
+**Validate Button**
+
+When you press the “Validate” button the current state of your
+visualization data will be validated.  In order to pass validation your
+data must have one time variable defined.  There must also be at least
+one measure variable assigned.  If more than one variable is assigned to
+a given measure then filters must be assigned such that each single
+variable is defined by the measure and one or more filters.  If the data
+visualization does not pass validation a detailed error message
+enumerating the errors will be displayed.
+
+**Release Button**
+
+Once the data visualization has been validated, you may release it to
+end users by pressing the “Release” button.  The Release button also
+performs a validation.  Invalid visualizations will not be released,
+but, unlike the Validate button, no detailed error message will be
+produced.
+
+**Save Button**
+
+The “Save” button will save any changes made to a visualization on the
+tabs to the database.  If a visualization has been released and changes
+are saved that would make it invalid, the visualization will be set to
+“Unreleased”.
+
+**Exit Button**
+
+To exit the form press the “Exit” button.  You will be warned if you
+have made any unsaved changes.
+
+**Examples**
+
+Simplest case – a single measure associated with a single variable.
+
+Data variable contains information on average family income for all
+Americans.  The end user of the visualization will see an interface as
+below:
+
+|image7|
+
+Complex case - multiple measures and types along with multiple filters
+and filter types.  If you have measures related to both income and
+poverty rates you can set them up as measure types and associate the
+appropriate measures with each type.  Then, if you have variables
+associated with multiple demographic groups you can set them up as
+filters.  You can set up filter types such as age, gender, race and
+state of residence.  Some of your filters may belong to multiple types
+such as males age 18-34.
+
+|image8|
+
+.. |image0| image:: ./datausers-guides_files/measure_selected.png
+.. |image1| image:: ./datausers-guides_files/complex_graph_screenshot.png
+.. |image2| image:: ./datausers-guides_files/edittimevariablescreenshot.png
+.. |image3| image:: ./datausers-guides_files/editmeasuresscreenshot.png
+.. |image4| image:: ./datausers-guides_files/editfiltersscreenshot.png
+.. |image5| image:: ./datausers-guides_files/sourcetabscreenshot.png
+.. |image6| image:: ./datausers-guides_files/displaytabscreenshot.png
+.. |image7| image:: ./datausers-guides_files/simple_explore_data.png
+.. |image8| image:: ./datausers-guides_files/complex_exploration.png
+
+
+Dataverse Administration
+++++++++++++++++++++++++++++
+
+Once a user creates a dataverse, that user becomes its owner and
+therefore the administrator of that dataverse. The dataverse
+administrator has access to manage the settings described in this guide.
+
+Create a Dataverse
+=====================
+
+A dataverse is a container for studies and is the home for an individual
+scholar's or organization's data.
+
+Creating a dataverse is easy but first you must be a registered user.
+Depending on site policy, there may be a "Create a Dataverse" link on
+the Network home page. This first walks you through creating an account,
+then a dataverse. 
+
+1. Fill in the required information:
+
+ * **Type of Dataverse**: Choose Scholar if it represents an individual's work; otherwise choose Basic.
+ * **Dataverse Name**: This will be displayed on the network and dataverse home pages. If this is a Scholar dataverse, it will automatically be filled in with the scholar's first and last name.
+ * **Dataverse Alias**: This is an abbreviation, usually lower-case, that becomes part of the URL for the new dataverse.
+
+  The required fields to create a dataverse are configurable in the Network Options, so fields that are required may also include
+  Affiliation, Network Home Page Description, and Classification.
+ 
+2. Click "Save" and you're done! An email will be sent to you with more information, including the URL to access your new dataverse.
+
+\*Required information can vary depending on site policy. Required fields are noted with a **red asterisk**.
+
+Edit General Settings
+=====================
+
+Use the General Settings tab on the Options page to release your
+dataverse and to change its name, alias, and classification. The
+classifications are used to browse to your dataverse from the Network
+home page.
+
+Navigate to the General Settings from the Options page:
+
+Dataverse home page > Options page > Settings tab > General subtab
+
+To release your dataverse:
+
+Select *Released* from the drop-down list when your dataverse is ready
+to go public. Select *Not Released* if you wish to block public access
+to your dataverse.
+
+Your dataverse cannot be released if it does not contain any released
+studies. Create a study or define a collection with studies from other
+dataverses before you attempt to make your dataverse public.
+
+To edit the affiliation, name, or alias settings of your dataverse:
+
+If you edit a Scholar dataverse type, you can edit the following fields:
+
+-  First Name - Edit your first name, which appears with your last name
+   on the Network home page in the Scholar Dataverse group.
+-  Last Name - Edit your last name, which appears with your first name
+   on the Network home page in the Scholar Dataverse group.
+
+If you edit either Scholar or basic types, you can edit any of the
+following fields:
+
+-  Affiliation - Edit your institutional identity.
+-  Dataverse Name - Edit the title for your dataverse, which appears on
+   your dataverse home page. There are no naming restrictions.
+-  Dataverse Alias - Edit your dataverse's URL. Special characters
+   (~,\`, !, @, #, $, %, ^, &, and \*) and spaces are not allowed.
+   **Note**: if you change the Dataverse Alias field, the URL for your
+   Dataverse changes (http://.../dv/'alias'), which affects links to this
+   page.
+-  Network Home Page Description - Edit the text that appears beside the
+   name of your dataverse on the Network home page.
+-  Classification - Check the classifications, or groups, in which you
+   choose to include your dataverse. Remove the check for any
+   classifications that you choose not to join.
+
+.. _edit-layout-branding:
+
+Edit Layout Branding
+====================
+
+**Customize Layout Branding (header/footer) to match your website**
+
+The Layout Branding allows you to customize your dataverse, by
+**adding HTML to the default banner and footer**, such as that used on
+your personal website. If your website has such layout elements as a
+navigation menu or images, you can add them here. Each dataverse is
+created with a default customization added, which you can leave as is,
+edit to change the background color, or add your own customization.
+
+Navigate to the Layout Branding from the Options page:
+
+``Dataverse home page > Options page > Settings tab > Customization subtab``
+
+To edit the banner and footer of your dataverse:
+
+#. In the Custom Banner field, enter plain text and HTML to define
+   your custom banner.
+#. In the Custom Footer field, enter plain text and HTML to define
+   your custom footer.
+
+**Embed your Dataverse into your website (iframes)**
+
+Want to embed your Dataverse on an OpenScholar site? Follow :ref:`these special instructions <openscholar>`.
+
+If you are a dataverse admin with more advanced HTML skills, or have an
+HTML developer available to assist you, you can create a page on
+your site and add the dataverse with an iframe.
+
+1. Create a new page that you will host on your site.
+2. Add the following HTML code to the content area of that new
+   page.
+   
+
+  | ``<script type="text/javascript">``
+  | ``var dvn_url = "[SAMPLE_ONLY_http://dvn.iq.harvard.edu/dvn/dv/sampleURL]";``
+  | ``var regexS = "[\\?&]dvn_subpage=([^&#]*)";``
+  | ``var regex = new RegExp( regexS );``
+  | ``var results = regex.exec( window.location.href );``
+  | ``if( results != null ) dvn_url = dvn_url + results[1];``
+  | ``document.write('<iframe src="' + dvn_url + '" onLoad="set_dvn_url(this)" width="100%" height="600px" frameborder="0" style="background-color:#FFFFFF;"></iframe>');``
+  | ``</script>``
+
+3. Edit that code by adding the URL of your dataverse (replace the
+   SAMPLE\_ONLY URL in the example, including the brackets “[ ]”), and
+   adjusting the height.  We suggest you keep the height at or under
+   600px so the iframe fits into browser windows on computer
+   monitors of all sizes, with various screen resolutions.
+#. The dataverse is set to have a min-width of 724px, so try to give the
+   page a width closer to 800px.
+#. Once you have the page created on your site, with the iframe code, go
+   to the Settings tab, then the Customization subtab on your dataverse
+   Options page, and click the checkbox that disables customization for
+   your dataverse.
+#. Then enter the URL of the new page on your site. That will redirect
+   all users to the new page on your site.
+
+**Layout Branding Tips**
+
+-  HTML markup, including ``script`` tags for JavaScript and ``style``
+   tags for an internal style sheet, is permitted. The ``html``,
+   ``head``, and ``body`` element tags are not allowed.
+-  When you use an internal style sheet to insert CSS into your
+   customization, it is important to avoid using universal ("``*``\ ")
+   and type ("``h1``\ ") selectors, because these can overwrite the
+   external style sheets that the dataverse is using, which can break
+   the layout, navigation or functionality in the app.
+-  When you link to files, such as images or pages on a web server
+   outside the network, be sure to use the full URL (e.g.
+   ``http://www.mypage.com/images/image.jpg``).
+-  If you recreate content from a website that uses frames to combine
+   content on the sides, top, or bottom, then you must substitute the
+   frames with ``table`` or ``div`` element types. You can open such an
+   element in the banner field and close it in the footer field.
+-  Each time you click "Save", your banner and footer automatically are
+   validated for HTML and other code errors. If an error message is
+   displayed, correct the error and then click "Save" again.
+-  You can use the banner or footer to house a link from your homepage
+   to your personal website. Be sure to wait until you release your
+   dataverse to the public before you add any links to another website.
+   And, be sure to link back from your website to your homepage.
+-  If you are using an OpenScholar or iframe site and the redirect is
+   not working, you can edit your branding settings by adding a flag to
+   your dataverse URL: disableCustomization=true. For example:
+   ``dvn.iq.harvard.edu/dvn/dv/mydv?disableCustomization=true``. To
+   reenable: ``dvn.iq.harvard.edu/dvn/dv/mydv?disableCustomization=false``.
+   Disabling the customization lasts for the length of the user session.
+
+Edit Description
+==================
+
+The Description is displayed on your dataverse Home page. Use this
+field to display announcements or messaging.
+
+Navigate to the Description from the Options page:
+
+``Dataverse home page > Options page > Settings tab > General subtab > Home Page Description``
+
+To change the content of this description:
+
+-  Enter your description or announcement text in the field provided.
+   Note: A light blue background in any form field indicates HTML, JavaScript, and style tags are permitted. The ``html``, ``head``, and ``body`` element types are not allowed.
+
+Prior to the version 3.0 release of the Dataverse Network, the
+Description had a character limit of 1000, which would truncate
+longer descriptions with a **more >>** link. This functionality has been
+removed, so you can add as much text or code to this field as you
+wish. If you would like to add the character limit and truncate
+functionality back to your dataverse, just add this snippet of
+JavaScript to the end of your description.
+
+
+ | ``<script type="text/javascript">``
+ |       ``jQuery(document).ready(function(){``
+ |           ``jQuery(".dvn_hmpgMainMessage span").truncate({max_length:1000});``
+ |      ``});``
+ | ``</script>``
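+
+For example, to truncate at 500 characters instead of 1000, change
+``max_length:1000`` to ``max_length:500`` in the snippet above.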
+
+.. _edit-study-comments-settings:
+
+Edit Study Comments Settings
+============================
+
+You can enable or disable the Study User Comments feature in your
+dataverse. If you enable Study User Comments, any user has the option to
+add a comment to a study in this dataverse. By default, this feature is
+enabled in all new dataverses. Note that you should ensure there are
+terms of use at the network or dataverse level that define acceptable
+use of this feature if it is enabled.
+
+Navigate to the Study User Comments from the Options page:
+
+``Dataverse home page > Options page > Settings tab > General subtab > Allow Study Comments``
+
+A user must create an account in your dataverse to use the comment
+feature. When you enable this feature, be aware that new accounts will
+be created in your dataverse when users add comments to studies. In
+addition, the Report Abuse function in the comment feature is managed by
+the network admin. If a user reads a comment that might be
+inappropriate, that user can log in or register an account and access
+the Report Abuse option. Comments are reported as abuse to the network
+admin.
+
+To manage the Study User Comments feature in your dataverse:
+
+-  Click the "Allow Study Comments" check box to enable comments.
+-  Click the checked box to remove the check and disable comments.
+
+Manage E-Mail Notifications
+===========================
+
+You can edit the e-mail address used on your dataverse’s Contact Us page
+and by the network when sending notifications on processes and errors.
+By default, the e-mail address used is from the user account of the
+dataverse creator.
+
+Navigate to the E-Mail Notifications from the Options page:
+
+``Dataverse home page > Options page > Settings tab > General subtab > E-Mail Address(es)``
+
+To edit the contact and notification e-mail address for your dataverse:
+
+-  Enter one or more e-mail addresses in the **E-Mail Address** field.
+   Provide the addresses of users who should receive a notification
+   when someone contacts this dataverse. Any time a user submits a
+   request through your dataverse, including the Request to Contribute
+   link and the Contact Us page, e-mail is sent to all addresses that
+   you enter in this field. Separate each address from the others with
+   a comma. Do not add any spaces between addresses.
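+
+   For example, to notify two (hypothetical) curators:
+   ``jsmith@example.edu,mjones@example.edu``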
+
+Add Fields to Search Results
+============================
+
+Your dataverse includes the network's search and browse features to
+assist your visitors in locating the data that they need. By default,
+the Cataloging Information fields that appear in the search results or
+in studies' listings include the following: study title, authors, ID,
+production date, and abstract. You can customize other Cataloging
+Information fields to appear in search result listings after the default
+fields. Additional fields appear only if they are populated for the
+study.
+
+Navigate to the Search Results Fields from the Options page:
+
+``Dataverse home page > Options page > Settings tab > Customization subtab > Search Results Fields``
+
+To add more Cataloging Information fields listed in the Search or Browse
+panels:
+
+-  Click the check box beside any of the following Cataloging
+   Information fields to include them in your results pages: Production
+   Date, Producer, Distribution Date, Distributor, Replication For,
+   Related Publications, Related Material, and Related Studies.
+
+Note: These settings apply to your dataverse only.
+
+Set Default Study Listing Sort Order
+====================================
+
+Use the drop-down menu to set the default sort order of studies on the
+Study Listing page. By default, they are sorted by Global ID, but you
+can also sort by Title, Last Released, Production Date, or Download
+Count.
+
+Navigate to the Default Study Listing Sort Order from the Options page:
+
+``Dataverse home page > Options page > Settings tab > Customization subtab > Default Sort Order``
+
+Enable Twitter
+==============
+
+If your Dataverse Network has been configured for Automatic Tweeting,
+you will see an option listed as "Enable Twitter." When you click this,
+you will be redirected to Twitter to authorize the Dataverse Network
+application to send tweets for you.
+
+Once authorized, tweets will be sent for each new study or study version
+that is released.
+
+To disable Automatic Tweeting, go to the Options page, and click
+"Disable Twitter."
+
+Navigate to Enable Twitter from the Options page:
+
+``Dataverse home page > Options page > Settings tab > Promote Your Dataverse subtab > Sync Dataverse With Twitter``
+
+Get Code for Dataverse Link or Search Box
+=========================================
+
+Add a dataverse promotional link or dataverse search box on your
+personal website by copying the code for one of the sample links on this
+page, and then pasting it anywhere on your website to create the link.
+
+Navigate to the Code for Dataverse Link or Search Box from the Options
+page:
+
+``Dataverse home page > Options page > Settings tab > Promote Your Dataverse subtab``
+
+Edit Terms for Study Creation
+=============================
+
+You can set up Terms of Use for the dataverse that require users to
+acknowledge your terms and click "Accept" before they can contribute to
+the dataverse.
+
+Navigate to the Terms for Study Creation from the Options page:
+
+``Dataverse home page > Options page > Permissions tab > Terms subtab > Deposit Terms of Use``
+
+To set Terms of Use for creating or uploading to the dataverse:
+
+#. Click the Enable Terms of Use check box.
+#. Enter a description of your terms to which visitors must agree before
+   they can create a study or upload a file to an existing study.
+   Note: A light blue background in any form field indicates HTML,
+   JavaScript, and style tags are permitted. The ``html`` and ``body``
+   element types are not allowed.
+
+Edit Terms for File Download
+============================
+
+You can set up Terms of Use for the dataverse that require users to
+acknowledge your terms and click "Accept" before they can download or
+subset contents from the dataverse.
+
+Navigate to the Terms for File Download from the Options page:
+
+``Dataverse home page > Options page > Permissions tab > Terms subtab > Download Terms of Use``
+
+To set Terms of Use for downloading or subsetting contents from the
+dataverse:
+
+#. Click the Enable Terms of Use check box.
+#. Enter a description of your terms to which visitors must agree before
+   they can download or analyze any file.
+   Note: A light blue background in any form field indicates HTML,
+   JavaScript, and style tags are permitted. The ``html`` and ``body``
+   element types are not allowed.
+
+Manage Permissions
+==================
+
+Enable contribution invitation, grant permissions to users and groups,
+and manage dataverse file permissions.
+
+Navigate to Manage Permissions from the Options page:
+
+``Dataverse home page > Options page > Permissions tab > Permissions subtab``
+
+**Contribution Settings**
+
+Choose the access level contributors have to your dataverse: whether
+they are allowed to edit only their own studies or all studies, and
+whether all registered users can edit their own studies (Open
+dataverse) or all studies (Wiki dataverse). In an Open dataverse, users
+can add studies by simply creating an account, and can edit their own
+studies any time, even after the study is released. In a Wiki
+dataverse, users can not only add studies by creating an account, but
+also edit any study in that dataverse. Contributors cannot, however,
+release a study directly. After their edits, they submit it for review
+and a dataverse administrator or curator will release it.
+
+**User Permission Settings**
+
+There are several roles defined for users of a Dataverse Network
+installation:
+
+-  Data Users - Download and analyze all types of data
+-  Contributors - Distribute data and receive recognition and citations
+   to it
+-  Curators - Summarize related data, organize data, or manage multiple
+   sets of data
+-  Administrators - Set up and manage contributions to your dataverse,
+   manage the appearance of your dataverse, organize your dataverse
+   collections
+
+**Privileged Groups**
+
+Enter a group name to allow that group access to the dataverse. Groups
+are created by network administrators.
+
+**Dataverse File Permission Settings**
+
+Choose 'Yes' to restrict ALL files in this dataverse. To restrict files
+individually, go to the Study Permissions page of the study containing
+the file.
+
+Create User Account
+===================
+
+As a registered user, you can:
+
+-  Add studies to open and wiki dataverses, if available
+-  Contribute to existing studies in wiki dataverses, if available
+-  Add user comments to studies that have this option
+-  Create your own dataverse
+
+**Network Admin Level**
+
+Navigate to Create User Account from the Options page:
+
+``Network home page > Options page > Permissions tab > Users subtab > Create User link``
+
+To create an account for a new user in your Network:
+
+#. Complete the account information page.
+    Enter values in all required fields. Note: an e-mail address can also be used as a username.
+#. Click Create Account to save your entries.
+#. Go to the Permissions tab on the Options page to give the user
+   Contributor, Curator or Admin access to your dataverse.
+   
+**Dataverse Admin Level**
+
+Navigate to Create User Account from the Options page:
+
+``Dataverse home page > Options page > Permissions tab > Permissions subtab > Create User link``
+
+To create an account for a new user in your Dataverse:
+
+#. Complete the account information page.
+    Enter values in all required fields. Note: an e-mail address can also be used as a username.
+#. Click Create Account to save your entries.
+#. Go to the Permissions tab on the Options page to give the user
+   Contributor, Curator or Admin access to your dataverse.
+   
+
+**New User: Network Homepage**
+
+As a new user, to create an account at the **Dataverse Network homepage**, select "Create Account" 
+at the top-right hand side of the page.
+
+Complete the required information denoted by the red asterisk and save.
+
+**New User: Dataverse Level**
+
+As a new user, to create an account at the **Dataverse level**, select "Create Account" 
+at the top-right hand side of the page. Note: For Open Dataverses, select "Create Account" in the orange box
+at the top-right hand side of the page labeled "OPEN DATAVERSE".
+
+Complete the required information denoted by the red asterisk and save.
+
+Download Tracking Data
+======================
+
+You can view any guestbook responses that have been made in your
+dataverse. Beginning with version 3.2 of Dataverse Network, if the
+guestbook is not enabled, data will be collected silently based on the
+logged-in user or anonymously. The data displayed includes user account
+data or the session ID of an anonymous user, the global ID, study title
+and file name of the file downloaded, the time of the download, the type
+of download and any custom questions that have been answered. The
+username/session ID and download type were not collected in the 3.1
+version of Dataverse Network. A comma-separated values file of all
+download tracking data may be downloaded by clicking the Export Results
+button.
+
+Navigate to the Download Tracking Data from the Options page:
+
+``Dataverse home page > Options page > Permissions tab > Download Tracking Data subtab``
+
+Edit File Download Guestbook
+============================
+
+You can set up a guestbook for your dataverse to collect information on
+all users before they can download or subset contents from the
+dataverse. The guestbook is independent of Terms of Use. Once it has
+been enabled it will be shown to any user for the first file a user
+downloads from a given study within a single session. If the user
+downloads additional files from the study in the same session a record
+will be created in the guestbook response table using data previously
+entered. Beginning with version 3.2 of Dataverse Network, if the
+dataverse guestbook is not enabled in your dataverse, download
+information will be collected silently based on logged-in user
+information or session ID.
+
+Navigate to the File Download Guestbook from the Options page:
+
+``Dataverse home page > Options page > Permissions tab > Guestbook subtab``
+
+To set up a Guestbook for downloading or subsetting contents from any study in the dataverse:
+
+#. Click the Enable File Download Guestbook check box.
+#. Select or unselect "required" for any of the user account identifying
+   data points (first and last name, e-mail address, etc.).
+#. Add any custom questions to collect additional data. These questions
+   may be marked as required and set up as free text responses or
+   multiple choice. For multiple choice responses select Radio Buttons
+   as the Custom Field Type and enter the possible answers.
+#. Any custom question may be removed at any time, so that it won’t show
+   for the end user. If there are any responses associated with a
+   question that has been removed, they will continue to appear in the
+   Guestbook Response data table.
+
+.. _openscholar:
+
+OpenScholar
+===========
+
+**Embed your Dataverse easily on an OpenScholar site**
+
+Dataverse integrates seamlessly with
+`OpenScholar <http://openscholar.harvard.edu/>`__, a self-service site builder for higher education.
+
+To embed your dataverse on an OpenScholar site:
+
+#. On your Dataverse Options page, go to the Settings tab
+#. Go to the Customization subtab
+#. Click the checkbox that disables customization for your dataverse
+#. Make note of your Dataverse alias URL (i.e.
+   `http://thedata.harvard.edu/dvn/dv/myvalue <http://thedata.harvard.edu/dvn/dv/myvalue>`__)
+#. Follow the `OpenScholar Support Center
+   instructions <http://support.openscholar.harvard.edu/customer/portal/articles/1215076-apps-dataverse>`__ to
+   enable the Dataverse App
+
+.. _enabling-lockss-access-to-the-dataverse:
+
+Enabling LOCKSS access to the Dataverse
+=======================================
+
+**Summary:**
+
+`LOCKSS Project <http://lockss.stanford.edu/lockss/Home>`__ or *Lots
+of Copies Keep Stuff Safe* is an international initiative based at
+Stanford University Libraries that provides a way to inexpensively
+collect and preserve copies of authorized e-content. It does so using an
+open source, peer-to-peer, decentralized server infrastructure. In order
+to make a LOCKSS server crawl, collect and preserve content from a DVN,
+both the server (the LOCKSS daemon) and the client (the DVN) sides must
+be properly configured. In simple terms, the LOCKSS server needs to be
+pointed at the DVN, given its location and instructions on what to
+crawl (the entire network, or a particular dataverse); on the DVN side,
+access to the data must be authorized for the LOCKSS daemon. The section
+below describes the configuration tasks that the administrator of a
+Dataverse will need to do on the client side. It does not describe how
+LOCKSS works and what it does in general; it's a fairly complex system,
+so please refer to the documentation on the `LOCKSS
+Project <http://lockss.stanford.edu/lockss/Home>`__\  site for more
+information. Some information intended for a LOCKSS server administrator
+is available in the :ref:`"Using LOCKSS with DVN" <using-lockss-with-dvn>`
+section of the :ref:`DVN Installers Guide <introduction>`
+(our primary sysadmin-level manual).
+
+**Configuration Tasks:**
+
+In order for a LOCKSS server to access, crawl and preserve any data on a
+given Dataverse Network, it needs to be granted an authorization by the
+network administrator. (In other words, an owner of a dataverse cannot
+authorize LOCKSS access to its files, unless LOCKSS access is configured
+on the Dataverse Network level). By default, LOCKSS crawling of the
+Dataverse Network is not allowed; check with the administrator of
+your Dataverse Network for details. 
+
+But if enabled on the Dataverse Network level, the dataverse owner can
+further restrict LOCKSS access. For example, if on the network level all
+LOCKSS servers are allowed to crawl all publicly available data, the
+owner can limit access to the materials published in his or her
+dataverse to select servers only, specified by network address or
+domain.
+
+In order to configure LOCKSS access, navigate to the Advanced tab on the
+Options page:
+
+``Dataverse home page > Options page > Settings tab > Advanced subtab``
+
+It's important to understand that when a LOCKSS daemon is authorized to
+"crawl restricted files", this does not by itself grant the actual
+access to the materials! This setting only specifies that the daemon
+should not be skipping such restricted materials outright. If it is
+indeed desired to have non-public materials collected and preserved by
+LOCKSS, in addition to selecting this option, it will be the
+responsibility of the DV Administrator to give the LOCKSS daemon
+permission to actually access the files. As of DVN version 3.3, this can
+only be done based on the IP address of the LOCKSS server (by creating
+an IP-based user group with the appropriate permissions).
+
+Once LOCKSS crawling of the Dataverse is enabled, the Manifest page
+URL will be
+
+``http://<YOUR SERVER>/dvn/dv/<DV ALIAS>/faces/ManifestPage.xhtml``.
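+
+For example, assuming a hypothetical server ``dvn.example.edu`` and a
+dataverse with the alias ``mydv``, the Manifest page URL would be
+``http://dvn.example.edu/dvn/dv/mydv/faces/ManifestPage.xhtml``.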
+
+
+Study and Data Administration
++++++++++++++++++++++++++++++
+
+Study Options are available for Contributors, Curators, and
+Administrators of a Dataverse.
+
+Create New Study
+====================
+
+Brief instructions for creating a study:
+
+Navigate to the dataverse in which you want to create a study, then
+click Options->Create New Study.
+
+Enter at minimum a study title and click Save. Your draft study is now
+created. Add additional cataloging information and upload files as
+needed. Release the study when ready to make it viewable by others.
+
+**Data Citation widget**
+
+At the top of the edit study form, there is a data citation widget that
+allows a user to quickly enter fields that appear in the data citation,
+i.e., title, author, date, and distributor. Otherwise, the information
+can be entered as the fields appear in the data entry form.
+
+See the information below for more details and recommendations for
+creating a study.
+
+**Steps to Create a Study**
+
+#. Enter Cataloging Information, including an abstract of the study.
+   Set Terms of Use for the study in the Cataloging fields, if you choose.
+#. Upload files associated with the study.
+#. Set permissions to access the study, all of the study files, or some
+   of the study files.
+#. Delete your study if you choose, before you submit it for review.
+#. Submit your study for review, to make it available to the public.
+
+There are several guidelines to creating a study:
+
+-  You must create a study by performing steps in the specified order.
+-  If multiple users edit a study at one time, the first user to click
+   Save assumes control of the file. Only that user's changes are
+   effective.
+-  When you save the study, any changes that you make after that do not
+   affect the study's citation.
+
+**Enter Cataloging Information**
+
+To enter the Cataloging Information for a new study:
+
+#. To prepopulate Cataloging Information fields based on a study
+   template (if a template is available), use the Select Study Template
+   pull-down list to select the appropriate template.
+
+   A template provides default values for basic fields in the
+   Cataloging Information fields. The default template prepopulates the
+   Deposit Date field only.
+#. Enter a title in the Title field.
+#. Enter data in the remaining Cataloging Information fields.
+   To list all fields, including the Terms of Use fields, click the Show
+   All Fields button after you enter a title. Use the following
+   guidelines to complete these fields:
+
+   -  A light blue background in any form field indicates that HTML,
+      JavaScript, and style tags are permitted. You cannot use the
+      ``html`` and ``body`` element types.
+   -  To use the inline help and view information about a field, roll
+      your cursor over the field title.
+   -  Be sure to complete the Abstract field.
+   -  To set Terms of Use for your study, scroll to the bottom of the Cataloging Information tab.
+      Eight fields appear under the Terms of Use label. You must
+      complete at least one of these fields to enable Terms for this
+      study.
+#. Click the *Save* button and then add comments or a brief description
+   in the Study Version Notes popup. Then click the *Continue* button
+   and your study draft version is saved.
+
+**Upload Study Files**
+
+To upload files associated with a new study:
+
+#. For each file that you choose to upload to your study, first select
+   the Data Type from the drop-down list. Then click the Browse button
+   to select the file, and then click Upload to add each file, one at a time.
+   
+   When selecting a CSV (character-separated values) data type, an SPSS Control Card file is first required.
+
+   When selecting a TAB (tab-delimited) data type, a DDI Control Card file is first required. There is no restriction on the number or types of files that you can upload to the Dataverse Network.
+   
+
+   There is a maximum file size of 2 gigabytes for each file that you upload.
+
+#. After you upload one file, enter the type of file in the *Category*
+   field and then click Save.
+   If you do not enter a category and click Save, the Category
+   drop-down list does not have any value. You can create any category
+   to add to this list.
+#. For each file that you upload, first click the check box in front of
+   the file's entry in the list, and then use the Category drop-down
+   list to select the type of file that you uploaded. 
+
+   Every checked file is assigned the category that you select. Be sure
+   to click the checked box to remove the check before you select a new
+   value in the Category list for another file.
+#. In the Description field, enter a brief message that identifies the
+   contents of your file.
+#. Click Save when you are finished uploading files. **Note:** If you upload a subsettable file, that process takes a few
+   moments to complete. During the upload, the study is not available for editing. When you receive e-mail notification that the
+   subsettable file upload is complete, click *Refresh* to continue editing the study.
+   
+   You see the Documentation, Data and Analysis tab of the study page
+   with a list of the uploaded files. For each *subsettable tabular*
+   data set file that you upload, the number of cases and variables and
+   a link to the Data Citation information for that data set are
+   displayed. If you uploaded an SPSS (``.sav`` or ``.por``) file, the
+   Type for that file is changed to *Tab delimited* and the file
+   extension is changed to ``.tab`` when you click Save.
+   
+   For each *subsettable network* data set file that you upload, the number of edges and vertices and a link to the Data Citation
+   information for that data set are displayed.
+#. Continue to the next step and set file permissions for the study or
+   its files.
+
+**Study File Tips**
+
+
+Keep in mind these tips when uploading study files to your dataverse:
+
+-  The following subsettable file types are supported:
+
+   -  SPSS ``sav`` and ``por`` - Versions 7.x to 20.x (See the :ref:`Note on SPSS ingest <spss-datafile-ingest>` in the Appendix)
+   -  STATA ``dta`` - Versions 4 to 12
+   -  R ``RData`` - All versions (NEW as of DVN v.3.5! See :ref:`Ingest of R data files <r-datafile-ingest>` in the Appendix)
+   -  GraphML ``xml`` - All versions
+   -  CSV data file with a :ref:`control card <controlcard-datafile-ingest>`
+   -  TAB-delimited data file with a :ref:`DDI XML control card <ddixml-datafile-ingest>` 
+
+- A custom ingest for FITS Astronomical data files has been added in v.3.4. (see :ref:`FITS File format Ingest <fits-datafile-ingest>` in the Appendix)
+
+-  You can add information for each file, including:
+
+   -  File name
+   -  Category (documentation or data)
+   -  Description
+
+-  If you upload the wrong file, click the Remove link before you click
+   Save.
+   To replace a file after you upload it and save the study, first
+   remove the file and then upload a new one.
+-  If you upload a STATA (``.dta``), SPSS (``.sav`` or ``.por``), or
+   network (``.xml``) file, the file automatically becomes subsettable
+   (that is, subset and analysis tools are available for that file in
+   the Network). In this case, processing the file might take some time
+   and you will not see the file listed immediately after you click
+   Save.
+-  When you upload a *subsettable* data file, you are prompted to
+   provide or confirm your e-mail address for notifications. One e-mail
+   lets you know that the file upload is in progress; a second e-mail
+   notifies you when the file upload is complete.
+-  While the upload of the files takes place, your study is not
+   available for editing. When you receive e-mail notification that the
+   upload is completed, click *Refresh* to continue editing the study.
+
+**Set Study and File Permissions**
+
+You can restrict access to a study, all of its files, or some of its
+files. This restriction extends to the search and browse functions.
+
+To permit or restrict access:
+
+#. On the study page, click the Permissions link.
+#. To set permissions for the study:
+
+   A. Scroll to the Entire Study Permission Settings panel, and click
+      the drop-down list to change the study to Restricted or Public.
+   #. In the *User Restricted Study Settings* field, enter a user or
+      group to whom you choose to grant access to the study, then click
+      Add.
+
+   To enable a request for access to restricted files in the study,
+   scroll to the File Permission Settings panel, and click the
+   Restricted File Settings check box. This supplies a request link on
+   the Data, Documentation and Analysis tab for users to request access
+   to restricted files by creating an account.
+
+
+   To set permission for individual files in the study:
+
+   A. Scroll to the Individual File Permission Settings panel, and enter
+      a user or group in the Restricted File User Access *Username*
+      field to grant permissions to one or more individual files.
+   #. Use the File Permission pull-down list and select the permission
+      level that you choose to apply to selected files: Restricted or
+      Public.
+   #. In the list of files, click the check box for each file to which
+      you choose to apply permissions. 
+      To select all files, click the check box at the top of the list.
+   #. Click Update. 
+      The users or groups to which you granted access privileges appear
+      in the File Permissions list after the selected files.
+
+Note: You can edit or delete your study if you choose, but only until
+you submit the study for review. After you submit your study for review,
+you cannot edit or delete it from the dataverse.
+
+
+**Delete Studies**
+
+You can delete a study that you contribute, but only until you submit
+that study for review. After you submit your study for review, you
+cannot delete it from the dataverse.
+
+If a study is no longer valid, it can now be deaccessioned so it's
+unavailable to users but still has a working citation. A reference to a
+new study can be provided when deaccessioning a study. Only Network
+Administrators can now permanently delete a study once it has been
+released.
+
+To delete a draft version:
+
+#. Click the Delete Draft Version link in the top-right area of the
+   study page.
+
+   You see the Delete Draft Study Version popup.
+#. Click the Delete button to remove the draft study version from the
+   dataverse.
+
+To deaccession a study:
+
+#. Click the Deaccession link in the top-right area of the study page.
+    You see the Deaccession Study page.
+#. You have the option to add your comments about why the study was
+   deaccessioned, and a link reference to a new study by including the
+   Global ID of the study.
+#. Click the Deaccession button to remove your study from the
+   dataverse.
+
+**Submit Study for Review**
+
+When you finish setting options for your study, click *Submit For
+Review* in the top-right corner of the study page. The study version
+shown on the page changes to *In Review*.
+
+You receive e-mail after you click *Submit For Review*, notifying you
+that your study was submitted for review by the Curator or Dataverse
+Admin. When a study is in review, it is not available to the public. You
+receive another e-mail notifying you when your study is released for
+public use.
+
+After your study is reviewed and released, it is made available to the
+public, and it is included in the search and browse functions. The
+Cataloging Information tab for your study contains the Citation
+Information for the complete study. The Documentation, Data and Analysis
+tab lists the files associated with the study. For each subsettable file
+in the study, a link is available to show the Data Citation for that
+specific data set.
+
+
+**UNF Calculation**
+
+When a study is created, a UNF is calculated for each subsettable file
+uploaded to that study. All subsettable file UNFs then are combined to
+create another UNF for the study. If you edit a study and upload new
+subsettable files, a new UNF is calculated for the new files and for the
+study.
+
+If the original study was created before version 2.0 of the Dataverse
+Network software, the UNF calculations were performed using version 3 of
+that standard. If you upload new subsettable files to an existing study
+after implementation of version 2.0 of the software, the UNFs are
+recalculated for all subsettable files and for the study using version 5
+of that standard. This prevents incompatibility of UNF version numbers
+within a study.
+
+Manage Studies
+==================
+
+You can find all studies that you uploaded to the dataverse, or that
+were submitted by a Contributor for review, giving you access to view,
+edit, release, or delete studies.
+
+
+**View, Edit, and Delete/Deaccession Studies**
+
+To view and edit studies that you uploaded:
+
+#. Click a study Global ID, title, or *Edit* link to go to the study
+   page.
+#. From the study page, do any of the following:
+
+   -  Edit Cataloging Information
+   -  Edit/Delete File + Information
+   -  Add File(s)
+   -  Edit Study Version Notes
+   -  Permissions
+   -  Create Study Template
+   -  Release
+   -  Deaccession
+   -  Destroy Study
+
+To delete or deaccession studies that you uploaded:
+
+#. If the study has not been released, click the *Delete* link to open
+   the Delete Draft Study Version popup.
+#. If the study has been released, click the *Deaccession* link to open
+   the Deaccession Study page.
+#. Add your comments about why the study was deaccessioned, and a
+   reference link to another study by including the Global ID, then
+   click the *Deaccession* button.
+
+**Release Studies**
+
+When you release a study, you make it available to the public. Users can
+browse it or search for it from the dataverse or Network homepage.
+
+You receive e-mail notification when a Contributor submits a study for
+review. You must review each study submitted to you and release that
+study to the public. You receive a second e-mail notification after you
+release a study.
+
+To release a study draft version:
+
+#. Review the study draft version by clicking the Global ID, or title,
+   to go to the Study Page, then click Release in the upper right
+   corner. For a quick release, click *Release* from the Manage Studies
+   page.
+#. If the study draft version is an edit of an existing study, you will
+   see the Study Version Differences page. The table allows you to view
+   the changes compared to the current public version of the study.
+   Click the *Release* button to continue.
+#. Add comments or a brief description in the Study Version Notes popup.
+   Then click the *Continue* button and your study is now public.
+
+Manage Study Templates
+======================
+
+You can set up study templates for a dataverse to prepopulate any of
+the Cataloging Information fields of a new study with default values.
+When a user adds a new study, that user can select a template to fill in
+the defaults.
+
+
+**Create Template**
+
+Study templates help to reduce the work needed to add a study, and to
+apply consistency to studies within a dataverse. For example, you can
+create a template to include the Distributor and Contact details so that
+every study has the same values for that metadata.
+
+To create a new study template:
+
+#. Click Clone on any Template.
+#. You see the Study Template page.
+#. In the Template Name field, enter a descriptive name for this
+   template.
+#. Enter generic information in any of the Cataloging Information
+   metadata fields. You may also change the input level of any field to
+   make a certain field required, recommended, optional, or hidden.
+   Hidden fields will not be visible to the user creating studies from
+   the template.
+#. After you complete entry of generic details in the fields that you
+   choose to prepopulate for new studies, click Save to create the
+   template.
+
+Note: You also can create a template directly from the study page to
+use that study's Cataloging Information in the template.
+
+
+**Enable a template**
+
+Click the Enabled link for the given template. Enabled templates are
+available to end users for creating studies.
+
+
+**Edit Template**
+
+To edit an existing study template:
+
+#. In the list of templates, click the Edit link for the template that
+   you choose to edit.
+#. You see the Study Template page, with the template setup that you
+   selected.
+#. Edit the template fields that you choose to change, add, or remove.
+
+Note: You cannot edit any Network Level Template.
+
+
+**Make a Template the Default**
+
+To set any study template as the default template that applies
+automatically to new studies:
+
+In the list of templates, click the Make Default link next to the name
+of the template that you choose to set as the default.
+The Current Default Template label is displayed next to the name of
+the template that you set as the default.
+
+**Remove Template**
+
+To delete a study template from a dataverse:
+
+#. In the list of templates, click the Delete link for the template that
+   you choose to remove from the dataverse.
+#. You see the Delete Template page.
+#. Click Delete to remove the template from the dataverse.
+
+Note:  You cannot delete any network template, default template or
+template in use by any study.
+
+Data Uploads
+================
+
+**Troubleshooting Data Uploads:**
+
+Though the add files page works for the majority of our users, there can
+be situations where uploading files does not work. Below are some
+troubleshooting tips, including situations where uploading a file might
+fail and things to try.
+
+
+**Situations where uploading a file might fail:**
+
+#. The file is too large (larger than the maximum size); this should fail immediately with an error.
+#. The file takes too long and the connection times out (currently this seems to happen after 5 minutes). Failure behavior is vague
+   and depends on the browser. This is probably an IceFaces issue.
+#. The user is going through a web proxy or firewall that is not passing through partial submit headers. There is specific failure
+   behavior here that can be checked, and it would also affect other web site functionality such as the create account link. See
+   redmine ticket `#2532 <https://redmine.hmdc.harvard.edu/issues/2532>`__.
+#. The AddFilesPage times out: the user begins adding files, then sits there idle for a long while until the page times out; they
+   should see the red circle slash.
+#. For subsettable files, there is something wrong with the file
+   itself, so it is not ingested. In these cases the user should upload it as type "other" and we can test it here.
+#. For subsettable files, there is something wrong with our ingest code that can't process something about that particular file,
+   format, or version.
+#. There is a browser-specific issue that is either a bug in our
+   software that hasn't been discovered, or something unique to the user's browser such as security settings or a conflict with a
+   browser plugin like developer tools. Trying a different browser such as Firefox or Chrome would be a good step.
+#. There is a computer- or network-specific issue that we can't determine, such as a firewall, proxy, NAT, or upload versus download
+   speed. Trying a different computer at a different location might be a good step.
+#. The user is uploading a really large subsettable file, or many files, and it is taking a really long time to upload.
+#. There is something wrong with our server, such as it not responding.
+#. Using IE 8, if you add 2 text or PDF files in a row they won't upload, but if you add them singly, or also add a subsettable
+   file, they all work. Known issue, reported previously: `#2367 <https://redmine.hmdc.harvard.edu/issues/2367>`__.
+
+
+**So, general information that would be good to get and things to try would be:**
+
+#. Have you ever been able to upload a file?
+#. Does a small text file work?
+#. Which browser and operating system are you using? Can you try Firefox or Chrome?
+#. Does the problem affect some files or all files? If some files, do they work one at a time? Are they all the same type such as
+   Stata or SPSS? Which version? Can they be saved as a supported version, e.g. Stata 12 or SPSS 20? Upload them as type "other"
+   and we'll test here.
+#. Can you try a different computer at a different location?
+#. Last, we'll try uploading it for you (may need DropBox to facilitate upload).
+
+.. _manage-collections:
+
+Manage Collections
+===================
+
+Collections can contain studies from your own dataverse or from
+another public dataverse in the Network.
+
+
+**Create Collection**
+
+You can create new collections in your dataverse, but any new collection
+is a child of the root collection except for Collection Links. When you
+create a child in the root collection, you also can create a child
+within that child to make a nested organization of collections. The root
+collection remains the top-level parent to all collections that are not
+linked from another dataverse.
+
+There are three ways in which you can create a collection:
+
+-  Static collection - You assign specific studies to this type of
+   collection.
+-  Dynamic collection - You can create a query that gathers studies into
+   a collection based on matching criteria, and keep the contents
+   current. If a study matches the query selection criteria one week,
+   then is changed and no longer matches the criteria, that study is
+   a member of the collection only as long as its metadata matches the
+   query.
+-  Linked collection - You can link an existing collection from another
+   dataverse to your dataverse homepage. Note that the contents of that
+   collection can be edited only in the originating dataverse.
+
+**Create Static Collection by Assigning Studies**
+
+To create a collection by assigning studies directly to it:
+
+#. Locate the root collection to create a direct subcollection in the
+   root, or locate any other existing collection in which you choose to
+   create a new collection. Then, click the *Create* link in the Create
+   Child field for that collection.
+
+   You see the Study Collection page.
+#. In the Type field, click the Static option.
+#. Enter your collection Name.
+#. Select the Parent in which you choose to create the collection.
+   The default is the collection in which you started on the *Manage
+   Collections* page. You cannot create a collection in another
+   dataverse unless you have permission to do so.
+#. Populate the Selected Studies box:
+
+   -  Click the *Browse* link to use the Dataverse and Collection
+      pull-down lists to create a list of studies.
+   -  Click the *Search* link to select a query field and search for
+      specific studies, enter a term to search for in that query field,
+      and then click Search.
+
+   A list of available studies is displayed in the Studies to Choose
+   from box.
+
+#. In the Studies to Choose from box, click a study to assign it to your
+   collection.
+   
+
+   You see the study you clicked in the Selected Studies box.
+#. To remove studies from the list of Selected Studies, click the study
+   in that box.
+
+   The study is removed from the Selected Studies box.
+#. If needed, repopulate the Studies to Choose from box with new
+   studies, and add additional studies to the Selected Studies list.
+
+**Create Linked Collection**
+
+You can create a collection as a link to one or more collections from
+other dataverses, thereby defining your own collections for users to
+browse in your dataverse.
+
+Note: A collection created as a link to a collection from another
+dataverse is editable only in the originating dataverse. Also,
+collections created by use of this option might not adhere to the
+policies for adding Cataloging Information and study files that you
+require in your own dataverse.
+
+To create a collection as a link to another collection:
+
+#. In the Linked Collections field, click Add Collection Link.
+
+   You see the Add Collection Link window.
+#. Use the Dataverse pull-down list to select the dataverse from which
+   you choose to link a collection.
+#. Use the Collection pull-down list to select a collection from your
+   selected dataverse to add a link to that collection in your
+   dataverse.
+
+   The collection you select will be displayed in your dataverse
+   homepage, and will be included in your dataverse searches.
+
+**Create Dynamic Collection as a Query**
+
+When you create a collection by assigning the results of a query to it,
+that collection is dynamic and is updated regularly based on the query
+results.
+
+To create a collection by assigning the results of a query:
+
+#. Locate the root collection to create a direct subcollection in the
+   root, or locate any other existing collection in which you choose to
+   create a new collection. Then, click the *Create* link in the Create
+   Child field for that collection.
+
+   You see the Study Collection page.
+#. In the Type field, click the Dynamic option.
+#. Enter your collection Name.
+#. Select the Parent in which you choose to create the collection.
+
+   The default is the collection in which you started on the *Manage Collections* page. You cannot create a collection in another
+   dataverse unless you have permission to do so.
+#. Enter a Description of this collection.
+#. In the Enter query field, enter the study field terms for which to
+   search to assign studies with those terms to this collection.
+   Use the following guidelines:
+
+   -  Almost all study fields can be used to build a collection query.
+
+      The study fields must be entered in the appropriate format to
+      search the fields' contents.
+   -  Use the following format for your query:
+      ``title:Elections AND keywordValue:world``.
+
+      For more information on query syntax, refer to the
+      `Documentation <http://lucene.apache.org/java/docs/>`__ page at
+      the Lucene website and look for *Query Syntax*. See the
+      `cataloging fields <http://guides.thedata.org/files/thedatanew_guides/files/catalogingfields11apr08.pdf>`__
+      document for field query names.
+   -  For each study in a dataverse, the Study Global Id field in the
+      Cataloging Information consists of three query terms:
+      ``protocol``, ``authority``, and ``globalID``.
+
+      If you build a query using ``protocol``, your collection can
+      return any study that uses the ``protocol`` you specified.
+
+      If you build a query using all three terms, your collection
+      returns only one study.
+
+#. To limit this collection to search for results in your own dataverse,
+   click the *Only your dataverse* check box.
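+
+For example, here are a few query sketches (the values are hypothetical;
+substitute terms from your own studies):
+
+-  ``protocol:hdl`` returns every study whose Global ID uses the
+   ``hdl`` protocol.
+-  ``title:Elections AND keywordValue:world`` returns studies whose
+   title contains "Elections" and that have the keyword "world".
+-  ``protocol:hdl AND authority:1902.1 AND globalID:10004`` combines
+   all three Global ID query terms, and so returns at most one study.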
+
+**Edit Collections**
+
+#. Click a collection title to edit the contents or setup of that
+   collection.
+
+   You see the Collection page, with the current collection settings
+   applied.
+#. Change, add, or delete any settings that you choose, and then click
+   Save Collection to save your edits.
+
+**Delete Collections or Remove Links**
+
+To delete existing static or dynamic collections:
+
+#. For the collection that you choose to delete, click the Delete link.
+#. Confirm the delete action to remove the collection from your
+   dataverse.
+
+To remove existing linked collections:
+
+#. For the linked collection that you choose to remove, click the
+   *Remove* link. (Note: There is no confirmation for a Remove action.
+   When you click the Remove link, the Dataverse Network removes the linked collection immediately.)
+
+Managing User File Access
+==========================
+
+User file access is managed through a set of access permissions that
+together determine whether or not a user can access a particular file,
+study, or dataverse. Generally speaking, there are three places where
+access permissions can be configured: at the dataverse level, at the
+study level, and at the file level. Think of each of these as a security
+perimeter or lock, with the dataverse being the outermost perimeter, the
+study the next, and finally the file level. When configuring user file
+access, it might be helpful to approach this from the dataverse access
+level first, and so on.
+
+For example, a user would like access to a particular file. Since files
+belong to studies and studies belong to dataverses, first determine
+whether the user has access to the dataverse. If the dataverse is
+released, all users have access to it. If it is unreleased, the user
+must appear in the User Permissions section on the dataverse permissions
+page.
+
+Next, they would need access to the study. If the study is public, then
+everyone has access. If it is restricted, the user must appear in the
+User Restricted Study Settings section on the study permissions page.
+
+Last, they would need access to the file. If the file is public,
+everyone has access. If the file is restricted, then the user must be
+granted access. 
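+
+The overall check can be pictured as three nested perimeters. Here is a
+minimal sketch in JavaScript (the names are illustrative only, not the
+application's actual code):
+
+ | ``function canAccessFile(user, dataverse, study, file) {``
+ | ``  // perimeter 1: the dataverse must be released, or grant the user access``
+ | ``  if (!dataverse.released && !dataverse.permittedUsers.includes(user)) return false;``
+ | ``  // perimeter 2: the study must be public, or list the user``
+ | ``  if (study.restricted && !study.permittedUsers.includes(user)) return false;``
+ | ``  // perimeter 3: the file must be public, or grant the user access``
+ | ``  if (file.restricted && !file.permittedUsers.includes(user)) return false;``
+ | ``  return true;``
+ | ``}``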
+
+**There are two ways a file can be restricted.**
+
+First, on the dataverse permissions page, all files in the dataverse
+could be restricted using Restrict ALL files in this Dataverse. To
+enable user access in this case, add the username to the Restricted File
+User Access section on this page.
+
+Second, individual files can be restricted at the study level on the
+study permissions page in the "Files" subtab. These can be restricted on a file-by-file basis. 
+If this is the case, the file(s) will be displayed
+as restricted in the Individual File Permission Settings section. To
+enable user access to a particular file in this case, check the file to
+grant access to, type the username in the Restricted File User Access
+section, click update so their name appears next to the file, then click
+save.
+
+Another option at the study level when restricting files is to allow users to
+request access to restricted files. This can be done on the study Permissions page in the "Files" subtab, where
+you must first select the files you want to restrict, click "update permissions" to restrict them, and then, under
+"File Permission Settings", check the box to "Allow users to request access..." and click Save at the bottom
+of the page. The contact(s) set for the dataverse (``Dataverse Options > Settings > General``) will get an e-mail
+notification each time a user sends a request. The request access e-mail displays a list of the file(s)
+requested and a DOI or Handle for the study. To approve or deny access to these file(s), go back to the study
+permissions page under the "Files" subtab and Approve or Deny the specific files that were requested. If you
+choose to deny any files, you will have the option to add a reason why. Remember to click the "update"
+button and then select Save, so that your selections are saved and an e-mail is sent to the requestor granting or
+denying them access. The e-mail sent to the requestor will list which files were approved, with a DOI or
+Handle URL, and any files that were denied, along with any reasons that may have been provided.
+
+Finally, a somewhat unusual configuration could exist where both
+Restrict all files in a dataverse is set and an individual file is
+restricted. In this case access would need to be granted in both places
+(think of it as two locks). This last situation is an artifact of
+integrating these two features and will be simplified in a future
+release.
+
+Network Administration
++++++++++++++++++++++++
+
+The Dataverse Network provides several options for configuring and
+customizing your application. To access these options, login to the
+Dataverse Network application with an account that has Network
+Administrator privileges. By default, a brand new installation of the
+application will include an account of this type - the username and
+password is 'networkAdmin'.
+
+After you login, the Dataverse Network home page links to the Options
+page from the "Options" gear icon, in the menu bar. Click on the icon to
+view all the options available for customizing and configuring the
+application, as well as some network administrator utilities.
+
+The following tasks can be performed from the Options page:
+
+-  Manage dataverses, harvesting, exporting, and OAI sets - Create,
+   edit, and manage standard and harvesting dataverses, manage
+   harvesting schedules, set study export schedules, and manage OAI
+   harvesting sets.
+-  Manage subnetworks - Create, edit, and manage subnetworks, manage network and subnetwork level study templates.
+-  Customize the Network pages and description - Brand your Network and
+   set up your Network e-mail contact.
+-  Set and edit Terms of Use - Apply Terms of Use at the Network level
+   for accounts, uploads, and downloads.
+-  Create and manage user accounts and groups and Network privileges,
+   and enable option to create a dataverse - Manage logins, permissions,
+   and affiliate access to the Network.
+-  Use utilities and view software information - Use the administrative
+   utilities and track the current Network installation.
+
+Dataverses Section
+====================
+
+Create a New Dataverse
+-------------------------
+
+A dataverse is a container for studies and is the home for an individual
+scholar's or organization's data.
+
+Creating a dataverse is easy but first you must be a registered user.
+Depending on site policy, there may be a link on the Network home page,
+entitled "Create a Dataverse". This first walks you through creating an
+account, then a dataverse. If this is not the case on your site, log in,
+then navigate to the Create a New Dataverse page and complete the
+required information. That's it!
+
+#. Navigate to the Create a New Dataverse page: 
+    Network home page > Options page > Dataverses tab > Dataverse subtab > "Create Dataverse" link.
+#. Fill in the required information:
+
+
+    **Type of Dataverse**
+
+
+    Choose Scholar if it represents an individual's work; otherwise choose Basic.
+
+
+    **Dataverse Name**
+
+
+    This will be displayed on the network and dataverse home
+    pages. If this is a Scholar dataverse it will automatically be
+    filled in with the scholar's first and last name.
+
+
+    **Dataverse Alias**
+
+
+    This is an abbreviation, usually lower-case, that becomes part of the URL for the new dataverse.
+#. Click Save and you're done!
+
+   An email will be sent to you with more information, including
+   the URL to access your new dataverse.
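+
+   For example, with the alias ``myvalue`` on a server at
+   ``thedata.harvard.edu``, the new dataverse's URL would be
+   ``http://thedata.harvard.edu/dvn/dv/myvalue``.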
+
+**Required information** can vary depending on site policy. Required fields are noted with a red asterisk.
+
+Note: If "Allow users to create a new Dataverse when they create an account" is enabled, there is a Create a Dataverse link on the Network home page.
+
+Manage Dataverses
+--------------------
+
+As dataverses increase in number it's useful to view summary information
+in table form and quickly locate a dataverse of interest. The Manage
+Dataverse table does just that.
+
+Navigate to Network home page > Options page > Dataverses tab >
+Dataverses subtab > Manage Dataverse table:
+
+-  Dataverses are listed in order of most recently created.
+-  Clicking on a column name sorts the list by that column such as Name
+   or Affiliation.
+-  Clicking on a letter in the alpha selector displays only those
+   dataverses beginning with that letter.
+-  Move through the list of dataverses by clicking a page number or the
+   forward and back buttons.
+-  Click Delete to remove a dataverse.
+
+Subnetwork Section
+======================
+
+A subnetwork is a container for a group of dataverses.  Users will be able to create their dataverses in a particular subnetwork.  It may include its own branding and its own custom study templates.
+
+Create a New Subnetwork
+------------------------
+
+You must be a network admin in order to create a subnetwork.  These are the steps to create a subnetwork:
+
+#. Navigate to Create a New Subnetwork Page:
+    Network home page > Options page > Subnetworks tab> Create Subnetwork Link
+    
+#. Fill in required information:
+
+    **Subnetwork Name**
+    
+    The name to be displayed in the menubar. Please use a short name.
+
+    **Subnetwork Alias**
+    
+    Short name used to build the URL for this Subnetwork. It is case sensitive.
+
+    **Subnetwork Short Description**
+    
+    This short description is displayed on the Network Home page.
+    
+#. Fill in Optional Branding
+    These fields include a logo file, Subnetwork affiliation, description, and custom banner and footer.
+    
+#. Click Save and you’re done!
+
+
+Manage Subnetworks
+--------------------
+
+The Manage Subnetworks page gives summary information about all of the subnetworks in your installation.
+
+Navigate to Network home page > Options Page > Subnetworks tab:
+
+-  Subnetworks are listed alphabetically
+-  Clicking on a column name sorts the list by that column
+-  Click Edit to edit the subnetwork’s information or branding
+-  Click Delete to remove a subnetwork.  Note: this will not remove the dataverses assigned to the subnetwork.  The dataverses will remain and may be reassigned to another subnetwork.
+
+
+Manage Classifications
+------------------------
+
+Classifications are a way to organize dataverses on the network home
+page so they are more easily located. They appear on the left side of
+the page and clicking on a classification causes corresponding
+dataverses to be displayed. An example classification might be
+Organization, with Government nested beneath it.
+
+Classifications typically form a hierarchy defined by the network
+administrator to be what makes sense for a particular site. A top level
+classification could be Organization, the next level Association,
+Business, Government, and School.
+
+The classification structure is first created on the Options page, from
+the Manage Classifications table. Once a classification is created,
+dataverses can be assigned to it either when the dataverse is first
+created or later from the Options page: Network home page > (Your)
+Dataverse home page > Options page > Settings tab > General subtab.
+
+To manage classifications, navigate to the Manage Classifications table:
+
+Network home page > Options page > Classifications tab > Manage
+Classifications table
+
+From here you can view the current classification hierarchy, create a
+classification, edit an existing classification including changing its
+place in the hierarchy, and delete a classification.
+
+Manage Study Comments Notifications
+---------------------------------------
+
+Dataverse admins can enable or disable a User Comment feature within
+their dataverses. If this feature is enabled, users are able to add
+comments to studies within that dataverse. Part of the User Comment
+feature is the ability for users to report comments as abuse if they
+deem that comment to be inappropriate in some way.
+
+Note that it is a best practice to explicitly define terms of use
+regarding comments when the User Comments feature is enabled. If you
+define those terms at the Network level, then any study to which
+comments are added includes those terms.
+
+When a user reports another's comment as abuse, that comment is listed
+on the Manage Study Comment Notifications table on the Options page. For
+each comment reported as abuse, you see the study's Global ID, the
+comment reported, the user who posted the comment, and the user who
+reported the comment as abuse.
+
+There are two ways to manage abuse reports: In the Manage Study Comment
+Notifications table on the Options page, and on the study page User
+Comments tab. In both cases, you have the options to remove the comment
+or to ignore the abuse report.
+
+The Manage Study Comments Notifications table can be found here:
+
+Network home page > Options page > Dataverses tab > Study Comments
+subtab > Manage Study Comment Notifications table
+
+Manage Controlled Vocabulary
+----------------------------------
+
+You can set up controlled vocabulary for a dataverse network to give the
+end user a set list of choices to select from for most fields in a study
+template. Study fields which do not allow controlled vocabulary include
+the study title and subtitle, certain date fields and geographic
+boundaries.
+
+To **manage controlled vocabulary**, navigate to the Manage Controlled
+Vocabulary table:
+
+``Network home page > Options page > Vocabulary tab > Manage Controlled Vocabulary table``
+
+
+**To create a new controlled vocabulary:**
+
+#. Click Create New Controlled Vocabulary.
+#. You see the Edit Controlled Vocabulary page.
+#. In the Name field, enter a descriptive name for this Controlled
+   Vocabulary. In the Description field enter any additional information
+   that will make it easier to identify a particular controlled
+   vocabulary item to assign to a given custom field. In the Values
+   field enter the controlled vocabulary values that you want to make
+   available to users for a study field. Here you can submit an entire list of terms at once. Use the "add" and "remove" buttons
+   to add or subtract values from the list.  You may also copy and paste a list of values separated by carriage returns.
+#. After you complete entry of values, click Save to create the
+   controlled vocabulary.
+
+**Edit Controlled Vocabulary**
+
+
+To edit an existing controlled vocabulary:
+
+#. In the list of controlled vocabulary, click the Edit link for the
+   controlled vocabulary that you choose to edit. You see the Edit
+   Controlled Vocabulary page, with the controlled vocabulary setup that
+   you selected.
+#. Edit the controlled vocabulary items that you choose to change, add,
+   or remove. You may also copy and paste a list of values separated by carriage returns.
+
+Manage Network Study Templates
+-------------------------------------
+
+You can set up study templates for a dataverse network to prepopulate
+any of the Cataloging Information fields of a new study with default
+values. Dataverse administrators may clone a Network template and modify
+it for users of that dataverse. You may also change the input level of
+any field to make a certain field required, recommended, optional,
+hidden, or disabled. Hidden fields will not be available to the user, but
+will be available to the dataverse administrator for update in cloned
+templates. Disabled fields will not be available to the dataverse
+administrator for update. You may also add your own custom fields. When
+a user adds a new study, that user can select a template to fill in the
+defaults.
+
+To manage study templates, navigate to the Manage Study Templates table:
+
+``Network home page > Options page > Templates tab > Manage Study Templates table``
+
+
+**Create Template**
+
+Study templates help to reduce the work needed to add a study, and to
+apply consistency to studies across a dataverse network. For example,
+you can create a template to include the Distributor and Contact details
+so that every study has the same values for that metadata.
+
+To create a new study template:
+
+#. Click Create New Network Template.
+#. You see the Study Template page.
+#. In the Template Name field, enter a descriptive name for this
+   template.
+#. Enter generic information in any of the Cataloging Information
+   metadata fields. You can also add your own custom fields to the Data
+   Collection/Methodology section of the template. Each custom field
+   must be assigned a Name, Description and Field Type. You may also
+   apply controlled vocabulary to any of the custom fields that are set
+   to Plain Text Input as Field Type.
+#. After you complete entry of generic details in the fields that you
+   choose to prepopulate for new studies, click Save to create the
+   template.
+
+**Enable a template**
+
+Click the Enabled link for the given template. Enabled templates are
+available to dataverse administrators for cloning and to end users for
+creating studies.
+
+
+**Edit Template**
+
+To edit an existing study template:
+
+#. In the list of templates, click the Edit link for the template that
+   you choose to edit.
+#. You see the Study Template page, with the template setup that you
+   selected.
+#. Edit the template fields that you choose to change, add, or remove.
+
+**Make a Template the Default**
+
+To set a study template as the default that is applied automatically
+when new studies are created in a subnetwork:
+
+In the list of templates, click the Make Default Selection link next to
+the name of the template that you choose to set as the default for one
+or more subnetworks. A pop-up window listing the subnetworks will
+appear, and you may select the appropriate subnetworks. Each selected
+subnetwork name is then displayed in the Default column of the template
+that you set as its default.
+
+**Remove Template**
+
+To delete a study template from a dataverse:
+
+#. In the list of templates, click the Delete link for the template that
+   you choose to remove from the network.
+#. You see the Delete Template page.
+#. Click Delete to remove the template from the network. Note that you
+   cannot delete any template that is in use or is a default template at
+   the network or dataverse level.
+
+Harvesting Section
+=======================
+
+Create a New Harvesting Dataverse
+----------------------------------
+
+A harvesting dataverse allows studies from another site to be imported
+so they appear to be local, though data files remain on the remote site.
+This makes it possible to access content from data repositories and
+other sites with interesting content as long as they support the OAI or
+Nesstar protocols.
+
+Harvesting dataverses differ from ordinary dataverses in that study
+content cannot be edited since it is provided by a remote source. Most
+dataverse functions still apply including editing the dataverse name,
+branding, and setting permissions.
+
+Aside from providing the usual name, alias, and affiliation information,
+creating a harvesting dataverse involves specifying the harvest
+protocol, OAI or Nesstar, the remote server URL, possibly format and set
+information, whether or how to register incoming studies, an optional
+harvest schedule, and permissions settings.
+
+To create a harvesting dataverse navigate to the Create a New Harvesting
+Dataverse page:
+
+``Network home page > Options page > Harvesting tab > Harvesting Dataverses subtab > "Create Harvesting Dataverse" link``
+
+Complete the form by entering required information and click Save.
+
+An example dataverse configured to harvest studies native to the Harvard Dataverse Network:
+
+- **Harvesting Type:** OAI Server
+- **Dataverse Name:** Test IQSS Harvest
+- **Dataverse Alias:** testiqss
+- **Dataverse Affiliation:** Our Organization
+- **Server URL:** `http://dvn.iq.harvard.edu/dvn/OAIHandler <http://dvn.iq.harvard.edu/dvn/OAIHandler>`__
+- **Harvesting Set:** No Set (harvest all)
+- **Harvesting Format:** DDI
+- **Handle Registration:** Do not register harvested studies (studies must already have a handle)
+
+Manage Harvesting
+--------------------
+
+Harvesting is a background process: once initiated, either directly or
+via a timer, it conducts a transaction with a remote server and exits
+without user intervention. Depending on site policy and the update
+frequency of remote content, this could happen daily, weekly, or
+on demand. To determine what happened, use the Manage Harvesting
+Dataverses table on the Options page.
+
+To manage harvesting dataverses, navigate to the **Manage Harvesting
+Dataverses** table:
+
+``Network home page > Options page > Harvesting tab > Harvesting Dataverses subtab > Manage Harvesting Dataverses table``
+
+The Manage Harvesting table displays all harvesting dataverses, their
+schedules, and harvest results in table form. The name of each
+harvesting dataverse is a link to that harvesting dataverse's
+configuration page. The schedule, if configured, is displayed along with
+a button to enable or disable the schedule. The last attempt and its
+result are displayed, along with the last non-zero result; it is
+possible for a harvest to check for updates and find none. A Run Now button
+provides on-demand harvesting and a Remove link deletes the harvesting
+dataverse.
+
+Note: the first time a dataverse is harvested, the entire catalog is
+harvested. This may take some time to complete, depending on size.
+Subsequent harvests check only for additions and updates.
+
+Harvest failures can be investigated by examining the import and server
+logs for the timeframe and dataverse in question.
+
+Schedule Study Exports
+------------------------
+
+Sharing studies programmatically or in batch, such as by harvesting,
+requires the study metadata to be exported in a commonly understood
+format. As export is a background process requiring no user
+intervention, it is common practice to run it on a schedule so that
+updated information is captured.
+
+Our export process generates DDI, Dublin Core, MARC, and FGDC formats,
+though DDI and Dublin Core are the most commonly used. Be aware that
+different formats contain different amounts of information, with DDI
+being the most complete because it is our native format.
+
+To schedule study exports, navigate to the Harvesting Settings subtab:
+
+``Network home page > Options page > Harvesting tab > Settings subtab > Export Schedule``
+
+First enable export, then choose a frequency: daily (specifying the
+hour of day) or weekly (specifying the day of week). Click Save and you
+are finished.
+
+To disable, just choose Disable export and Save.
+
+Manage OAI Harvesting Sets
+-----------------------------
+
+By default, a client harvesting from the Dataverse Network that does not
+specify a set would fetch all unrestricted, locally owned
+studies - in other words public studies that were not harvested
+from elsewhere. For various reasons it might be desirable to define sets
+of studies for harvest such as by owner, or to include a set that was
+harvested from elsewhere. This is accomplished using the Manage OAI
+Harvesting Sets table on the Options page.
+
+The Manage OAI Harvesting Sets table lists all currently defined OAI
+sets, their specifications, and edit, create, and delete functionality.
+
+To manage OAI harvesting sets, navigate to the Manage OAI Harvesting
+Sets table:
+
+``Network home page > Options page > Harvesting tab > OAI Harvesting Sets subtab > Manage OAI Harvesting Sets table``
+
+To create an OAI set, click Create OAI Harvesting Set, complete the
+required fields and Save. The essential parameter that defines the set
+is the Query Definition. This is a search query using `Lucene
+syntax <http://lucene.apache.org/java/3_0_0/queryparsersyntax.html>`__
+whose results populate the set.
+
+Once created, a set can later be edited by clicking on its name.
+
+To delete a set, click the appropriately named Delete Set link.
+
+To test the query results before creating an OAI set, a recommended
+approach is to create a :ref:`dynamic study
+collection <manage-collections>` using the
+proposed query and view the collection contents. Both features use the
+same `Lucene
+syntax <http://lucene.apache.org/java/3_0_0/queryparsersyntax.html>`__
+but a study collection provides a convenient way to confirm the results.
+
+Generally speaking, basic queries take the form of a study metadata
+field:value pair. Examples include:
+
+- ``globalId:"hdl 1902 1 10684" OR globalId:"hdl 1902 1 11155"``: Include studies with global ids hdl:1902.1/10684 and
+  hdl:1902.1/11155
+- ``authority:1902.2``: Include studies whose authority is 1902.2. Different authorities usually represent different sources such
+  as IQSS, ICPSR, etc.
+- ``dvOwnerId:184``: Include all studies belonging to dataverse with database id 184 
+- ``studyNoteType:"DATAPASS"``: Include all studies that were tagged with or include the text DATAPASS in their study note field.
+
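+Terms can also be combined with the Boolean operators supported by the
+Lucene syntax. A hypothetical example (the field names are taken from
+the list below; the values are illustrative):
+
+- ``authorAffiliation:"Harvard" AND keywordValue:"elections"``: Include studies with an author affiliated with Harvard that are tagged with the keyword elections.
+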
+**Study Metadata Search Terms:**
+
+| title
+| subtitle
+| studyId
+| otherId
+| authorName
+| authorAffiliation
+| producerName
+| productionDate
+| fundingAgency
+| distributorName
+| distributorContact
+| distributorContactAffiliation
+| distributorContactEmail
+| distributionDate
+| depositor
+| dateOfDeposit
+| seriesName
+| seriesInformation
+| studyVersion
+| relatedPublications
+| relatedMaterial
+| relatedStudy
+| otherReferences
+| keywordValue
+| keywordVocabulary
+| topicClassValue
+| topicClassVocabulary
+| abstractText
+| abstractDate
+| timePeriodCoveredStart
+| timePeriodCoveredEnd
+| dateOfCollection
+| dateOfCollectionEnd
+| country
+| geographicCoverage
+| geographicUnit
+| unitOfAnalysis
+| universe
+| kindOfData
+| timeMethod
+| dataCollector
+| frequencyOfDataCollection
+| samplingProcedure
+| deviationsFromSampleDesign
+| collectionMode
+| researchInstrument
+| dataSources
+| originOfSources
+| characteristicOfSources
+| accessToSources
+| dataCollectionSituation
+| actionsToMinimizeLoss
+| controlOperations
+| weighting
+| cleaningOperations
+| studyLevelErrorNotes
+| responseRate
+| samplingErrorEstimate
+| otherDataAppraisal
+| placeOfAccess
+| originalArchive
+| availabilityStatus
+| collectionSize
+| studyCompletion
+| confidentialityDeclaration
+| specialPermissions
+| restrictions
+| contact
+| citationRequirements
+| depositorRequirements
+| conditions
+| disclaimer
+| studyNoteType
+| studyNoteSubject
+| studyNoteText
+
+.. _edit-lockss-harvest-settings:
+
+Edit LOCKSS Harvest Settings
+-----------------------------
+
+**Summary:**
+
+`LOCKSS Project <http://lockss.stanford.edu/lockss/Home>`__ or *Lots
+of Copies Keep Stuff Safe* is an international initiative based at
+Stanford University Libraries that provides a way to inexpensively
+collect and preserve copies of authorized e-content. It does so using an
+open source, peer-to-peer, decentralized server infrastructure. In order
+to make a LOCKSS server crawl, collect and preserve content from a Dataverse Network,
+both the server (the LOCKSS daemon) and the client (the Dataverse Network) sides must
+be properly configured. In simple terms, the LOCKSS server needs to be
+pointed at the Dataverse Network, given its location and instructions on what to
+crawl; the Dataverse Network needs to be configured to allow the LOCKSS daemon to
+access the data. The section below describes the configuration tasks
+that the Dataverse Network administrator will need to do on the client side. It does
+not describe how LOCKSS works and what it does in general; it's a fairly
+complex system, so please refer to the documentation on the `LOCKSS Project <http://lockss.stanford.edu/lockss/Home>`__\  site for more
+information. Some information intended for a LOCKSS server administrator
+is available in the `"Using LOCKSS with Dataverse Network (DVN)"
+<http://guides.thedata.org/book/h-using-lockss-dvn>`__ section of the
+`Dataverse Network Installers Guide <http://guides.thedata.org/book/installers-guides>`__
+(our primary sysadmin-level manual).
+
+**Configuration Tasks:**
+
+Note that neither the standard LOCKSS Web Crawler nor the OAI plugin
+can properly harvest materials from a Dataverse Network.  A custom LOCKSS plugin
+developed and maintained by the Dataverse Network project is available here:
+`http://lockss.hmdc.harvard.edu/lockss/plugin/DVNOAIPlugin.jar <http://lockss.hmdc.harvard.edu/lockss/plugin/DVNOAIPlugin.jar>`__.
+For more information on the plugin, please see the `"Using LOCKSS with
+Dataverse Network (DVN)" <http://guides.thedata.org/book/h-using-lockss-dvn>`__ section of
+the Dataverse Network Installers Guide. In order for a LOCKSS daemon to collect DVN
+content designated for preservation, an Archival Unit must be created
+with the plugin above. On the Dataverse Network side, a Manifest must be created that
+gives the LOCKSS daemon permission to collect the data. This is done by
+completing the "LOCKSS Settings" section of the:
+``Network Options -> Harvesting -> Settings tab.``
+
+For the Dataverse Network, LOCKSS can be configured at the network level
+for the entire site and also locally at the dataverse level. The network
+level enables LOCKSS harvesting, but more restrictive policies, including
+disabling harvesting, can be configured by each dataverse. A dataverse
+cannot enable LOCKSS harvesting if it has not first been enabled at the
+network level.
+
+This "Edit LOCKSS Harvest Settings" section refers to the network level
+LOCKSS configuration.
+
+To enable LOCKSS harvesting at the network level do the following:
+
+- Navigate to the LOCKSS Settings page: ``Network home page -> Network Options -> Harvesting -> Settings``.
+- Fill in the harvest information including the level of harvesting allowed (Harvesting Type, Restricted Data Files), the scope
+  of harvest by choosing a predefined OAI set, then if necessary a list of servers or domains allowed to harvest.
+- It's important to understand that when a LOCKSS daemon is authorized
+  to "crawl restricted files", this does not by itself grant the actual
+  access to the materials! This setting only specifies that the daemon
+  should not be skipping such restricted materials outright. (The idea
+  behind this is that in an archive with large amounts of
+  access-restricted materials, if only public materials are to be
+  preserved by LOCKSS, lots of crawling time can be saved by instructing
+  the daemon to skip non-public files, instead of having it try to access
+  them and get 403/Permission Denied). If it is indeed desired to have
+  non-public materials collected and preserved by LOCKSS, it is the
+  responsibility of the DVN Administrator to give the LOCKSS daemon
+  permission to access the files. As of DVN version 3.3, this can only be
+  done based on the IP address of the LOCKSS server (by creating an
+  IP-based user group with the appropriate permissions).
+- Next select any licensing options or enter additional terms, and click "Save Changes". 
+- Once LOCKSS harvesting has been enabled, the LOCKSS Manifest page will
+  be provided by the application. This manifest is read by LOCKSS servers
+  and constitutes agreement to the specified terms. The URL for the
+  network-level LOCKSS manifest is
+  ``http://<YOUR SERVER>/dvn/faces/ManifestPage.xhtml`` (it will be
+  needed by the LOCKSS server administrator in order to configure an
+  *Archive Unit* for crawling and preserving the DVN).
+
+Settings Section
+==================
+
+Edit Name
+-----------------
+
+The name of your Dataverse Network installation is displayed at the top
+of the Network homepage, and as a link at the top of each dataverse
+homepage in your Network.
+
+To create or change the name of your Network, navigate to the Settings
+tab on the Options page:
+
+``Network home page > Options page > Settings tab > General subtab > Network Name``
+
+Enter a descriptive title for your Network. There are no naming
+restrictions, but it appears in the heading of every dataverse in your
+Network, so a short name works best.
+
+Click Save and you are done!
+
+Edit Layout Branding
+-------------------------
+
+When you install a Network, there is no banner or footer on any page in
+the Network. You can apply any style to the Network pages, such as that
+used on your organization's website. You can use plain text, HTML,
+JavaScript, and style tags to define your custom banner and footer. If
+your website has such elements as a navigation menu or images, you can
+add them to your Network pages.
+
+To customize the layout branding of your Network, navigate to the
+Customization subtab on the Options page:
+
+Network home page > Options page > Settings tab > Customization subtab >
+Edit Layout Branding
+
+Enter your banner and footer content in the Custom Banner and Custom
+Footer fields and Save.
+
+See :ref:`Layout Branding Tips <edit-layout-branding>` for guidelines.
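+
+For illustration, a minimal custom banner might look like the following
+sketch (the organization name, URL, and colors are placeholders, not
+values the application expects):
+
+.. code-block:: guess
+
+	<!-- A hypothetical banner: a colored bar linking back to the
+	     organization's main website. -->
+	<div style="background-color: #336699; padding: 10px;">
+	    <a href="http://www.example.edu/" style="color: #ffffff;">
+	        Example University Dataverse Network
+	    </a>
+	</div>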
+
+Edit Description
+---------------------
+
+By default your Network homepage has the following description:
+``A description of your Dataverse Network or announcements may be added here. Use Network Options to edit or remove this text.``
+You can edit that text to describe or announce such things as new
+Network features, new dataverses, or maintenance activities. You can
+also disable the description so that it does not appear on the homepage.
+
+To manage the Network description, navigate to:
+
+``Network home page > Options page > Settings tab > General subtab > Network Description``
+
+Create a description by entering your desired content in the text box.
+HTML, JavaScript, and style tags are permitted. The ``html`` and
+``body`` element types are not allowed. Next enable the description
+display by checking the Enable Description in Homepage checkbox. Click
+Save and you're done. You can disable the display of the description but
+keep the content by unchecking and saving.
+
+Edit Dataverse Requirements
+----------------------------
+
+Enforcing a minimum set of requirements can help ensure content
+consistency.
+
+When you enable dataverse requirements, newly created dataverses cannot
+be made public or released until the selected requirements are met.
+Existing dataverses are not affected until they are edited. Edits to
+existing dataverses cannot be saved until requirements are met.
+
+To manage the requirements, navigate to:
+
+``Network home page > Options page > Settings tab > Advanced subtab > Release Dataverse Requirements``
+
+Available requirements include:
+
+-  Require Network Homepage Dataverse Description
+-  Require Dataverse Affiliation
+-  Require Dataverse Classification
+-  Require Dataverse Studies included prior to release
+
+Manage E-Mail Notifications
+---------------------------
+
+The Dataverse Network sends notifications via email for a number of
+events on the site, including workflow events such as creating a
+dataverse, uploading files, releasing a study, etc. Many of these
+notifications are sent to the user initiating the action as well as to
+the network administrator. Additionally, the Report Issue link on the
+network home page sends email to the network administrator. By default,
+this email is sent to
+`support@thedata.org <mailto:support@thedata.org>`__.
+
+To change this email address navigate to the Options page:
+
+``Network home page > Options page > Settings tab > General subtab > E-Mail Address(es)``
+
+Enter the address of network administrators who should receive these
+notifications and Save.
+
+Please note that the Report Issue link, when accessed within a
+dataverse, gives the option of sending notification to the network or
+the dataverse administrator. Configuring the dataverse administrator
+address is done at the dataverse level:
+``(Your) Dataverse home page > Options page > Settings tab > General subtab > E-Mail Address(es)``
+
+Enable Twitter
+---------------------
+
+If your Dataverse Network has been configured for Automatic Tweeting,
+you will see an option listed as "Enable Twitter." When you click this,
+you will be redirected to Twitter to authorize the Dataverse Network
+application to send tweets for you.
+
+To manage the Dataverse Twitter configuration, navigate to:
+
+``Dataverse home page > Options page > Settings tab > Promote Your Dataverse subtab > Sync Dataverse With Twitter``
+
+Once authorized, tweets will be sent for each new dataverse that is
+released.
+
+To disable Automatic Tweeting, go to the options page, and click
+"Disable Twitter."
+
+Terms Section
+=================
+
+Edit Terms for Account Creation
+--------------------------------
+
+You can set up Terms of Use that require users with new accounts to
+accept your terms before logging in for the first time.
+
+To configure these terms navigate to the Options page:
+
+``Network home page > Options page > Permissions tab > Terms subtab > Account Term of Use``
+
+Enter your required terms as you would like them to appear to users.
+HTML, JavaScript, and style tags are permitted. The ``html`` and
+``body`` element types are not allowed. Check Enable Terms of Use to
+display these terms. Click Save and you are finished. To disable but
+preserve your current terms, uncheck the Enable checkbox and save.
+
+Edit Terms for Study Creation
+-------------------------------
+
+You can set up Terms of Use for the Network that require users to accept
+your terms before they can create or modify studies, including adding
+data files. These terms are defined at the network level so they apply
+across all dataverses. Users will be presented with these terms the
+first time they attempt to modify or create a study during each session.
+
+To configure these terms of use navigate to the Options page:
+
+``Network home page > Options page > Permissions tab > Terms subtab > Deposit Term of Use``
+
+Enter your terms as you would like to display them to the user. HTML,
+JavaScript, and style tags are permitted. The ``html`` and ``body``
+element types are not allowed. Check Enable Terms of Use and save.
+Uncheck Enable Terms of Use and save to disable but preserve existing
+terms of use.
+
+Edit Terms for File Download
+-----------------------------
+
+You can set up Terms of Use for the Network that require users to accept
+your terms before they can download or subset files from the Network.
+Since this is defined at the network level it applies to all dataverses.
+Users will be presented with these terms the first time they attempt to
+download a file or access the subsetting and analysis page each session.
+
+To configure these terms, navigate to the Options page:
+
+``Network home page > Options page > Permissions tab > Terms subtab > Download Term of Use``
+
+Enter the terms as you want them to appear to the user. HTML,
+JavaScript, and style tags are permitted. The ``html`` and ``body``
+element types are not allowed. Check Enable Terms of Use and save.
+Unchecking the checkbox and saving disables the display of the terms but
+preserves the current content.
+
+Download Tracking Data
+----------------------------
+
+You can view any guestbook responses that have been made in all
+dataverses. Beginning with version 3.2 of Dataverse Network, for any
+dataverse where the guestbook is not enabled, data will be collected
+silently, based on the logged-in user or anonymously. The data displayed
+includes user account data or the session ID of an anonymous user; the
+global ID, study title, and filename of the file downloaded; the time of
+the download; the type of download; and any custom questions that have
+been answered. (The username/session ID and download type were not
+collected in version 3.1 of DVN.) A comma-separated values file of
+all download tracking data may be downloaded by clicking the Export
+Results button.
+
+To manage the Network download tracking data, navigate to:
+
+``Network home page > Options page > Permissions tab > Download Tracking Data subtab > Manage Download Tracking Data table``
+
+Permissions and Users Section
+==============================
+
+Manage Network Permissions
+---------------------------------------
+
+Permissions that are configured at the network level include:
+
+-  Enabling users to create an account when they create a dataverse.
+-  Granting privileged roles to existing users including network
+   administrator and dataverse creator.
+-  Changing and revoking privileged roles of existing users.
+
+Enabling users to create an account when they create a dataverse
+displays a "Create a Dataverse" link on the network home page. New and
+unregistered users coming to the site can click on this link, create an
+account and a dataverse in one workflow rather than taking two separate
+steps involving the network administrator.
+
+Granting a user account network administrator status gives that user
+full control over the application as managed through the UI.
+
+Granting a user account dataverse creator status is largely a legacy
+function, since any user who creates a dataverse acquires this role.
+
+To manage these permissions, navigate to the Manage Network Permissions
+table on the Options page:
+
+``Network home page > Options page > Permissions tab > Permissions subtab > Manage Network Permissions table``
+
+Enable account creation along with dataverse creation by checking that
+option and saving.
+
+Granting privileged status to a user requires entering a valid, existing
+user name, clicking add, choosing the role, then saving changes.
+
+Roles by Version State Table
+------------------------------
+
++---------------------+-----------+----------------+------------------+------------------+---------------------+
+|                     | **Role**  |                |                  |                  |                     |
++=====================+===========+================+==================+==================+=====================+
+| **Version State**   | None      | Contributor +, | Curator          | Admin*           | Network Admin**     |
+|                     |           | ++             |                  |                  |                     |
++---------------------+-----------+----------------+------------------+------------------+---------------------+
+| Draft               |           | E,E2,D3,S,V    | E,E2,P,T,D3,R,V  | E,E2,P,T,D3,R,V  | E,E2,P,T,D3,D2,R,V  |
++---------------------+-----------+----------------+---+--------------+------------------+---------------------+
+| In Review           |           | E,E2,D3,V      | E,E2,P,T,D3,R,V  | E,E2,P,T,D3,R,V  | E,E2,P,T,D3,R,D2,V  |
++---------------------+-----------+----------------+------------------+------------------+---------------------+
+| Released            |  V        | E,V            | E,P,T,D1,V       | E,P,T,D1,V       | E,P,T,D2,D1,V       |
++---------------------+-----------+----------------+------------------+------------------+---------------------+
+|  Archived           |  V        | V              | P,T,V            | P,T,V            | P,T,D2,V            |
++---------------------+-----------+----------------+------------------+------------------+---------------------+
+|  Deaccessioned      |           |                | P,T,R2,V         | P,T,R2,V         | P,T,R2,D2,V         |
++---------------------+-----------+----------------+------------------+------------------+---------------------+
+
+
+**Legend:**
+
+E = Edit (Cataloging info, File metadata, Add files)
+
+E2 = Edit Study Version Notes
+
+D1 = Deaccession
+
+P = Permission
+
+T = Create Template
+
+D2 = Destroy
+
+D3 = Delete Draft, Delete Review Version
+
+S = Submit for Review
+
+R = Release
+
+R2 = Restore
+
+V = View
+
+
+
+**Notes:**
+
+\* Same as Curator
+
+\*\* Same as Curator + D2
+
+\+ Contributor actions (E, D3, S, V) depend on the dataverse permission
+settings: a contributor role can act on their own studies (the default)
+or on all studies in a dataverse, and registered users can become
+contributors and act on their own studies or on all studies in a
+dataverse.
+
+\+\+ A contributor is defined either as a user with the contributor role
+or as any registered user in a dataverse that allows all registered
+users to contribute.
+
+
+
+Authorization to access Terms-protected files via the API
+--------------------------------------------------------------------
+
+As of DVN v. 3.2, a programmatic API has been provided for accessing DVN
+materials. It supports Basic HTTP Auth where the client authenticates
+itself as an existing DVN (or anonymous) user. Based on this, the API
+determines whether the client has permission to access the requested
+files or metadata. It is important to remember however, that in addition
+to access permissions, DVN files may also be subject to "Terms of Use"
+agreements. When access to such files is attempted through the Web
+Download or Subsetting interfaces, the user is presented with an
+agreement form. The API however is intended for automated clients, so
+the remote party's compliance with the Terms of Use must be established
+beforehand. **We advise you to have a written agreement with authorized
+parties before allowing them to access data sets, bypassing the Terms of
+Use. The authorized party should be responsible for enforcing the Terms
+of Use to their end users.**\ Once such an agreement has been
+established, you can grant the specified user unrestricted access to
+Terms-protected materials on the Network home page > Options page >
+PERMISSIONS tab > Permissions subtab, in the "Authorize Users to bypass
+Terms of Use" section.
+
+Please consult the Data Sharing section of the Guide for additional
+information on the :ref:`Data Sharing API <data-sharing-api>`.
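+
+For illustration, an authorized client still authenticates with
+standard Basic HTTP Auth. A minimal sketch using ``curl``, assuming the
+API is rooted at ``/dvn/api/`` (the exact call paths are documented in
+the Data Sharing API section):
+
+.. code-block:: guess
+
+	# A sketch only: replace <API call> with an actual Data Sharing API
+	# path, and supply the credentials of the authorized DVN account.
+	curl -u authorizeduser:password "http://<YOUR SERVER>/dvn/api/<API call>"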
+
+Create Account
+--------------------
+
+There are several ways to create accounts: at the network level by the
+network administrator, at the dataverse level by the dataverse
+administrator, and by the new user themselves if the option to create an
+account when creating a dataverse is enabled.
+
+Accounts created by all methods are equivalent with the exception of
+granting dataverse creator status during the create a dataverse
+workflow. That status can be granted afterwards by the network
+administrator if necessary.
+
+To create an account at the **network admin level**, navigate to the Create
+Account page from the Options page:
+
+``Network home page > Options page > Permissions tab > Users subtab > Create User link > Create Account page``
+
+Complete the required information denoted by the red asterisk and save. 
+Note: an email address can also be used as a username.
+
+
+Manage Users
+-------------------
+
+The Manage Users table gives the network administrator a list of all
+user accounts in table form. It lists username, full name, roles
+(including the dataverse at which each role is granted), and the current
+status, whether active or deactivated.
+
+Usernames are listed alphabetically and clicking on a username takes you
+to the account page that contains detailed information on that account.
+It also provides the ability to update personal details and change
+passwords.
+
+The Manage Users table also provides the ability to deactivate a user
+account.
+
+To view the Manage Users table navigate to the Options page:
+
+``Network home page > Options page > Permissions tab > Users subtab > Manage Users table``
+
+Manage Groups
+--------------------
+
+Groups in the Dataverse Network are a way to identify collections of
+users so permissions can be applied collectively rather than
+individually. This allows controlling permissions for individuals by
+altering membership in the group without affecting permissions of other
+members. Groups can be defined by user names or IP addresses.
+
+The Manage Groups table lists information about existing groups in table
+form, including name, display (friendly) name, and group membership.
+
+Clicking on the name takes you to the Edit Group page where the group's
+configuration can be changed. It is also possible to create and delete
+groups from the Manage Groups table.
+
+To view the Manage Groups table, navigate to the Options page:
+
+``Network home page > Options page > Permissions tab > Groups subtab > Manage Groups table``
+
+Once on the Groups subtab, viewing the Manage Groups table, you can
+create or delete a group.
+
+When creating a group you must choose whether to identify users by
+username (a Username Group) or by IP address (an IP User Group).
+
+With a Username Group, enter an existing username into the edit box,
+click the "+" symbol to enter additional users, then save.
+
+With an IP User Group, enter an IP address or domain name into the edit
+box. Wildcards can be used by specifying an asterisk (\*) in place of an
+IP address octet (e.g., 10.20.30.\*), or for the sub-domain or host
+portion of the domain name (e.g., \*.mydomain.edu).
+
+Lastly, an optional special feature of the IP User Group is support for
+an Affiliate Login Service. Effectively, this allows the use of a proxy
+to access the Dataverse Network on behalf of a group, such as a
+University Library where identification and authorization of users is
+managed by their proxy service. To enable this feature, enter IP
+addresses of any proxy servers that will access Dataverse Network, check
+This IP group has an affiliate login service, enter the Affiliate Name
+as it will appear on the Dataverse Network Login page, and the Affiliate
+URL which would go to the proxy server. Save and you are finished.
+
+Utilities
+===========
+
+The Dataverse Network provides the network administrator with tools to
+manually execute background processes, perform functions in batch, and
+resolve occasional operational issues.
+
+Navigate to the Utilities from the Options page:
+
+``Network home page > Options page > Utilities tab``
+
+Available tools include:
+
+- **Study Utilities** - Create draft versions of studies, release file locks, and delete multiple studies by inputting IDs.
+- **Index Utilities** - Create a search index. 
+- **Export Utilities** - Select files and export them. 
+- **Harvest Utilities** - Harvest selected studies from another Network. 
+- **File Utilities** - Select files and apply the JHOVE file validation process to them. 
+- **Import Utilities** - Import multiple study files by using this custom batch process.
+- **Handle Utilities** - Register and re-register study handles.
+
+**Study Utilities**
+
+Curating a large group of studies sometimes requires direct database
+changes affecting a large number of studies that may belong to different
+dataverses. An example might be changing the distributor name and logo
+or the parent dataverse. Since the Dataverse Network employs study
+versioning, it was decided that any such backend changes should
+increment the affected studies' versions. However, incrementing a study's
+version is nontrivial to do as a direct database update, so this utility
+was created to produce a draft of an existing study.
+
+The practice would involve generating a list of the database IDs of
+studies that need changing, using the utility to create drafts of those
+studies, then running the database update scripts. The result is new, unreleased draft
+versions of studies with modifications made directly through the
+database. These studies would then need to be reviewed and released
+manually.
+
+Due to the transactional nature of study updates, particularly when
+uploading large files, it is possible a study update is interrupted such
+as during a system restart. When this occurs, the study lock, created to
+prevent simultaneous updates while one is already in progress, remains
+and the study cannot be edited until it is cleared.
+
+Checking for this condition and clearing it is easy. Open this utility,
+check if any locks are listed and remove them. The user should once
+again be able to edit their study.
+
+The user interface provides a convenient way to delete individual
+studies but when faced with deleting a large number of studies that do
+not conveniently belong to a single dataverse, use the Delete utility.
+
+Specify studies by their database ID singly, as a comma-separated list
+(1,7,200, etc.), or as a hyphen-separated range (1-1000, 2005,
+2500-2700).
+
+**Index Utilities**
+
+Indexing is the process of making study metadata searchable. The Lucene
+search engine used by the Dataverse Network uses file-based indexes.
+Normally, any time a study or new study version is released the study
+information is automatically indexed. Harvesting also indexes studies in
+small batches as they are harvested. Sometimes this does not occur, such
+as when the harvest process is interrupted. The index could also become
+corrupt for some reason though this would be extremely rare.
+
+The index utility allows for reindexing of studies, dataverses, and the
+entire site. Studies and dataverses can be specified by their database
+IDs singly, in a comma-separated list, or in a hyphenated range: 1-1000.
+Use index all sparingly, particularly if you have a large site. This is
+a single transaction and should not be interrupted or you will need to
+start again. A more flexible approach is to determine the lowest and
+highest study ID's and index in smaller ranges: 1-1000, 1001-2000, etc.
+
+Note: if for some reason a study change was not indexed, an automatic
+background process that runs once every 24 hours will detect this,
+inform the administrator, and reindex the study, so manual reindexing
+is not required.
+
+**Export Utilities**
+
+Export is a background process that normally runs once every 24 hours.
+Its purpose is to produce study metadata files in well known formats
+such as DDI, DC, MIF, and FGDC that can be used to import studies to
+other systems such as through harvesting.
+
+Sometimes it's useful to manually export a study, dataverse, any updated
+studies, or all studies. Studies and dataverses are specified by
+database id rather than global id or handle.
+
+Export is tied to OAI set creation and Harvesting. To enable harvesting
+of a subset of studies by another site, first an OAI set is created that
+defines the group of studies. Next, the scheduled export runs and
+creates the export files if they're not already available. It also
+associates those studies defined by the set with the set name so future
+requests for the set receive updates — additions or deletions from the
+set. This way remote sites harvesting the set maintain an updated study
+list.
+
+If you do not want to wait 24 hours to test harvest a newly created set,
+use the export utility. Click "Run Export" to export any changed studies
+and associate studies to the set. Exporting studies or dataverses alone
+will not associate studies to a set, in those cases Update Harvest
+Studies must also be run.
+
+**Harvest Utilities**
+
+The Harvest utility allows for on-demand harvesting of a single study.
+First select one of the predefined harvesting dataverses which provide
+remote server connection information as well as the local dataverse
+where the study will be harvested to. Specify the harvest ID of the
+study to be harvested. The harvest id is particular to the study and
+server being harvested from. It can be obtained from the OAI protocol
+ListIdentifiers command, from the harvest log if previously harvested,
+or if from another DVN it takes the form: <OAI set alias>//<global id>.
+A Dataverse Network study with ``globalID: hdl:1902.1/10004``, from the OAI
+set "My Set", having alias "myset", would have a harvest identifier of:
+``myset//hdl:1902.1/10004``
+
+**File Utilities**
+
+The Dataverse Network attempts to identify file types on upload to
+provide more information to an end user. It does this by calling a file
+type identification library called JHOVE. Though JHOVE is a very
+comprehensive library, sometimes a file type may not be recognized or is
+similar to another type and misidentified. For these cases we provide an
+override mechanism — a list of file extensions and a brief text
+description. Since these are created after the files have been uploaded,
+this file utility provides a way to re-identify the file types, and it
+can limit this process to specific file types or to studies specified
+by database ID singly, as a comma-separated list, or as a
+hyphen-separated range.
+
+**Import Utilities**
+
+Importing studies usually is done by harvesting study metadata from a
+remote site via the OAI protocol. This causes study metadata to be
+hosted locally but files are served by the remote server. The Import
+utility is provided for cases where an OAI server is unavailable or
+where the intent is to relocate studies and their files to the Dataverse
+Network.
+
+At present this requires the help of the network administrator and can
+be manually intensive. First, study metadata may need to be modified
+slightly, then saved in a specific directory structure on the server file
+system. Next, the study metadata import format and destination dataverse
+are chosen. Last, the top-level directory where the study metadata and
+files are stored is specified, and "Batch Import" is clicked. Because the
+DDI input format can be quite complex and usage varies, verify that the
+results are what's intended.
+
+A single study import function is also provided as a test for importing
+your study's metadata syntax but is not meant for actual import. It will
+not import associated files.
+
+Before performing a batch import, you must organize your files in the
+following manner:
+
+#. If you plan to import multiple files or studies, create a master
+   directory to hold all content that you choose to import.
+#. Create a separate subdirectory for each study that you choose to
+   import.
+   The directory name is not important.
+#. In each directory, place a file called ``study.xml`` and use that
+   file to hold the XML-formatted record for one study.
+   Note: Do not include file description elements in
+   the ``study.xml`` file. Including those fields results in the
+   addition of multiple blank files to that study.
+#. Also place in the directory any additional files that you choose to
+   upload for that study.
+
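+For example, a batch containing two studies might be organized as
+follows (all directory and data file names other than ``study.xml`` are
+arbitrary):
+
+.. code-block:: guess
+
+	import_batch/              master directory, any name
+	    study_one/             one subdirectory per study
+	        study.xml          XML-formatted record for this study
+	        data_file.tab      additional files to upload with the study
+	        codebook.pdf
+	    study_two/
+	        study.xml
+	        survey_data.csv
+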
+For an example of a simple study DDI, refer to the :ref:`Metadata References <metadata-references>`
+section.
+
+**Handle Utilities**
+
+When a study is created, the global ID is first assigned, then
+registered with handle.net as a persistent identifier. This identifier
+becomes part of the study's citation and is guaranteed to always resolve
+to the study. For the study with global ID hdl:1902.1/16598, or handle
+1902.1/16598, the URL in the citation would be:
+`http://hdl.handle.net/1902.1/16598 <http://hdl.handle.net/1902.1/16598>`__.
+
+If for any reason a study is created and not registered or is registered
+in a way that needs to be changed, use the Handle utility to either
+register currently unregistered studies or to re-register all registered
+studies.
+
+Web Statistics
+===============
+
+The Dataverse Network provides the capability to compile and analyze
+site usage through Google Analytics. A small amount of code is embedded
+in each page so when enabled, any page access along with associated
+browser and user information is recorded by Google. Later analysis of
+this compiled access data can be performed using the `Google Analytics <http://www.google.com/analytics/>`__ utility.
+
+Note: Access to Google Analytics is optional. If access to this utility
+is not configured for your network, in place of the Manage Web Usage
+menu option is a message
+stating: ``Google Analytics are not configured for this Network.``
+
+**To enable Google Analytics:**
+
+#. Create a Gmail account.
+#. Go to `Google Analytics <http://www.google.com/analytics/>`__ and create a profile for the server or website domain. You will
+   be assigned a Web Property ID.
+#. Using the Glassfish Admin console, add a JVM option and assign it the value of the newly assigned Web Property ID (see the example command after this list):
+   ``-Ddvn.googleanalytics.key=<Web Property ID>``
+#. Restart Glassfish.
+#. It takes about 24 hours after installation and set up of this option for tracking data to become available for use.
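+
+As an alternative to the Admin console, the same JVM option can be
+created from the command line with the standard ``asadmin`` utility. A
+minimal sketch, assuming the default Glassfish domain and a placeholder
+Web Property ID:
+
+.. code-block:: guess
+
+	# Add the JVM option; replace UA-XXXXXXXX-1 with the Web Property ID
+	# assigned by Google Analytics.
+	asadmin create-jvm-options "-Ddvn.googleanalytics.key=UA-XXXXXXXX-1"
+
+	# Restart Glassfish so the new option takes effect.
+	asadmin restart-domain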
+
+Note: Google provides the code necessary for tracking. This has already
+been embedded into the Dataverse Network but not the Web Property ID.
+That is configured as a JVM option by the network admin when enabling
+this feature.
+
+**To view Web Statistics, navigate to:**
+
+- Network home page > Options page > Settings tab > General subtab > Web Statistics
+- You will be redirected to `Google Analytics <http://www.google.com/analytics/>`__. Log in using your Gmail account used to
+  create the profile.
+
+
+Appendix
+++++++++
+
+Additional documentation complementary to Users Guides.
+
+Control Card-Based Data Ingest
+=====================
+
+As of version 2.2 the DVN supports ingesting plain text data files, in
+addition to SPSS and STATA formats. This allows users and institutions
+to ingest raw data into Dataverse Networks without having to purchase
+and maintain proprietary, commercial software packages.
+
+Tab-delimited and CSV files are supported. In order to ingest a plain
+data file, an additional file containing the variable metadata needs to
+be supplied.
+
+**Two Metadata Types Are Supported**
+
+#. A simplified format based on the classic SPSS control card syntax;
+   this appears as "CSV/SPSS" in the menu on the Add Files page.
+#. DDI, an XML format from the Data Documentation Initiative
+   consortium. Choose "TAB/DDI" to ingest a tab file with a DDI metadata sheet.
+
+The specifics of the formats are documented in the two sections below.
+
+
+
+.. _controlcard-datafile-ingest:
+
+CSV Data, SPSS-style Control Card
+------------------------------
+
+Unlike other supported “subsettable” formats, this ingest mechanism
+requires 2 files: the CSV raw data file proper and an SPSS Setup file
+("control card") with the data set metadata. In the future, support for
+other data definition formats may be added (STATA, SAS, etc.). As
+always, user feedback is welcome.
+
+**The supported SPSS command syntax:**
+
+Please note that it is not our goal to attempt to support any set of
+arbitrary SPSS commands and/or syntax variations. The goal is to enable
+users who do not own proprietary statistical software to prepare their
+raw data for DVN ingest, using a select subset of SPSS data definitional
+syntax.
+
+(In addition to its simplicity and popularity, we chose to use the SPSS
+command syntax because Dataverse Network already has support for the SPSS ``.SAV`` and ``.POR`` formats, so we have a good working knowledge of the SPSS formatting
+conventions.)
+
+The following SPSS commands are supported:
+
+| ``DATA LIST``
+| ``VARIABLE LABELS``
+| ``NUMBER OF CASES``
+| ``VALUE LABELS``
+| ``FORMATS`` (actually, not supported as of now -- see below)
+| ``MISSING VALUES``
+
+We support mixed cases and all the abbreviations of the above commands
+that are valid under SPSS. For example, both "var labels" and "Var Lab"
+are acceptable commands.
+
+Individual command syntax:
+
+**1. DATA LIST**
+
+An explicit delimiter definition is required. For example:
+
+``DATA LIST LIST(',')``
+
+specifies ``','`` as the delimiter. This line is followed by the ``'/'``
+separator and variable definitions. Explicit type definitions are
+required. Each variable is defined by a name/type pair
+``VARNAME (VARTYPE)``, where ``VARTYPE`` is a standard SPSS
+Fortran-style type definition.
+
+**Note** that this is the only **required** section. The minimum
+amount of metadata required to ingest a raw data file is the delimiter
+character, the names of the variables and their data type. All of these
+are defined in the ``DATA LIST`` section. Here’s an example of a
+complete, valid control card:
+
+``DATA LIST LIST(',')``
+``CASEID (f) NAME (A) RATIO (f)``
+``.``
+
+It defines a comma-separated file with 3 variables named ``CASEID``,
+``NAME`` and ``RATIO``, two of them of the types numeric and one character
+string.
+
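+A matching comma-separated data file for this control card might look
+like the following (the values are illustrative):
+
+.. code-block:: guess
+
+	1,ALICE,0.25
+	2,BOB,1.75
+	3,CAROL,12.4
+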
+Examples of valid type definitions:
+
+| **A8** 8 byte character string;
+| **A** character string;
+| **f10.2** numeric value, 10 decimal digits, with 2 fractional digits;
+| **f8** defaults to F8.0
+| **F** defaults to F.0, i.e., numeric integer value
+| **2** defaults to F.2, i.e., numeric float value with 2 fractional digits.
+
+The following SPSS date/time types are supported:
+
+==============  =======================
+type            format
+==============  =======================
+``DATE``        ``yyyy-MM-dd``
+``DATETIME``    ``yyyy-MM-dd HH:mm:ss``
+==============  =======================
+
+The variable definition pairs may be separated by any combination of
+white space characters and newlines. **Wrapped-around lines must start
+with white spaces** (i.e., newlines must be followed by spaces). The
+list must be terminated by a line containing a single dot.
+
+Please note that the actual date values should be stored in the CSV
+file as strings, in the formats above, as opposed to how SPSS stores the
+types of the same name (as integer numbers of seconds).
+
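+For instance, a hypothetical control card fragment declaring a numeric
+ID and a ``DATE`` variable:
+
+.. code-block:: guess
+
+	DATA LIST LIST(',')
+	CASEID (f) BDATE (DATE)
+	.
+
+A matching line in the CSV data file would then be ``1,1968-07-20``,
+with the date stored as a string in the ``yyyy-MM-dd`` format.
+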
+**2. VARIABLE LABELS**
+
+Simple name/value pairs, separated by any combination of white space
+characters and newlines (as described in section 1 above). The list is
+terminated by a single dot.
+
+For example:
+
+| ``VARIABLE LABELS``
+| ``CELLS "Subgroups for sample-see documentation"``
+| ``STRATA "Cell aggregates for sample"``
+| ``.``
+
+**3. NUMBER OF CASES (optional)**
+
+The number of cases may be explicitly specified. For example:
+
+``num of cases 1000``
+
+When the number of cases is specified, it will be checked against the
+number of observations actually found in the CSV file, and a mismatch
+would result in an ingest error.
+
+**4. VALUE LABELS**
+
+Each value label section is a variable name followed by a list of
+value/label pairs, terminated by a single "/" character. The list of
+value label sections is terminated by a single dot.
+
+For example,
+
+| ``VALUE labels``
+| ``FOO 0 "NADA"``
+| ``1 "NOT MUCH"``
+| ``99999999 "A LOT"``
+| ``/``
+| ``BAR 97 "REFUSAL"``
+| ``98 "DONT KNOW"``
+| ``99 "MISSING"``
+| ``/``
+| ``.``
+
+**5. FORMATS**
+
+This command is actually redundant if you explicitly supply the variable
+formats in the ``DATA LIST`` section above.
+
+**NOTE:** It appears that the only reason the ``FORMATS`` command exists is
+that ``DATA LIST`` syntax does not support explicit fortran-style format
+definitions when fixed-field data is defined. So it is in fact redundant
+when we're dealing with delimited files only.
+
+Please supply valid, fortran-style variable formats in the ``DATA
+LIST`` section, as described above.
+
+**6. MISSING VALUES**
+
+This is a space/newline-separated list of variable names, each followed
+by a comma-separated list of missing value definitions in parentheses.
+For example:
+
+| ``INTVU4 (97, 98, 99)``
+
+The list is terminated with a single dot.
+
+An example of a valid ``MISSING VALUES`` control card section:
+
+| ``MISSING VALUES``
+| ``INTVU4 (97, 98, 99)``
+| ``INTVU4A ('97', '98', '99')``
+| ``.``
+
+| **An example of a control card ready for ingest:**
+
+.. code-block:: guess
+
+	data list list(',') /
+	  CELLS (2)  STRATA (2)  WT2517 (2)
+	  SCRNRID (f) CASEID (f)  INTVU1 (f)
+	  INTVU2 (f)  INTVU3 (f)  INTVU4 (f)
+	  INTVU4A (A)
+	  .
+	VARIABLE LABELS
+	  CELLS "Subgroups for sample-see documentation"
+	  STRATA "Cell aggregates for sample-see documenta"
+	  WT2517 "weight for rep. sample-see documentation"
+	  SCRNRID "SCREENER-ID"
+	  CASEID "RESPONDENT'S CASE ID NUMBER"
+	  INTVU1 "MONTH RESPONDENT BEGAN INTERVIEW"
+	  INTVU2 "DAY RESPONDENT BEGAN INTERVIEW"
+	  INTVU3 "HOUR RESPONDENT BEGAN INTERVIEW"
+	  INTVU4 "MINUTE RESPONDENT BEGAN INTERVIEW"
+	  INTVU4A "RESPONDENT INTERVIEW BEGAN AM OR PM"
+	  .
+	VALUE labels
+	  CASEID   99999997 "REFUSAL"
+					  99999998 "DONT KNOW"
+					  99999999 "MISSING"
+					  /
+	  INTVU1   97 "REFUSAL"
+					  98 "DONT KNOW"
+					  99 "MISSING"
+					  /
+	  INTVU2   97 "REFUSAL"
+					  98 "DONT KNOW"
+					  99 "MISSING"
+					  /
+	  INTVU3   97 "REFUSAL"
+					  98 "DONT KNOW"
+					  99 "MISSING"
+					  /
+	  INTVU4   97 "REFUSAL"
+					  98 "DONT KNOW"
+					  99 "MISSING"
+					  /
+	  INTVU4A "97" "REFUSAL"
+					  "98" "DONT KNOW"
+					  "99" "MISSING"
+					  "AM" "MORNING"
+					  "PM" "EVENING"
+	  .
+	MISSING VALUES
+	  CASEID (99999997, 99999998, 99999999)
+	  INTVU1 (97, 98, 99)
+	  INTVU2 (97, 98, 99)
+	  INTVU3 (97, 98, 99)
+	  INTVU4 (97, 98, 99)
+	  INTVU4A ('97', '98', '99')
+	  .
+	NUMBER of CASES 2517
+
+**DATA FILE.**
+
+Data must be stored in a text file, one observation per line. Both DOS
+and Unix new line characters are supported as line separators. On each
+line, individual values must be separated by the delimiter character
+defined in the ``DATA LIST`` section. There must be exactly (``NUMBER OF
+VARIABLES - 1``) delimiter characters per line; i.e., character values
+must not contain the delimiter character.
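+
+For example, the first few lines of a comma-delimited data file
+matching the control card above could look like this (the values are
+invented, for illustration purposes only):
+
+.. code-block:: text
+
+    110,22,1.5,101,99999001,3,19,12,57,AM
+    110,23,1.5,102,99999002,3,19,13,05,PM
+    120,24,0.8,103,99999003,3,20,09,30,AM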
+
+**QUESTIONS, TODOS:**
+
+Is there any reason we may want to support the ``RECODE`` command as well?
+
+--- comments, suggestions are welcome! ---
+
+.. _ddixml-datafile-ingest:
+
+Tab Data, with DDI Metadata
+---------------------------
+
+As of version 2.2, another method of ingesting raw TAB-delimited data
+files has been added to the Dataverse Network. Similarly to the SPSS control
+card-based ingest (also added in this release), this ingest mechanism
+requires 2 files: the TAB raw data file itself and the data set metadata
+in the DDI/XML format.
+
+**Intended use case:**
+
+Similarly to the SPSS syntax-based ingest, the goal is to provide
+another method of ingesting raw quantitative data into the DVN, without
+having to first convert it into one of the proprietary, commercial
+formats, such as SPSS or STATA. Please note that in our design
+scenario, the DDI files supplying the ingest metadata are expected to
+be machine-generated, by some software tool, script, etc. In other
+words, this method is targeted towards more of an institutional user:
+perhaps another data archive with large quantities of data, some
+institutional knowledge of its structure, and some resources to
+invest into developing an automated tool to generate the metadata
+describing the datasets, with the final goal of ingesting all the data
+into a DVN by another automated, batch process. The DVN project is also
+considering developing a standalone tool of our own that would guide
+users through the process of gathering the information describing their
+data sets and producing properly formatted DDIs ready to be ingested.
+
+For now, if you are merely looking for a way to ingest a single
+"subsettable" data set, you should definitely be able to create a
+working DDI by hand to achieve this goal. However, we strongly recommend
+that you instead consider the CSV/SPSS control card method, which was
+designed with this use case in mind. If anything, it will take
+considerably fewer keystrokes to create an SPSS-syntax control card than
+a DDI encoding the same amount of information.
+
+**The supported DDI syntax:**
+
+You can consult the DDI project for complete information on the DDI
+metadata (`http://icpsr.umich.edu/DDI <http://icpsr.umich.edu/DDI>`__).
+However, only a small subset of the published format syntax is used for
+ingesting individual data sets. Of the 7 main DDI sections, only 2,
+fileDscr and dataDscr, are used. Inside these sections, only a select set
+of fields, those that have direct equivalents in the DVN data set
+structure, are supported.
+
+These fields are outlined below. All the fields are mandatory, unless
+specified otherwise. An XSD schema of the format subset is also
+provided, for automated validation of machine-generated XML.
+
+.. code-block:: xml
+
+		<?xml version="1.0" encoding="UTF-8"?>
+		<codeBook xmlns="http://www.icpsr.umich.edu/DDI">
+		<fileDscr>
+			<fileTxt ID="file1">
+					<dimensns>
+							<caseQnty>NUMBER OF OBSERVATIONS</caseQnty>
+							<varQnty>NUMBER OF VARIABLES</varQnty>
+					</dimensns>
+			</fileTxt>
+		</fileDscr>
+		<dataDscr>
+			<!-- var section for a discrete numeric variable: -->
+			<var ID="v1.1" name="VARIABLE NAME" intrvl="discrete" >
+					<location fileid="file1"/>
+					<labl level="variable">VARIABLE LABEL</labl>
+					<catgry>
+							<catValu>CATEGORY VALUE</catValu>
+					</catgry>
+				<!-- ... (1 or more category sections are allowed for discrete variables) -->
+					<varFormat type="numeric" />
+			</var>
+		   <!-- var section for a continuous numeric variable: -->
+			<var ID="v1.2" name="VARIABLE NAME" intrvl="contin" >
+					<location fileid="file1"/>
+					<labl level="variable">VARIABLE LABEL</labl>
+					<varFormat type="numeric" />
+			</var>
+		   <!-- var section for a character (string) variable: -->
+			<var ID="v1.10" name="VARIABLE NAME" intrvl="discrete" >
+					<location fileid="file1"/>
+					<labl level="variable">VARIABLE LABEL</labl>
+					<varFormat type="character" />
+			</var>
+			<!-- a discrete variable with missing values defined: -->
+		</dataDscr>
+		</codeBook>
+
+
+--- comments, suggestions are welcome! ---
+
+.. _spss-datafile-ingest:
+
+SPSS Data File Ingest
+=====================
+
+Ingesting SPSS (.por) files with extended labels
+------------------------------------------------
+
+This feature has been added to work around the limit on the length of
+variable labels in SPSS Portable (.por) files. To use this
+feature, select "SPSS/POR,(w/labels)" from the list of file types on
+the AddFiles page. You will be prompted to first upload a text file
+containing the extended, "long" versions of the labels, and then
+upload the .por file. The label text file should contain one
+TAB-separated variable name/variable label pair per line.
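+
+For example, a label text file could look like this, with a single TAB
+character separating each variable name from its label (the names and
+labels below are invented, for illustration):
+
+.. code-block:: text
+
+    INTVU1	The month in which the respondent began the interview
+    INTVU2	The day of the month on which the respondent began the interview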
+
+.. _r-datafile-ingest:
+
+Ingest of R (.RData) files
+==========================
+
+Overview.
+---------
+
+Support for ingesting R data files has been added in version 3.5. R
+has become increasingly popular in the research/academic community,
+owing to the fact that it is free and open-source (unlike SPSS and
+STATA). Consequently, more and more data is becoming available
+exclusively in RData format. This long-awaited feature makes it
+possible to ingest such data into DVN as "subsettable" files.
+
+Requirements.
+-------------
+
+R ingest relies on R having been installed, configured and made
+available to the DVN application via RServe (see the Installers
+Guide). This is in contrast to the SPSS and Stata ingest, which can
+be performed without R present (though R is still needed to perform
+most subsetting/analysis tasks on the resulting data files).
+
+The data must be formatted as an R data frame (created with
+data.frame() in R). If an .RData file contains multiple data frames,
+only the first one will be ingested.
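+
+For example, an ingest-ready .RData file could be produced in R as
+follows (a minimal sketch; the file and variable names are invented):
+
+.. code-block:: r
+
+    # assemble the data as a data frame - the only R structure
+    # supported by the ingest:
+    mydata <- data.frame(
+        caseid = c(101, 102, 103),
+        age    = c(34, 27, 51),
+        group  = c("A", "B", "C"),
+        stringsAsFactors = FALSE
+    )
+
+    # save it; if the file contains more than one data frame,
+    # only the first one will be ingested:
+    save(mydata, file = "mydata.RData")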
+
+Data Types, compared to other supported formats (Stata, SPSS)
+--------------------------------------------------------------
+
+Integers, Doubles, Character strings
+************************************
+
+The handling of these types is intuitive and straightforward. The
+resulting tab file columns, summary statistics and UNF signatures
+should be identical to those produced by ingesting the same vectors
+from SPSS and Stata.
+
+**A couple of features that are unique to R/new in DVN:** 
+
+R explicitly supports Missing Values for all of the types above;
+Missing Values encoded in R vectors will be recognized and preserved
+in TAB files (as 'NA'), and taken into account in the generated summary
+statistics and data analysis.
+
+In addition to Missing Values, R recognizes "Not a Number" (NaN) and
+positive and negative infinity for floating point values. These
+are now properly supported by the DVN.
+
+Also note that, unlike Stata, where "float" and "double" are supported
+as distinct data types, all floating point values in R are double
+precision.
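+
+For example, all of the following can appear in the same numeric
+vector, and will be recognized on ingest (a minimal sketch):
+
+.. code-block:: r
+
+    # a double vector with a missing value, "Not a Number",
+    # and positive/negative infinity:
+    x <- c(1.5, NA, NaN, Inf, -Inf)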
+
+R Factors 
+*********
+
+These are ingested as "Categorical Values" in the DVN. 
+
+One thing to keep in mind: in both Stata and SPSS, the actual value of
+a categorical variable can be either character or numeric. In R, all
+factor values are strings, even if they are string representations of
+numbers. So the values of the resulting categoricals in the DVN will
+always be of string type too.
+
+**New:** To properly handle *ordered factors* in R, the DVN now supports the concept of an "Ordered Categorical" - a categorical value where an explicit order is assigned to the list of value labels.
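+
+For example (a sketch):
+
+.. code-block:: r
+
+    # a regular factor: the values ingested will be the strings
+    # "MALE" and "FEMALE", even though the source data is numeric
+    sex <- factor(c(1, 2, 2, 1), labels = c("MALE", "FEMALE"))
+
+    # an ordered factor: the explicit order of the levels will be
+    # preserved as an "Ordered Categorical" in the DVN
+    grade <- factor(c("low", "high", "medium"),
+                    levels = c("low", "medium", "high"),
+                    ordered = TRUE)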
+
+(New!) Boolean values
+*********************
+
+R Boolean (logical) values are supported. 
+
+
+Limitations of R data format, as compared to SPSS and STATA. 
+************************************************************
+
+Most noticeably, R lacks a standard mechanism for defining descriptive
+labels for the data frame variables.  In the DVN, similarly to
+both Stata and SPSS, variables have distinct names and labels, with
+the latter reserved for longer, descriptive text.
+With variables ingested from R data frames, the variable name will be
+used for both the "name" and the "label".
+
+*Optional R packages exist for providing descriptive variable labels;
+support for such a mechanism may be added in one of the future
+versions. It would, of course, work only for R files that were
+created with such optional packages.*
+
+Similarly, R categorical values (factors) lack descriptive labels too.
+**Note:** This is potentially confusing, since R factors do
+actually have "labels".  This is a matter of terminology - an R
+factor's label is in fact the same thing as the "value" of a
+categorical variable in SPSS or Stata and DVN; it contains the actual
+meaningful data for the given observation. It is NOT a field reserved
+for explanatory, human-readable text, as is the case with the
+SPSS/Stata "label". 
+
+Ingesting an R factor with the level labels "MALE" and "FEMALE" will
+produce a categorical variable with "MALE" and "FEMALE" as both the
+values and the labels.
+
+
+Time values in R
+----------------
+
+This warrants a dedicated section of its own, because of some unique
+ways in which time values are handled in R.
+
+R makes an effort to treat a time value as a real time instance. This
+is in contrast with either SPSS or Stata, where time value
+representations such as "Sep-23-2013 14:57:21" are allowed; note that
+in the absence of an explicitly defined time zone, this value cannot
+be mapped to an exact point in real time.  R handles times in the
+"Unix-style" way: the value is converted to the
+"seconds-since-the-Epoch" Greenwitch time (GMT or UTC) and the
+resulting numeric value is stored in the data file; time zone
+adjustments are made in real time as needed.
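+
+For example, the stored numeric value can be examined directly in R (a
+minimal sketch):
+
+.. code-block:: r
+
+    timevalue <- as.POSIXct("2013-03-19 12:57:00", tz = "UTC")
+    # the stored value is the number of seconds elapsed since
+    # 1970-01-01 00:00:00 UTC:
+    as.numeric(timevalue)
+    # [1] 1363697820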
+
+Things get ambiguous and confusing when R **displays** this time
+value: unless the time zone was explicitly defined, R will adjust the
+value to the current time zone. The resulting behavior is often
+counter-intuitive: if you create a time value, for example::
+
+   timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS");
+
+on a computer configured for the San Francisco time zone, the value
+will be differently displayed on computers in different time zones;
+for example, as "12:57 PST" while still on the West Coast, but as
+"15:57 EST" in Boston.
+
+If it is important that the values are always displayed the same way,
+regardless of the current time zone, it is recommended that the time
+zone is explicitly defined. For example::
+
+   attr(timevalue,"tzone")<-"PST"
+
+or::
+
+   timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS", tz="PST");
+
+Now the value will always be displayed as "12:57 PST", regardless of
+the time zone that is current for the OS ... **BUT ONLY** if the OS
+where R is installed actually understands the time zone "PST", which
+is not by any means guaranteed! Otherwise, it will **quietly adjust**
+the stored GMT value to **the current time zone**, yet still
+display it with the "PST" tag attached! One way to rephrase this is
+that R does a fairly decent job **storing** time values in a
+non-ambiguous, platform-independent manner - but gives no guarantee that 
+the values will be displayed in any way that is predictable or intuitive. 
+
+In practical terms, it is recommended to use the long/descriptive
+forms of time zones, as they are more likely to be properly recognized
+on most computers. For example, "Japan" instead of "JST".  Another possible
+solution is to explicitly use GMT or UTC (since it is very likely to be
+properly recognized on any system), or the "UTC+<OFFSET>" notation. Still, none of the above
+**guarantees** proper, non-ambiguous handling of time values in R data
+sets. The fact that R **quietly** modifies time values when it doesn't
+recognize the supplied timezone attribute, yet still appends it to the
+**changed** time value, does make it quite difficult. (These issues are
+discussed in depth on R-related forums, and no attempt is made to
+summarize them all in any depth here; this is just to make you aware
+that this is a potentially complex issue!)
+
+An important thing to keep in mind, in connection with the DVN ingest
+of R files, is that it will **reject** an R data file with any time
+values that have time zones that we can't recognize. This is done in
+order to avoid some of the potential issues outlined above.
+
+It is also recommended that any vectors containing time values
+ingested into the DVN are reviewed, and the resulting entries in the
+TAB files are compared against the original values in the R data
+frame, to make sure they have been ingested as expected. 
+
+Another **potential issue** here is the **UNF**. The way the UNF
+algorithm works, the same date/time values with and without the
+timezone (e.g. "12:45" vs. "12:45 EST") **produce different
+UNFs**. Considering that time values in Stata/SPSS do not have time
+zones, but ALL time values in R do (yes, they all do - if the timezone
+wasn't defined explicitly, it implicitly becomes a time value in the
+"UTC" zone!), this means that it is **impossible** to have 2 time
+value vectors, in Stata/SPSS and R, that produce the same UNF.
+
+**A pro tip:** if it is important to produce SPSS/Stata and R versions of
+the same data set that result in the same UNF when ingested, you may
+define the time variables as **strings** in the R data frame, and use
+the "YYYY-MM-DD HH:mm:ss" formatting notation. This is the formatting used by the UNF
+algorithm to normalize time values, so doing the above will result in
+the same UNF as the vector of the same time values in Stata.
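+
+For example, an existing POSIXct vector could be converted into such
+strings as follows (a sketch, reusing the timevalue variable from the
+examples above):
+
+.. code-block:: r
+
+    # format the time values as strings, in the notation used by the
+    # UNF algorithm to normalize time values:
+    timestring <- format(timevalue, format = "%Y-%m-%d %H:%M:%S")
+    # e.g. "2013-03-19 12:57:00"; store timestring, rather than
+    # timevalue, in the data frame that will be saved as .RData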
+
+Note: date values (dates only, without time) should be handled the
+exact same way as those in SPSS and Stata, and should produce the same
+UNFs.
+
+.. _fits-datafile-ingest:
+
+FITS File format Ingest
+=======================
+
+This custom ingest is an experiment in branching out into a discipline
+outside of the Social Sciences. It has been added in v.3.4 as part of the
+collaboration between the IQSS and the Harvard-Smithsonian Center for
+Astrophysics. FITS is a multi-part file format for storing
+Astronomical data (http://fits.gsfc.nasa.gov/fits_standard.html). DVN
+now offers an ingest plugin that parses FITS file headers for
+key-value metadata that are extracted and made searchable.
+
+FITS is now listed on the DVN AddFiles page as a recognized file
+format. The same asynchronous process is used as for "subsettable"
+files: the processing is done in the background, with an email
+notification sent once completed.
+
+Unlike with the "subsettable" file ingest, no format conversion takes
+place and the FITS file is ingested as is, similarly to "other
+materials" files. The process is limited to the extaction of the
+searchable metadata.  Once the file is ingested and the study is
+re-indexed, these file-level FITS metadata fields can be searched on
+from the Advanced Search page, on either the Dataverse or Network
+level. Choose one of the FITS File Information fields listed in the
+drop-down, and enter the relevant search term. Search results that match
+the query will show individual files as well as studies.
+
+The ingest also generates a short summary of the file contents (number
+and type of Header-Data Units) and adds it to the file description.
+
+
+.. _metadata-references:
+
+Metadata References
+===================
+
+The Dataverse Network metadata is compliant with the `DDI schema
+version 2 <http://www.icpsr.umich.edu/DDI/>`__. The Cataloging
+Information fields associated with each study contain most of the fields
+in the study description section of the DDI. That way the Dataverse
+Network metadata can be mapped easily to a DDI, and be exported into XML
+format for preservation and interoperability.
+
+Dataverse Network data is also compliant with `Simple Dublin
+Core <http://www.dublincore.org/>`__ (DC) requirements. For imports
+only, Dataverse Network data is compliant with the `Content Standard
+for Digital Geospatial Metadata (CSDGM), Vers. 2 (FGDC-STD-001-1998) <http://www.fgdc.gov/metadata>`__ (FGDC).
+
+Attached is a PDF file that defines and maps all Dataverse Network
+Cataloging Information fields. Information provided in the file includes
+the following:
+
+- Field label - For each Cataloging Information field, the field label
+  appears first in the mapping matrix.
+
+- Description - A description of each field follows the field label.
+
+- Query term - If a field is available for use in building a query, the
+  term to use for that field is listed.
+
+- Dataverse Network database element name - The Dataverse Network
+  database element name for the field is provided.
+
+- Advanced search - If a field is available for use in an advanced
+  search, that is indicated.
+
+- DDI element mapping for imports - For harvested or imported studies,
+  the imported DDI elements are mapped to Dataverse Network fields.
+
+- DDI element mapping for exports - When a study or dataverse is
+  harvested or exported in DDI format, the Dataverse Network fields are
+  mapped to DDI elements.
+
+- DC element mapping for imports - For harvested or imported studies,
+  the imported DC elements are mapped to specific Dataverse Network
+  fields.
+
+- DC element mapping for exports - When a study or dataverse is
+  harvested or exported in DC format, specific Dataverse Network fields
+  are mapped to the DC elements.
+
+- FGDC element mapping for imports - For harvested or imported studies,
+  the imported FGDC elements are mapped to specific Dataverse Network fields.
+
+Also attached is an example of a DDI for a simple study containing
+title, author, description, keyword, and topic classification cataloging
+information fields suitable for use with batch import.
+
+|image9|
+`catalogingfields11apr08.pdf <https://github.com/IQSS/dvn/blob/develop/doc/sphinx/source/datausers-guides_files/catalogingfields11apr08.pdf>`__
+
+|image10|
+`simple\_study.xml <https://github.com/IQSS/dvn/blob/develop/doc/sphinx/source/datausers-guides_files/simple_study_1.xml>`__
+
+Zelig Interface
+===============
+
+Zelig is statistical software for everyone: researchers, instructors,
+and students. It is a front-end and back-end for R (Zelig is written in
+R). The Zelig software:
+
+- Unifies diverse theories of inference
+
+- Unifies different statistical models and notation
+
+- Unifies R packages in a common syntax
+
+Zelig is distributed under the GNU General Public License, Version 2.
+After installation, the source code is located in your R library
+directory. You can download a tarball of the latest Zelig source code
+from \ `http://projects.iq.harvard.edu/zelig <http://projects.iq.harvard.edu/zelig>`__.
+
+The Dataverse Network software uses Zelig to perform advanced
+statistical analysis functions. The current interface schema used by the
+Dataverse Network for Zelig processes is in the
+``configzeliggui.xml`` file, attached at the end of this section.
+
+**Criteria for Model Availability**
+
+Three factors determine which Zelig models are available for analysis in
+the Dataverse Network: 
+
+- Some new models require data structures and modeling parameters that
+  are not compatible with the current framework of the Dataverse Network
+  and other web-driven applications. These types of models are not
+  available in the Dataverse Network.
+
+- Models must be explicitly listed in the Zelig packages to be used in
+  the Dataverse Network, and all models must be disclosed fully, including
+  runtime errors. Zelig models that do not meet these specifications are
+  excluded from the Dataverse Network until they are disclosed with a
+  complete set of information.
+
+- An installation-based factor also can limit the Zelig models available
+  in the Dataverse Network. A minimum version (4.0) of the core software
+  package GCC must be installed on any Linux OS-based R machine used with
+  the Dataverse Network, to install and run a key Zelig package, MCMCpack.
+  If a Linux machine designated to R is used for DSB services and does not
+  have the minimum version of the GCC package installed, the Dataverse
+  Network loses at least eight of the available advanced analysis models.
+
+|image11|
+`configzeliggui.xml <https://github.com/IQSS/dvn/blob/develop/doc/sphinx/source/datausers-guides_files/configzeliggui_0.xml>`__
+
+.. |image9| image:: ./appendix-0_files/application-pdf.png
+.. |image10| image:: ./appendix-0_files/application-octet-stream.png
+.. |image11| image:: ./appendix-0_files/application-octet-stream.png
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/_sources/index.txt	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,26 @@
+.. The Dataverse Network documentation master file, created by
+   sphinx-quickstart on Thu Aug  1 10:00:58 2013.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Dataverse Network Guides
+===============================
+
+Contents:
+
+.. toctree::
+   :maxdepth: 2
+
+   dataverse-user-main
+   dataverse-installer-main
+   dataverse-developer-main
+   dataverse-api-main
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/_static/agogo.css	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,489 @@
+/*
+ * agogo.css_t
+ * ~~~~~~~~~~~
+ *
+ * Sphinx stylesheet -- agogo theme.
+ *
+ * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+* {
+  margin: 0px;
+  padding: 0px;
+}
+
+body {
+  font-family: "Verdana", Arial, sans-serif;
+  line-height: 1.4em;
+  color: black;
+  background-color: #eeeeec;
+}
+
+
+/* Page layout */
+
+div.header, div.content, div.footer {
+  width: 70em;
+  margin-left: auto;
+  margin-right: auto;
+}
+
+div.header-wrapper {
+  background: white;
+  padding-top: 10px;
+  border-top: 40px solid #000;
+  border-bottom: 4px solid #000;
+}
+
+
+/* Default body styles */
+a {
+  color: #ce5c00;
+}
+
+div.bodywrapper a, div.footer a {
+  text-decoration: underline;
+}
+
+.clearer {
+  clear: both;
+}
+
+.left {
+  float: left;
+}
+
+.right {
+  float: right;
+}
+
+.line-block {
+    display: block;
+    margin-top: 1em;
+    margin-bottom: 1em;
+}
+
+.line-block .line-block {
+    margin-top: 0;
+    margin-bottom: 0;
+    margin-left: 1.5em;
+}
+
+h1, h2, h3, h4 {
+  font-family: "Georgia", "Times New Roman", serif;
+  font-weight: normal;
+  color: #3465a4;
+  margin-bottom: .8em;
+}
+
+h1 {
+  color: #204a87;
+}
+
+h2 {
+  padding-bottom: .5em;
+  border-bottom: 1px solid #3465a4;
+}
+
+a.headerlink {
+  visibility: hidden;
+  color: #dddddd;
+  padding-left: .3em;
+}
+
+h1:hover > a.headerlink,
+h2:hover > a.headerlink,
+h3:hover > a.headerlink,
+h4:hover > a.headerlink,
+h5:hover > a.headerlink,
+h6:hover > a.headerlink,
+dt:hover > a.headerlink {
+  visibility: visible;
+}
+
+img {
+  border: 0;
+}
+
+div.admonition {
+  margin-top: 10px;
+  margin-bottom: 10px;
+  padding: 2px 7px 1px 7px;
+  border-left: 0.2em solid black;
+}
+
+p.admonition-title {
+  margin: 0px 10px 5px 0px;
+  font-weight: bold;
+}
+
+dt:target, .highlighted {
+  background-color: #fbe54e;
+}
+
+/* Header */
+
+div.header {
+  padding-top: 10px;
+  padding-bottom: 10px;
+  padding-left: 220px;
+  background: url(logo.png) no-repeat 20px 0;
+  background-size: 183px 80px;
+}
+
+div.header .headertitle {
+  font-family: "Georgia", "Times New Roman", serif;
+  font-weight: normal;
+  font-size: 180%;
+  margin-bottom: .8em;
+}
+
+div.header .headertitle a {
+  color: #000;
+}
+
+div.header div.rel {
+  margin-top: 1em;
+}
+
+div.header div.rel a {
+  color: #fcaf3e;
+  letter-spacing: .1em;
+  text-transform: uppercase;
+}
+
+p.logo {
+    float: right;
+}
+
+img.logo {
+    border: 0;
+}
+
+
+/* Content */
+div.content-wrapper {
+  background-color: white;
+  padding-top: 20px;
+  padding-bottom: 20px;
+}
+
+div.document {
+  width: 50em;
+  float: left;
+}
+
+div.body {
+  padding-right: 2em;
+  text-align: justify;
+}
+
+div.document h1 {
+  line-height: 120%;
+}
+
+div.document ul {
+  margin: 1.5em;
+  list-style-type: square;
+}
+
+div.document dd {
+  margin-left: 1.2em;
+  margin-top: .4em;
+  margin-bottom: 1em;
+}
+
+div.document .section {
+  margin-top: 1.7em;
+}
+div.document .section:first-child {
+  margin-top: 0px;
+}
+
+div.document div.highlight {
+  padding: 3px;
+  background-color: #eeeeec;
+  border-top: 2px solid #dddddd;
+  border-bottom: 2px solid #dddddd;
+  margin-top: .8em;
+  margin-bottom: .8em;
+}
+
+div.document h2 {
+  margin-top: .7em;
+}
+
+div.document p {
+  margin-bottom: .5em;
+}
+
+div.document li.toctree-l1 {
+  margin-bottom: 1em;
+}
+
+div.document .descname {
+  font-weight: bold;
+}
+
+div.document .docutils.literal {
+  background-color: #eeeeec;
+  padding: 1px;
+}
+
+div.document .docutils.xref.literal {
+  background-color: transparent;
+  padding: 0px;
+}
+
+div.document blockquote {
+  margin: 1em;
+}
+
+div.document ol {
+  margin: 1.5em;
+}
+
+div.document pre {
+  white-space: pre-wrap;       /* css-3 */
+  white-space: -moz-pre-wrap;  /* Mozilla, since 1999 */
+  white-space: -pre-wrap;      /* Opera 4-6 */
+  white-space: -o-pre-wrap;    /* Opera 7 */
+  word-wrap: break-word;       /* Internet Explorer 5.5+ */
+}
+
+/* Sidebar */
+
+div.sidebar {
+  width: 20em;
+  float: right;
+  font-size: .9em;
+}
+
+div.sidebar a, div.header a {
+  text-decoration: none;
+}
+
+div.sidebar a:hover, div.header a:hover {
+  text-decoration: underline;
+}
+
+div.sidebar h3 {
+  color: #2e3436;
+  text-transform: uppercase;
+  font-size: 130%;
+  letter-spacing: .1em;
+}
+
+div.sidebar ul {
+  list-style-type: none;
+}
+
+div.sidebar li.toctree-l1 a {
+  display: block;
+  padding: 1px;
+  border: 1px solid #dddddd;
+  background-color: #eeeeec;
+  margin-bottom: .4em;
+  padding-left: 3px;
+  color: #2e3436;
+}
+
+div.sidebar li.toctree-l2 a {
+  background-color: transparent;
+  border: none;
+  margin-left: 1em;
+  border-bottom: 1px solid #dddddd;
+}
+
+div.sidebar li.toctree-l3 a {
+  background-color: transparent;
+  border: none;
+  margin-left: 2em;
+  border-bottom: 1px solid #dddddd;
+}
+
+div.sidebar li.toctree-l4 a {
+  background-color: transparent;
+  border: none;
+  margin-left: 3em;
+  border-bottom: 1px solid #dddddd;
+}
+
+div.sidebar li.toctree-l5 a {
+  background-color: transparent;
+  border: none;
+  margin-left: 4em;
+  border-bottom: 1px solid #dddddd;
+}
+
+div.sidebar li.toctree-l2:last-child a {
+  border-bottom: none;
+}
+
+div.sidebar li.toctree-l1.current a {
+  border-right: 5px solid #fcaf3e;
+}
+
+div.sidebar li.toctree-l1.current li.toctree-l2 a {
+  border-right: none;
+}
+
+div.sidebar input[type="text"] {
+  width: 170px;
+}
+
+div.sidebar input[type="submit"] {
+  width: 30px;
+}
+
+
+/* Footer */
+
+div.footer-wrapper {
+  background: url(bgfooter.png) top left repeat-x;
+  border-top: 4px solid #babdb6;
+  padding-top: 10px;
+  padding-bottom: 10px;
+  min-height: 80px;
+}
+
+div.footer, div.footer a {
+  color: #888a85;
+}
+
+div.footer .right {
+  text-align: right;
+}
+
+div.footer .left {
+  text-transform: uppercase;
+}
+
+
+/* Styles copied from basic theme */
+
+img.align-left, .figure.align-left, object.align-left {
+    clear: left;
+    float: left;
+    margin-right: 1em;
+}
+
+img.align-right, .figure.align-right, object.align-right {
+    clear: right;
+    float: right;
+    margin-left: 1em;
+}
+
+img.align-center, .figure.align-center, object.align-center {
+  display: block;
+  margin-left: auto;
+  margin-right: auto;
+}
+
+.align-left {
+    text-align: left;
+}
+
+.align-center {
+    text-align: center;
+}
+
+.align-right {
+    text-align: right;
+}
+
+/* -- search page ----------------------------------------------------------- */
+
+ul.search {
+    margin: 10px 0 0 20px;
+    padding: 0;
+}
+
+ul.search li {
+    padding: 5px 0 5px 20px;
+    background-image: url(file.png);
+    background-repeat: no-repeat;
+    background-position: 0 7px;
+}
+
+ul.search li a {
+    font-weight: bold;
+}
+
+ul.search li div.context {
+    color: #888;
+    margin: 2px 0 0 30px;
+    text-align: left;
+}
+
+ul.keywordmatches li.goodmatch a {
+    font-weight: bold;
+}
+
+/* -- index page ------------------------------------------------------------ */
+
+table.contentstable {
+    width: 90%;
+}
+
+table.contentstable p.biglink {
+    line-height: 150%;
+}
+
+a.biglink {
+    font-size: 1.3em;
+}
+
+span.linkdescr {
+    font-style: italic;
+    padding-top: 5px;
+    font-size: 90%;
+}
+
+/* -- general index --------------------------------------------------------- */
+
+table.indextable td {
+    text-align: left;
+    vertical-align: top;
+}
+
+table.indextable dl, table.indextable dd {
+    margin-top: 0;
+    margin-bottom: 0;
+}
+
+table.indextable tr.pcap {
+    height: 10px;
+}
+
+table.indextable tr.cap {
+    margin-top: 10px;
+    background-color: #f2f2f2;
+}
+
+img.toggler {
+    margin-right: 3px;
+    margin-top: 3px;
+    cursor: pointer;
+}
+
+/* -- viewcode extension ---------------------------------------------------- */
+
+.viewcode-link {
+    float: right;
+}
+
+.viewcode-back {
+    float: right;
+    font-family: "Verdana", Arial, sans-serif;
+}
+
+div.viewcode-block:target {
+    margin: -1px -3px;
+    padding: 0 3px;
+    background-color: #f4debf;
+    border-top: 1px solid #ac9;
+    border-bottom: 1px solid #ac9;
+}
Binary file DVN-web/installer/dvninstall/doc/guides/_static/ajax-loader.gif has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/_static/basic.css	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,540 @@
+/*
+ * basic.css
+ * ~~~~~~~~~
+ *
+ * Sphinx stylesheet -- basic theme.
+ *
+ * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/* -- main layout ----------------------------------------------------------- */
+
+div.clearer {
+    clear: both;
+}
+
+/* -- relbar ---------------------------------------------------------------- */
+
+div.related {
+    width: 100%;
+    font-size: 90%;
+}
+
+div.related h3 {
+    display: none;
+}
+
+div.related ul {
+    margin: 0;
+    padding: 0 0 0 10px;
+    list-style: none;
+}
+
+div.related li {
+    display: inline;
+}
+
+div.related li.right {
+    float: right;
+    margin-right: 5px;
+}
+
+/* -- sidebar --------------------------------------------------------------- */
+
+div.sphinxsidebarwrapper {
+    padding: 10px 5px 0 10px;
+}
+
+div.sphinxsidebar {
+    float: left;
+    width: 0px;
+    margin-left: -100%;
+    font-size: 90%;
+}
+
+div.sphinxsidebar ul {
+    list-style: none;
+}
+
+div.sphinxsidebar ul ul,
+div.sphinxsidebar ul.want-points {
+    margin-left: 20px;
+    list-style: square;
+}
+
+div.sphinxsidebar ul ul {
+    margin-top: 0;
+    margin-bottom: 0;
+}
+
+div.sphinxsidebar form {
+    margin-top: 10px;
+}
+
+div.sphinxsidebar input {
+    border: 1px solid #98dbcc;
+    font-family: sans-serif;
+    font-size: 1em;
+}
+
+div.sphinxsidebar #searchbox input[type="text"] {
+    width: 170px;
+}
+
+div.sphinxsidebar #searchbox input[type="submit"] {
+    width: 30px;
+}
+
+img {
+    border: 0;
+}
+
+/* -- search page ----------------------------------------------------------- */
+
+ul.search {
+    margin: 10px 0 0 20px;
+    padding: 0;
+}
+
+ul.search li {
+    padding: 5px 0 5px 20px;
+    background-image: url(file.png);
+    background-repeat: no-repeat;
+    background-position: 0 7px;
+}
+
+ul.search li a {
+    font-weight: bold;
+}
+
+ul.search li div.context {
+    color: #888;
+    margin: 2px 0 0 30px;
+    text-align: left;
+}
+
+ul.keywordmatches li.goodmatch a {
+    font-weight: bold;
+}
+
+/* -- index page ------------------------------------------------------------ */
+
+table.contentstable {
+    width: 90%;
+}
+
+table.contentstable p.biglink {
+    line-height: 150%;
+}
+
+a.biglink {
+    font-size: 1.3em;
+}
+
+span.linkdescr {
+    font-style: italic;
+    padding-top: 5px;
+    font-size: 90%;
+}
+
+/* -- general index --------------------------------------------------------- */
+
+table.indextable {
+    width: 100%;
+}
+
+table.indextable td {
+    text-align: left;
+    vertical-align: top;
+}
+
+table.indextable dl, table.indextable dd {
+    margin-top: 0;
+    margin-bottom: 0;
+}
+
+table.indextable tr.pcap {
+    height: 10px;
+}
+
+table.indextable tr.cap {
+    margin-top: 10px;
+    background-color: #f2f2f2;
+}
+
+img.toggler {
+    margin-right: 3px;
+    margin-top: 3px;
+    cursor: pointer;
+}
+
+div.modindex-jumpbox {
+    border-top: 1px solid #ddd;
+    border-bottom: 1px solid #ddd;
+    margin: 1em 0 1em 0;
+    padding: 0.4em;
+}
+
+div.genindex-jumpbox {
+    border-top: 1px solid #ddd;
+    border-bottom: 1px solid #ddd;
+    margin: 1em 0 1em 0;
+    padding: 0.4em;
+}
+
+/* -- general body styles --------------------------------------------------- */
+
+a.headerlink {
+    visibility: hidden;
+}
+
+h1:hover > a.headerlink,
+h2:hover > a.headerlink,
+h3:hover > a.headerlink,
+h4:hover > a.headerlink,
+h5:hover > a.headerlink,
+h6:hover > a.headerlink,
+dt:hover > a.headerlink {
+    visibility: visible;
+}
+
+div.body p.caption {
+    text-align: inherit;
+}
+
+div.body td {
+    text-align: left;
+}
+
+.field-list ul {
+    padding-left: 1em;
+}
+
+.first {
+    margin-top: 0 !important;
+}
+
+p.rubric {
+    margin-top: 30px;
+    font-weight: bold;
+}
+
+img.align-left, .figure.align-left, object.align-left {
+    clear: left;
+    float: left;
+    margin-right: 1em;
+}
+
+img.align-right, .figure.align-right, object.align-right {
+    clear: right;
+    float: right;
+    margin-left: 1em;
+}
+
+img.align-center, .figure.align-center, object.align-center {
+  display: block;
+  margin-left: auto;
+  margin-right: auto;
+}
+
+.align-left {
+    text-align: left;
+}
+
+.align-center {
+    text-align: center;
+}
+
+.align-right {
+    text-align: right;
+}
+
+/* -- sidebars -------------------------------------------------------------- */
+
+div.sidebar {
+    margin: 0 0 0.5em 1em;
+    border: 1px solid #ddb;
+    padding: 7px 7px 0 7px;
+    background-color: #ffe;
+    width: 40%;
+    float: right;
+}
+
+p.sidebar-title {
+    font-weight: bold;
+}
+
+/* -- topics ---------------------------------------------------------------- */
+
+div.topic {
+    border: 1px solid #ccc;
+    padding: 7px 7px 0 7px;
+    margin: 10px 0 10px 0;
+}
+
+p.topic-title {
+    font-size: 1.1em;
+    font-weight: bold;
+    margin-top: 10px;
+}
+
+/* -- admonitions ----------------------------------------------------------- */
+
+div.admonition {
+    margin-top: 10px;
+    margin-bottom: 10px;
+    padding: 7px;
+}
+
+div.admonition dt {
+    font-weight: bold;
+}
+
+div.admonition dl {
+    margin-bottom: 0;
+}
+
+p.admonition-title {
+    margin: 0px 10px 5px 0px;
+    font-weight: bold;
+}
+
+div.body p.centered {
+    text-align: center;
+    margin-top: 25px;
+}
+
+/* -- tables ---------------------------------------------------------------- */
+
+table.docutils {
+    border: 0;
+    border-collapse: collapse;
+}
+
+table.docutils td, table.docutils th {
+    padding: 1px 8px 1px 5px;
+    border-top: 0;
+    border-left: 0;
+    border-right: 0;
+    border-bottom: 1px solid #aaa;
+}
+
+table.field-list td, table.field-list th {
+    border: 0 !important;
+}
+
+table.footnote td, table.footnote th {
+    border: 0 !important;
+}
+
+th {
+    text-align: left;
+    padding-right: 5px;
+}
+
+table.citation {
+    border-left: solid 1px gray;
+    margin-left: 1px;
+}
+
+table.citation td {
+    border-bottom: none;
+}
+
+/* -- other body styles ----------------------------------------------------- */
+
+ol.arabic {
+    list-style: decimal;
+}
+
+ol.loweralpha {
+    list-style: lower-alpha;
+}
+
+ol.upperalpha {
+    list-style: upper-alpha;
+}
+
+ol.lowerroman {
+    list-style: lower-roman;
+}
+
+ol.upperroman {
+    list-style: upper-roman;
+}
+
+dl {
+    margin-bottom: 15px;
+}
+
+dd p {
+    margin-top: 0px;
+}
+
+dd ul, dd table {
+    margin-bottom: 10px;
+}
+
+dd {
+    margin-top: 3px;
+    margin-bottom: 10px;
+    margin-left: 30px;
+}
+
+dt:target, .highlighted {
+    background-color: #fbe54e;
+}
+
+dl.glossary dt {
+    font-weight: bold;
+    font-size: 1.1em;
+}
+
+.field-list ul {
+    margin: 0;
+    padding-left: 1em;
+}
+
+.field-list p {
+    margin: 0;
+}
+
+.refcount {
+    color: #060;
+}
+
+.optional {
+    font-size: 1.3em;
+}
+
+.versionmodified {
+    font-style: italic;
+}
+
+.system-message {
+    background-color: #fda;
+    padding: 5px;
+    border: 3px solid red;
+}
+
+.footnote:target  {
+    background-color: #ffa;
+}
+
+.line-block {
+    display: block;
+    margin-top: 1em;
+    margin-bottom: 1em;
+}
+
+.line-block .line-block {
+    margin-top: 0;
+    margin-bottom: 0;
+    margin-left: 1.5em;
+}
+
+.guilabel, .menuselection {
+    font-family: sans-serif;
+}
+
+.accelerator {
+    text-decoration: underline;
+}
+
+.classifier {
+    font-style: oblique;
+}
+
+abbr, acronym {
+    border-bottom: dotted 1px;
+    cursor: help;
+}
+
+/* -- code displays --------------------------------------------------------- */
+
+pre {
+    overflow: auto;
+    overflow-y: hidden;  /* fixes display issues on Chrome browsers */
+}
+
+td.linenos pre {
+    padding: 5px 0px;
+    border: 0;
+    background-color: transparent;
+    color: #aaa;
+}
+
+table.highlighttable {
+    margin-left: 0.5em;
+}
+
+table.highlighttable td {
+    padding: 0 0.5em 0 0.5em;
+}
+
+tt.descname {
+    background-color: transparent;
+    font-weight: bold;
+    font-size: 1.2em;
+}
+
+tt.descclassname {
+    background-color: transparent;
+}
+
+tt.xref, a tt {
+    background-color: transparent;
+    font-weight: bold;
+}
+
+h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
+    background-color: transparent;
+}
+
+.viewcode-link {
+    float: right;
+}
+
+.viewcode-back {
+    float: right;
+    font-family: sans-serif;
+}
+
+div.viewcode-block:target {
+    margin: -1px -10px;
+    padding: 0 10px;
+}
+
+/* -- math display ---------------------------------------------------------- */
+
+img.math {
+    vertical-align: middle;
+}
+
+div.body div.math p {
+    text-align: center;
+}
+
+span.eqno {
+    float: right;
+}
+
+/* -- printout stylesheet --------------------------------------------------- */
+
+@media print {
+    div.document,
+    div.documentwrapper,
+    div.bodywrapper {
+        margin: 0 !important;
+        width: 100%;
+    }
+
+    div.sphinxsidebar,
+    div.related,
+    div.footer,
+    #top-link {
+        display: none;
+    }
+}
\ No newline at end of file
Binary file DVN-web/installer/dvninstall/doc/guides/_static/bgfooter.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_static/bgtop.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_static/comment-bright.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_static/comment-close.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_static/comment.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/_static/doctools.js	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,235 @@
+/*
+ * doctools.js
+ * ~~~~~~~~~~~
+ *
+ * Sphinx JavaScript utilities for all documentation.
+ *
+ * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+/**
+ * select a different prefix for underscore
+ */
+$u = _.noConflict();
+
+/**
+ * make the code below compatible with browsers without
+ * an installed firebug like debugger
+if (!window.console || !console.firebug) {
+  var names = ["log", "debug", "info", "warn", "error", "assert", "dir",
+    "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace",
+    "profile", "profileEnd"];
+  window.console = {};
+  for (var i = 0; i < names.length; ++i)
+    window.console[names[i]] = function() {};
+}
+ */
+
+/**
+ * small helper function to urldecode strings
+ */
+jQuery.urldecode = function(x) {
+  return decodeURIComponent(x).replace(/\+/g, ' ');
+};
+
+/**
+ * small helper function to urlencode strings
+ */
+jQuery.urlencode = encodeURIComponent;
+
+/**
+ * This function returns the parsed url parameters of the
+ * current request. Multiple values per key are supported,
+ * it will always return arrays of strings for the value parts.
+ */
+jQuery.getQueryParameters = function(s) {
+  if (typeof s == 'undefined')
+    s = document.location.search;
+  var parts = s.substr(s.indexOf('?') + 1).split('&');
+  var result = {};
+  for (var i = 0; i < parts.length; i++) {
+    var tmp = parts[i].split('=', 2);
+    var key = jQuery.urldecode(tmp[0]);
+    var value = jQuery.urldecode(tmp[1]);
+    if (key in result)
+      result[key].push(value);
+    else
+      result[key] = [value];
+  }
+  return result;
+};
+
+/**
+ * highlight a given string on a jquery object by wrapping it in
+ * span elements with the given class name.
+ */
+jQuery.fn.highlightText = function(text, className) {
+  function highlight(node) {
+    if (node.nodeType == 3) {
+      var val = node.nodeValue;
+      var pos = val.toLowerCase().indexOf(text);
+      if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) {
+        var span = document.createElement("span");
+        span.className = className;
+        span.appendChild(document.createTextNode(val.substr(pos, text.length)));
+        node.parentNode.insertBefore(span, node.parentNode.insertBefore(
+          document.createTextNode(val.substr(pos + text.length)),
+          node.nextSibling));
+        node.nodeValue = val.substr(0, pos);
+      }
+    }
+    else if (!jQuery(node).is("button, select, textarea")) {
+      jQuery.each(node.childNodes, function() {
+        highlight(this);
+      });
+    }
+  }
+  return this.each(function() {
+    highlight(this);
+  });
+};
+
+/**
+ * Small JavaScript module for the documentation.
+ */
+var Documentation = {
+
+  init : function() {
+    this.fixFirefoxAnchorBug();
+    this.highlightSearchWords();
+    this.initIndexTable();
+  },
+
+  /**
+   * i18n support
+   */
+  TRANSLATIONS : {},
+  PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; },
+  LOCALE : 'unknown',
+
+  // gettext and ngettext don't access this so that the functions
+  // can safely be bound to a different name (_ = Documentation.gettext)
+  gettext : function(string) {
+    var translated = Documentation.TRANSLATIONS[string];
+    if (typeof translated == 'undefined')
+      return string;
+    return (typeof translated == 'string') ? translated : translated[0];
+  },
+
+  ngettext : function(singular, plural, n) {
+    var translated = Documentation.TRANSLATIONS[singular];
+    if (typeof translated == 'undefined')
+      return (n == 1) ? singular : plural;
+    return translated[Documentation.PLURAL_EXPR(n)];
+  },
+
+  addTranslations : function(catalog) {
+    for (var key in catalog.messages)
+      this.TRANSLATIONS[key] = catalog.messages[key];
+    this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')');
+    this.LOCALE = catalog.locale;
+  },
+
+  /**
+   * add context elements like header anchor links
+   */
+  addContextElements : function() {
+    $('div[id] > :header:first').each(function() {
+      $('<a class="headerlink">\u00B6</a>').
+      attr('href', '#' + this.id).
+      attr('title', _('Permalink to this headline')).
+      appendTo(this);
+    });
+    $('dt[id]').each(function() {
+      $('<a class="headerlink">\u00B6</a>').
+      attr('href', '#' + this.id).
+      attr('title', _('Permalink to this definition')).
+      appendTo(this);
+    });
+  },
+
+  /**
+   * workaround a firefox stupidity
+   */
+  fixFirefoxAnchorBug : function() {
+    if (document.location.hash && $.browser.mozilla)
+      window.setTimeout(function() {
+        document.location.href += '';
+      }, 10);
+  },
+
+  /**
+   * highlight the search words provided in the url in the text
+   */
+  highlightSearchWords : function() {
+    var params = $.getQueryParameters();
+    var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : [];
+    if (terms.length) {
+      var body = $('div.body');
+      window.setTimeout(function() {
+        $.each(terms, function() {
+          body.highlightText(this.toLowerCase(), 'highlighted');
+        });
+      }, 10);
+      $('<p class="highlight-link"><a href="javascript:Documentation.' +
+        'hideSearchWords()">' + _('Hide Search Matches') + '</a></p>')
+          .appendTo($('#searchbox'));
+    }
+  },
+
+  /**
+   * init the domain index toggle buttons
+   */
+  initIndexTable : function() {
+    var togglers = $('img.toggler').click(function() {
+      var src = $(this).attr('src');
+      var idnum = $(this).attr('id').substr(7);
+      $('tr.cg-' + idnum).toggle();
+      if (src.substr(-9) == 'minus.png')
+        $(this).attr('src', src.substr(0, src.length-9) + 'plus.png');
+      else
+        $(this).attr('src', src.substr(0, src.length-8) + 'minus.png');
+    }).css('display', '');
+    if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) {
+        togglers.click();
+    }
+  },
+
+  /**
+   * helper function to hide the search marks again
+   */
+  hideSearchWords : function() {
+    $('#searchbox .highlight-link').fadeOut(300);
+    $('span.highlighted').removeClass('highlighted');
+  },
+
+  /**
+   * make the url absolute
+   */
+  makeURL : function(relativeURL) {
+    return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL;
+  },
+
+  /**
+   * get the current relative url
+   */
+  getCurrentURL : function() {
+    var path = document.location.pathname;
+    var parts = path.split(/\//);
+    $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() {
+      if (this == '..')
+        parts.pop();
+    });
+    var url = parts.join('/');
+    return path.substring(url.lastIndexOf('/') + 1, path.length - 1);
+  }
+};
+
+// quick alias for translations
+_ = Documentation.gettext;
+
+$(document).ready(function() {
+  Documentation.init();
+});
Binary file DVN-web/installer/dvninstall/doc/guides/_static/down-pressed.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_static/down.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_static/file.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/_static/jquery.js	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,4 @@
+/*! jQuery v1.7.1 jquery.com | jquery.org/license */
+(function(a,b){function cy(a){return f.isWindow(a)?a:a.nodeType===9?a.defaultView||a.parentWindow:!1}function cv(a){if(!ck[a]){var b=c.body,d=f("<"+a+">").appendTo(b),e=d.css("display");d.remove();if(e==="none"||e===""){cl||(cl=c.createElement("iframe"),cl.frameBorder=cl.width=cl.height=0),b.appendChild(cl);if(!cm||!cl.createElement)cm=(cl.contentWindow||cl.contentDocument).document,cm.write((c.compatMode==="CSS1Compat"?"<!doctype html>":"")+"<html><body>"),cm.close();d=cm.createElement(a),cm.body.appendChild(d),e=f.css(d,"display"),b.removeChild(cl)}ck[a]=e}return ck[a]}function cu(a,b){var c={};f.each(cq.concat.apply([],cq.slice(0,b)),function(){c[this]=a});return c}function ct(){cr=b}function cs(){setTimeout(ct,0);return cr=f.now()}function cj(){try{return new a.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}}function ci(){try{return new a.XMLHttpRequest}catch(b){}}function cc(a,c){a.dataFilter&&(c=a.dataFilter(c,a.dataType));var d=a.dataTypes,e={},g,h,i=d.length,j,k=d[0],l,m,n,o,p;for(g=1;g<i;g++){if(g===1)for(h in a.converters)typeof h=="string"&&(e[h.toLowerCase()]=a.converters[h]);l=k,k=d[g];if(k==="*")k=l;else if(l!=="*"&&l!==k){m=l+" "+k,n=e[m]||e["* "+k];if(!n){p=b;for(o in e){j=o.split(" ");if(j[0]===l||j[0]==="*"){p=e[j[1]+" "+k];if(p){o=e[o],o===!0?n=p:p===!0&&(n=o);break}}}}!n&&!p&&f.error("No conversion from "+m.replace(" "," to ")),n!==!0&&(c=n?n(c):p(o(c)))}}return c}function cb(a,c,d){var e=a.contents,f=a.dataTypes,g=a.responseFields,h,i,j,k;for(i in g)i in d&&(c[g[i]]=d[i]);while(f[0]==="*")f.shift(),h===b&&(h=a.mimeType||c.getResponseHeader("content-type"));if(h)for(i in e)if(e[i]&&e[i].test(h)){f.unshift(i);break}if(f[0]in d)j=f[0];else{for(i in d){if(!f[0]||a.converters[i+" "+f[0]]){j=i;break}k||(k=i)}j=j||k}if(j){j!==f[0]&&f.unshift(j);return d[j]}}function ca(a,b,c,d){if(f.isArray(b))f.each(b,function(b,e){c||bE.test(a)?d(a,e):ca(a+"["+(typeof e=="object"||f.isArray(e)?b:"")+"]",e,c,d)});else if(!c&&b!=null&&typeof b=="object")for(var e in b)ca(a+"["+e+"]",b[e],c,d);else d(a,b)}function b_(a,c){var d,e,g=f.ajaxSettings.flatOptions||{};for(d in c)c[d]!==b&&((g[d]?a:e||(e={}))[d]=c[d]);e&&f.extend(!0,a,e)}function b$(a,c,d,e,f,g){f=f||c.dataTypes[0],g=g||{},g[f]=!0;var h=a[f],i=0,j=h?h.length:0,k=a===bT,l;for(;i<j&&(k||!l);i++)l=h[i](c,d,e),typeof l=="string"&&(!k||g[l]?l=b:(c.dataTypes.unshift(l),l=b$(a,c,d,e,l,g)));(k||!l)&&!g["*"]&&(l=b$(a,c,d,e,"*",g));return l}function bZ(a){return function(b,c){typeof b!="string"&&(c=b,b="*");if(f.isFunction(c)){var d=b.toLowerCase().split(bP),e=0,g=d.length,h,i,j;for(;e<g;e++)h=d[e],j=/^\+/.test(h),j&&(h=h.substr(1)||"*"),i=a[h]=a[h]||[],i[j?"unshift":"push"](c)}}}function bC(a,b,c){var d=b==="width"?a.offsetWidth:a.offsetHeight,e=b==="width"?bx:by,g=0,h=e.length;if(d>0){if(c!=="border")for(;g<h;g++)c||(d-=parseFloat(f.css(a,"padding"+e[g]))||0),c==="margin"?d+=parseFloat(f.css(a,c+e[g]))||0:d-=parseFloat(f.css(a,"border"+e[g]+"Width"))||0;return d+"px"}d=bz(a,b,b);if(d<0||d==null)d=a.style[b]||0;d=parseFloat(d)||0;if(c)for(;g<h;g++)d+=parseFloat(f.css(a,"padding"+e[g]))||0,c!=="padding"&&(d+=parseFloat(f.css(a,"border"+e[g]+"Width"))||0),c==="margin"&&(d+=parseFloat(f.css(a,c+e[g]))||0);return d+"px"}function bp(a,b){b.src?f.ajax({url:b.src,async:!1,dataType:"script"}):f.globalEval((b.text||b.textContent||b.innerHTML||"").replace(bf,"/*$0*/")),b.parentNode&&b.parentNode.removeChild(b)}function bo(a){var b=c.createElement("div");bh.appendChild(b),b.innerHTML=a.outerHTML;return b.firstChild}function bn(a){var 
b=(a.nodeName||"").toLowerCase();b==="input"?bm(a):b!=="script"&&typeof a.getElementsByTagName!="undefined"&&f.grep(a.getElementsByTagName("input"),bm)}function bm(a){if(a.type==="checkbox"||a.type==="radio")a.defaultChecked=a.checked}function bl(a){return typeof a.getElementsByTagName!="undefined"?a.getElementsByTagName("*"):typeof a.querySelectorAll!="undefined"?a.querySelectorAll("*"):[]}function bk(a,b){var c;if(b.nodeType===1){b.clearAttributes&&b.clearAttributes(),b.mergeAttributes&&b.mergeAttributes(a),c=b.nodeName.toLowerCase();if(c==="object")b.outerHTML=a.outerHTML;else if(c!=="input"||a.type!=="checkbox"&&a.type!=="radio"){if(c==="option")b.selected=a.defaultSelected;else if(c==="input"||c==="textarea")b.defaultValue=a.defaultValue}else a.checked&&(b.defaultChecked=b.checked=a.checked),b.value!==a.value&&(b.value=a.value);b.removeAttribute(f.expando)}}function bj(a,b){if(b.nodeType===1&&!!f.hasData(a)){var c,d,e,g=f._data(a),h=f._data(b,g),i=g.events;if(i){delete h.handle,h.events={};for(c in i)for(d=0,e=i[c].length;d<e;d++)f.event.add(b,c+(i[c][d].namespace?".":"")+i[c][d].namespace,i[c][d],i[c][d].data)}h.data&&(h.data=f.extend({},h.data))}}function bi(a,b){return f.nodeName(a,"table")?a.getElementsByTagName("tbody")[0]||a.appendChild(a.ownerDocument.createElement("tbody")):a}function U(a){var b=V.split("|"),c=a.createDocumentFragment();if(c.createElement)while(b.length)c.createElement(b.pop());return c}function T(a,b,c){b=b||0;if(f.isFunction(b))return f.grep(a,function(a,d){var e=!!b.call(a,d,a);return e===c});if(b.nodeType)return f.grep(a,function(a,d){return a===b===c});if(typeof b=="string"){var d=f.grep(a,function(a){return a.nodeType===1});if(O.test(b))return f.filter(b,d,!c);b=f.filter(b,d)}return f.grep(a,function(a,d){return f.inArray(a,b)>=0===c})}function S(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function K(){return!0}function J(){return!1}function n(a,b,c){var d=b+"defer",e=b+"queue",g=b+"mark",h=f._data(a,d);h&&(c==="queue"||!f._data(a,e))&&(c==="mark"||!f._data(a,g))&&setTimeout(function(){!f._data(a,e)&&!f._data(a,g)&&(f.removeData(a,d,!0),h.fire())},0)}function m(a){for(var b in a){if(b==="data"&&f.isEmptyObject(a[b]))continue;if(b!=="toJSON")return!1}return!0}function l(a,c,d){if(d===b&&a.nodeType===1){var e="data-"+c.replace(k,"-$1").toLowerCase();d=a.getAttribute(e);if(typeof d=="string"){try{d=d==="true"?!0:d==="false"?!1:d==="null"?null:f.isNumeric(d)?parseFloat(d):j.test(d)?f.parseJSON(d):d}catch(g){}f.data(a,c,d)}else d=b}return d}function h(a){var b=g[a]={},c,d;a=a.split(/\s+/);for(c=0,d=a.length;c<d;c++)b[a[c]]=!0;return b}var c=a.document,d=a.navigator,e=a.location,f=function(){function J(){if(!e.isReady){try{c.documentElement.doScroll("left")}catch(a){setTimeout(J,1);return}e.ready()}}var e=function(a,b){return new e.fn.init(a,b,h)},f=a.jQuery,g=a.$,h,i=/^(?:[^#<]*(<[\w\W]+>)[^>]*$|#([\w\-]*)$)/,j=/\S/,k=/^\s+/,l=/\s+$/,m=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,n=/^[\],:{}\s]*$/,o=/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,p=/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,q=/(?:^|:|,)(?:\s*\[)+/g,r=/(webkit)[ \/]([\w.]+)/,s=/(opera)(?:.*version)?[ \/]([\w.]+)/,t=/(msie) ([\w.]+)/,u=/(mozilla)(?:.*? 
rv:([\w.]+))?/,v=/-([a-z]|[0-9])/ig,w=/^-ms-/,x=function(a,b){return(b+"").toUpperCase()},y=d.userAgent,z,A,B,C=Object.prototype.toString,D=Object.prototype.hasOwnProperty,E=Array.prototype.push,F=Array.prototype.slice,G=String.prototype.trim,H=Array.prototype.indexOf,I={};e.fn=e.prototype={constructor:e,init:function(a,d,f){var g,h,j,k;if(!a)return this;if(a.nodeType){this.context=this[0]=a,this.length=1;return this}if(a==="body"&&!d&&c.body){this.context=c,this[0]=c.body,this.selector=a,this.length=1;return this}if(typeof a=="string"){a.charAt(0)!=="<"||a.charAt(a.length-1)!==">"||a.length<3?g=i.exec(a):g=[null,a,null];if(g&&(g[1]||!d)){if(g[1]){d=d instanceof e?d[0]:d,k=d?d.ownerDocument||d:c,j=m.exec(a),j?e.isPlainObject(d)?(a=[c.createElement(j[1])],e.fn.attr.call(a,d,!0)):a=[k.createElement(j[1])]:(j=e.buildFragment([g[1]],[k]),a=(j.cacheable?e.clone(j.fragment):j.fragment).childNodes);return e.merge(this,a)}h=c.getElementById(g[2]);if(h&&h.parentNode){if(h.id!==g[2])return f.find(a);this.length=1,this[0]=h}this.context=c,this.selector=a;return this}return!d||d.jquery?(d||f).find(a):this.constructor(d).find(a)}if(e.isFunction(a))return f.ready(a);a.selector!==b&&(this.selector=a.selector,this.context=a.context);return e.makeArray(a,this)},selector:"",jquery:"1.7.1",length:0,size:function(){return this.length},toArray:function(){return F.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this[this.length+a]:this[a]},pushStack:function(a,b,c){var d=this.constructor();e.isArray(a)?E.apply(d,a):e.merge(d,a),d.prevObject=this,d.context=this.context,b==="find"?d.selector=this.selector+(this.selector?" ":"")+c:b&&(d.selector=this.selector+"."+b+"("+c+")");return d},each:function(a,b){return e.each(this,a,b)},ready:function(a){e.bindReady(),A.add(a);return this},eq:function(a){a=+a;return a===-1?this.slice(a):this.slice(a,a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(F.apply(this,arguments),"slice",F.call(arguments).join(","))},map:function(a){return this.pushStack(e.map(this,function(b,c){return a.call(b,c,b)}))},end:function(){return this.prevObject||this.constructor(null)},push:E,sort:[].sort,splice:[].splice},e.fn.init.prototype=e.fn,e.extend=e.fn.extend=function(){var a,c,d,f,g,h,i=arguments[0]||{},j=1,k=arguments.length,l=!1;typeof i=="boolean"&&(l=i,i=arguments[1]||{},j=2),typeof i!="object"&&!e.isFunction(i)&&(i={}),k===j&&(i=this,--j);for(;j<k;j++)if((a=arguments[j])!=null)for(c in a){d=i[c],f=a[c];if(i===f)continue;l&&f&&(e.isPlainObject(f)||(g=e.isArray(f)))?(g?(g=!1,h=d&&e.isArray(d)?d:[]):h=d&&e.isPlainObject(d)?d:{},i[c]=e.extend(l,h,f)):f!==b&&(i[c]=f)}return i},e.extend({noConflict:function(b){a.$===e&&(a.$=g),b&&a.jQuery===e&&(a.jQuery=f);return e},isReady:!1,readyWait:1,holdReady:function(a){a?e.readyWait++:e.ready(!0)},ready:function(a){if(a===!0&&!--e.readyWait||a!==!0&&!e.isReady){if(!c.body)return setTimeout(e.ready,1);e.isReady=!0;if(a!==!0&&--e.readyWait>0)return;A.fireWith(c,[e]),e.fn.trigger&&e(c).trigger("ready").off("ready")}},bindReady:function(){if(!A){A=e.Callbacks("once memory");if(c.readyState==="complete")return setTimeout(e.ready,1);if(c.addEventListener)c.addEventListener("DOMContentLoaded",B,!1),a.addEventListener("load",e.ready,!1);else if(c.attachEvent){c.attachEvent("onreadystatechange",B),a.attachEvent("onload",e.ready);var b=!1;try{b=a.frameElement==null}catch(d){}c.documentElement.doScroll&&b&&J()}}},isFunction:function(a){return 
e.type(a)==="function"},isArray:Array.isArray||function(a){return e.type(a)==="array"},isWindow:function(a){return a&&typeof a=="object"&&"setInterval"in a},isNumeric:function(a){return!isNaN(parseFloat(a))&&isFinite(a)},type:function(a){return a==null?String(a):I[C.call(a)]||"object"},isPlainObject:function(a){if(!a||e.type(a)!=="object"||a.nodeType||e.isWindow(a))return!1;try{if(a.constructor&&!D.call(a,"constructor")&&!D.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}var d;for(d in a);return d===b||D.call(a,d)},isEmptyObject:function(a){for(var b in a)return!1;return!0},error:function(a){throw new Error(a)},parseJSON:function(b){if(typeof b!="string"||!b)return null;b=e.trim(b);if(a.JSON&&a.JSON.parse)return a.JSON.parse(b);if(n.test(b.replace(o,"@").replace(p,"]").replace(q,"")))return(new Function("return "+b))();e.error("Invalid JSON: "+b)},parseXML:function(c){var d,f;try{a.DOMParser?(f=new DOMParser,d=f.parseFromString(c,"text/xml")):(d=new ActiveXObject("Microsoft.XMLDOM"),d.async="false",d.loadXML(c))}catch(g){d=b}(!d||!d.documentElement||d.getElementsByTagName("parsererror").length)&&e.error("Invalid XML: "+c);return d},noop:function(){},globalEval:function(b){b&&j.test(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(w,"ms-").replace(v,x)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,c,d){var f,g=0,h=a.length,i=h===b||e.isFunction(a);if(d){if(i){for(f in a)if(c.apply(a[f],d)===!1)break}else for(;g<h;)if(c.apply(a[g++],d)===!1)break}else if(i){for(f in a)if(c.call(a[f],f,a[f])===!1)break}else for(;g<h;)if(c.call(a[g],g,a[g++])===!1)break;return a},trim:G?function(a){return a==null?"":G.call(a)}:function(a){return a==null?"":(a+"").replace(k,"").replace(l,"")},makeArray:function(a,b){var c=b||[];if(a!=null){var d=e.type(a);a.length==null||d==="string"||d==="function"||d==="regexp"||e.isWindow(a)?E.call(c,a):e.merge(c,a)}return c},inArray:function(a,b,c){var d;if(b){if(H)return H.call(b,a,c);d=b.length,c=c?c<0?Math.max(0,d+c):c:0;for(;c<d;c++)if(c in b&&b[c]===a)return c}return-1},merge:function(a,c){var d=a.length,e=0;if(typeof c.length=="number")for(var f=c.length;e<f;e++)a[d++]=c[e];else while(c[e]!==b)a[d++]=c[e++];a.length=d;return a},grep:function(a,b,c){var d=[],e;c=!!c;for(var f=0,g=a.length;f<g;f++)e=!!b(a[f],f),c!==e&&d.push(a[f]);return d},map:function(a,c,d){var f,g,h=[],i=0,j=a.length,k=a instanceof e||j!==b&&typeof j=="number"&&(j>0&&a[0]&&a[j-1]||j===0||e.isArray(a));if(k)for(;i<j;i++)f=c(a[i],i,d),f!=null&&(h[h.length]=f);else for(g in a)f=c(a[g],g,d),f!=null&&(h[h.length]=f);return h.concat.apply([],h)},guid:1,proxy:function(a,c){if(typeof c=="string"){var d=a[c];c=a,a=d}if(!e.isFunction(a))return b;var f=F.call(arguments,2),g=function(){return a.apply(c,f.concat(F.call(arguments)))};g.guid=a.guid=a.guid||g.guid||e.guid++;return g},access:function(a,c,d,f,g,h){var i=a.length;if(typeof c=="object"){for(var j in c)e.access(a,j,c[j],f,g,d);return a}if(d!==b){f=!h&&f&&e.isFunction(d);for(var k=0;k<i;k++)g(a[k],c,f?d.call(a[k],k,g(a[k],c)):d,h);return a}return i?g(a[0],c):b},now:function(){return(new Date).getTime()},uaMatch:function(a){a=a.toLowerCase();var b=r.exec(a)||s.exec(a)||t.exec(a)||a.indexOf("compatible")<0&&u.exec(a)||[];return{browser:b[1]||"",version:b[2]||"0"}},sub:function(){function a(b,c){return new 
a.fn.init(b,c)}e.extend(!0,a,this),a.superclass=this,a.fn=a.prototype=this(),a.fn.constructor=a,a.sub=this.sub,a.fn.init=function(d,f){f&&f instanceof e&&!(f instanceof a)&&(f=a(f));return e.fn.init.call(this,d,f,b)},a.fn.init.prototype=a.fn;var b=a(c);return a},browser:{}}),e.each("Boolean Number String Function Array Date RegExp Object".split(" "),function(a,b){I["[object "+b+"]"]=b.toLowerCase()}),z=e.uaMatch(y),z.browser&&(e.browser[z.browser]=!0,e.browser.version=z.version),e.browser.webkit&&(e.browser.safari=!0),j.test(" ")&&(k=/^[\s\xA0]+/,l=/[\s\xA0]+$/),h=e(c),c.addEventListener?B=function(){c.removeEventListener("DOMContentLoaded",B,!1),e.ready()}:c.attachEvent&&(B=function(){c.readyState==="complete"&&(c.detachEvent("onreadystatechange",B),e.ready())});return e}(),g={};f.Callbacks=function(a){a=a?g[a]||h(a):{};var c=[],d=[],e,i,j,k,l,m=function(b){var d,e,g,h,i;for(d=0,e=b.length;d<e;d++)g=b[d],h=f.type(g),h==="array"?m(g):h==="function"&&(!a.unique||!o.has(g))&&c.push(g)},n=function(b,f){f=f||[],e=!a.memory||[b,f],i=!0,l=j||0,j=0,k=c.length;for(;c&&l<k;l++)if(c[l].apply(b,f)===!1&&a.stopOnFalse){e=!0;break}i=!1,c&&(a.once?e===!0?o.disable():c=[]:d&&d.length&&(e=d.shift(),o.fireWith(e[0],e[1])))},o={add:function(){if(c){var a=c.length;m(arguments),i?k=c.length:e&&e!==!0&&(j=a,n(e[0],e[1]))}return this},remove:function(){if(c){var b=arguments,d=0,e=b.length;for(;d<e;d++)for(var f=0;f<c.length;f++)if(b[d]===c[f]){i&&f<=k&&(k--,f<=l&&l--),c.splice(f--,1);if(a.unique)break}}return this},has:function(a){if(c){var b=0,d=c.length;for(;b<d;b++)if(a===c[b])return!0}return!1},empty:function(){c=[];return this},disable:function(){c=d=e=b;return this},disabled:function(){return!c},lock:function(){d=b,(!e||e===!0)&&o.disable();return this},locked:function(){return!d},fireWith:function(b,c){d&&(i?a.once||d.push([b,c]):(!a.once||!e)&&n(b,c));return this},fire:function(){o.fireWith(this,arguments);return this},fired:function(){return!!e}};return o};var i=[].slice;f.extend({Deferred:function(a){var b=f.Callbacks("once memory"),c=f.Callbacks("once memory"),d=f.Callbacks("memory"),e="pending",g={resolve:b,reject:c,notify:d},h={done:b.add,fail:c.add,progress:d.add,state:function(){return e},isResolved:b.fired,isRejected:c.fired,then:function(a,b,c){i.done(a).fail(b).progress(c);return this},always:function(){i.done.apply(i,arguments).fail.apply(i,arguments);return this},pipe:function(a,b,c){return f.Deferred(function(d){f.each({done:[a,"resolve"],fail:[b,"reject"],progress:[c,"notify"]},function(a,b){var c=b[0],e=b[1],g;f.isFunction(c)?i[a](function(){g=c.apply(this,arguments),g&&f.isFunction(g.promise)?g.promise().then(d.resolve,d.reject,d.notify):d[e+"With"](this===i?d:this,[g])}):i[a](d[e])})}).promise()},promise:function(a){if(a==null)a=h;else for(var b in h)a[b]=h[b];return a}},i=h.promise({}),j;for(j in g)i[j]=g[j].fire,i[j+"With"]=g[j].fireWith;i.done(function(){e="resolved"},c.disable,d.lock).fail(function(){e="rejected"},b.disable,d.lock),a&&a.call(i,i);return i},when:function(a){function m(a){return function(b){e[a]=arguments.length>1?i.call(arguments,0):b,j.notifyWith(k,e)}}function l(a){return function(c){b[a]=arguments.length>1?i.call(arguments,0):c,--g||j.resolveWith(j,b)}}var b=i.call(arguments,0),c=0,d=b.length,e=Array(d),g=d,h=d,j=d<=1&&a&&f.isFunction(a.promise)?a:f.Deferred(),k=j.promise();if(d>1){for(;c<d;c++)b[c]&&b[c].promise&&f.isFunction(b[c].promise)?b[c].promise().then(l(c),j.reject,m(c)):--g;g||j.resolveWith(j,b)}else j!==a&&j.resolveWith(j,d?[a]:[]);return 
k}}),f.support=function(){var b,d,e,g,h,i,j,k,l,m,n,o,p,q=c.createElement("div"),r=c.documentElement;q.setAttribute("className","t"),q.innerHTML="   <link/><table></table><a href='/a' style='top:1px;float:left;opacity:.55;'>a</a><input type='checkbox'/>",d=q.getElementsByTagName("*"),e=q.getElementsByTagName("a")[0];if(!d||!d.length||!e)return{};g=c.createElement("select"),h=g.appendChild(c.createElement("option")),i=q.getElementsByTagName("input")[0],b={leadingWhitespace:q.firstChild.nodeType===3,tbody:!q.getElementsByTagName("tbody").length,htmlSerialize:!!q.getElementsByTagName("link").length,style:/top/.test(e.getAttribute("style")),hrefNormalized:e.getAttribute("href")==="/a",opacity:/^0.55/.test(e.style.opacity),cssFloat:!!e.style.cssFloat,checkOn:i.value==="on",optSelected:h.selected,getSetAttribute:q.className!=="t",enctype:!!c.createElement("form").enctype,html5Clone:c.createElement("nav").cloneNode(!0).outerHTML!=="<:nav></:nav>",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0},i.checked=!0,b.noCloneChecked=i.cloneNode(!0).checked,g.disabled=!0,b.optDisabled=!h.disabled;try{delete q.test}catch(s){b.deleteExpando=!1}!q.addEventListener&&q.attachEvent&&q.fireEvent&&(q.attachEvent("onclick",function(){b.noCloneEvent=!1}),q.cloneNode(!0).fireEvent("onclick")),i=c.createElement("input"),i.value="t",i.setAttribute("type","radio"),b.radioValue=i.value==="t",i.setAttribute("checked","checked"),q.appendChild(i),k=c.createDocumentFragment(),k.appendChild(q.lastChild),b.checkClone=k.cloneNode(!0).cloneNode(!0).lastChild.checked,b.appendChecked=i.checked,k.removeChild(i),k.appendChild(q),q.innerHTML="",a.getComputedStyle&&(j=c.createElement("div"),j.style.width="0",j.style.marginRight="0",q.style.width="2px",q.appendChild(j),b.reliableMarginRight=(parseInt((a.getComputedStyle(j,null)||{marginRight:0}).marginRight,10)||0)===0);if(q.attachEvent)for(o in{submit:1,change:1,focusin:1})n="on"+o,p=n in q,p||(q.setAttribute(n,"return;"),p=typeof q[n]=="function"),b[o+"Bubbles"]=p;k.removeChild(q),k=g=h=j=q=i=null,f(function(){var a,d,e,g,h,i,j,k,m,n,o,r=c.getElementsByTagName("body")[0];!r||(j=1,k="position:absolute;top:0;left:0;width:1px;height:1px;margin:0;",m="visibility:hidden;border:0;",n="style='"+k+"border:5px solid #000;padding:0;'",o="<div "+n+"><div></div></div>"+"<table "+n+" cellpadding='0' cellspacing='0'>"+"<tr><td></td></tr></table>",a=c.createElement("div"),a.style.cssText=m+"width:0;height:0;position:static;top:0;margin-top:"+j+"px",r.insertBefore(a,r.firstChild),q=c.createElement("div"),a.appendChild(q),q.innerHTML="<table><tr><td style='padding:0;border:0;display:none'></td><td>t</td></tr></table>",l=q.getElementsByTagName("td"),p=l[0].offsetHeight===0,l[0].style.display="",l[1].style.display="none",b.reliableHiddenOffsets=p&&l[0].offsetHeight===0,q.innerHTML="",q.style.width=q.style.paddingLeft="1px",f.boxModel=b.boxModel=q.offsetWidth===2,typeof q.style.zoom!="undefined"&&(q.style.display="inline",q.style.zoom=1,b.inlineBlockNeedsLayout=q.offsetWidth===2,q.style.display="",q.innerHTML="<div 
style='width:4px;'></div>",b.shrinkWrapBlocks=q.offsetWidth!==2),q.style.cssText=k+m,q.innerHTML=o,d=q.firstChild,e=d.firstChild,h=d.nextSibling.firstChild.firstChild,i={doesNotAddBorder:e.offsetTop!==5,doesAddBorderForTableAndCells:h.offsetTop===5},e.style.position="fixed",e.style.top="20px",i.fixedPosition=e.offsetTop===20||e.offsetTop===15,e.style.position=e.style.top="",d.style.overflow="hidden",d.style.position="relative",i.subtractsBorderForOverflowNotVisible=e.offsetTop===-5,i.doesNotIncludeMarginInBodyOffset=r.offsetTop!==j,r.removeChild(a),q=a=null,f.extend(b,i))});return b}();var j=/^(?:\{.*\}|\[.*\])$/,k=/([A-Z])/g;f.extend({cache:{},uuid:0,expando:"jQuery"+(f.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(a){a=a.nodeType?f.cache[a[f.expando]]:a[f.expando];return!!a&&!m(a)},data:function(a,c,d,e){if(!!f.acceptData(a)){var g,h,i,j=f.expando,k=typeof c=="string",l=a.nodeType,m=l?f.cache:a,n=l?a[j]:a[j]&&j,o=c==="events";if((!n||!m[n]||!o&&!e&&!m[n].data)&&k&&d===b)return;n||(l?a[j]=n=++f.uuid:n=j),m[n]||(m[n]={},l||(m[n].toJSON=f.noop));if(typeof c=="object"||typeof c=="function")e?m[n]=f.extend(m[n],c):m[n].data=f.extend(m[n].data,c);g=h=m[n],e||(h.data||(h.data={}),h=h.data),d!==b&&(h[f.camelCase(c)]=d);if(o&&!h[c])return g.events;k?(i=h[c],i==null&&(i=h[f.camelCase(c)])):i=h;return i}},removeData:function(a,b,c){if(!!f.acceptData(a)){var d,e,g,h=f.expando,i=a.nodeType,j=i?f.cache:a,k=i?a[h]:h;if(!j[k])return;if(b){d=c?j[k]:j[k].data;if(d){f.isArray(b)||(b in d?b=[b]:(b=f.camelCase(b),b in d?b=[b]:b=b.split(" ")));for(e=0,g=b.length;e<g;e++)delete d[b[e]];if(!(c?m:f.isEmptyObject)(d))return}}if(!c){delete j[k].data;if(!m(j[k]))return}f.support.deleteExpando||!j.setInterval?delete j[k]:j[k]=null,i&&(f.support.deleteExpando?delete a[h]:a.removeAttribute?a.removeAttribute(h):a[h]=null)}},_data:function(a,b,c){return f.data(a,b,c,!0)},acceptData:function(a){if(a.nodeName){var b=f.noData[a.nodeName.toLowerCase()];if(b)return b!==!0&&a.getAttribute("classid")===b}return!0}}),f.fn.extend({data:function(a,c){var d,e,g,h=null;if(typeof a=="undefined"){if(this.length){h=f.data(this[0]);if(this[0].nodeType===1&&!f._data(this[0],"parsedAttrs")){e=this[0].attributes;for(var i=0,j=e.length;i<j;i++)g=e[i].name,g.indexOf("data-")===0&&(g=f.camelCase(g.substring(5)),l(this[0],g,h[g]));f._data(this[0],"parsedAttrs",!0)}}return h}if(typeof a=="object")return this.each(function(){f.data(this,a)});d=a.split("."),d[1]=d[1]?"."+d[1]:"";if(c===b){h=this.triggerHandler("getData"+d[1]+"!",[d[0]]),h===b&&this.length&&(h=f.data(this[0],a),h=l(this[0],a,h));return h===b&&d[1]?this.data(d[0]):h}return this.each(function(){var b=f(this),e=[d[0],c];b.triggerHandler("setData"+d[1]+"!",e),f.data(this,a,c),b.triggerHandler("changeData"+d[1]+"!",e)})},removeData:function(a){return this.each(function(){f.removeData(this,a)})}}),f.extend({_mark:function(a,b){a&&(b=(b||"fx")+"mark",f._data(a,b,(f._data(a,b)||0)+1))},_unmark:function(a,b,c){a!==!0&&(c=b,b=a,a=!1);if(b){c=c||"fx";var d=c+"mark",e=a?0:(f._data(b,d)||1)-1;e?f._data(b,d,e):(f.removeData(b,d,!0),n(b,c,"mark"))}},queue:function(a,b,c){var d;if(a){b=(b||"fx")+"queue",d=f._data(a,b),c&&(!d||f.isArray(c)?d=f._data(a,b,f.makeArray(c)):d.push(c));return d||[]}},dequeue:function(a,b){b=b||"fx";var 
c=f.queue(a,b),d=c.shift(),e={};d==="inprogress"&&(d=c.shift()),d&&(b==="fx"&&c.unshift("inprogress"),f._data(a,b+".run",e),d.call(a,function(){f.dequeue(a,b)},e)),c.length||(f.removeData(a,b+"queue "+b+".run",!0),n(a,b,"queue"))}}),f.fn.extend({queue:function(a,c){typeof a!="string"&&(c=a,a="fx");if(c===b)return f.queue(this[0],a);return this.each(function(){var b=f.queue(this,a,c);a==="fx"&&b[0]!=="inprogress"&&f.dequeue(this,a)})},dequeue:function(a){return this.each(function(){f.dequeue(this,a)})},delay:function(a,b){a=f.fx?f.fx.speeds[a]||a:a,b=b||"fx";return this.queue(b,function(b,c){var d=setTimeout(b,a);c.stop=function(){clearTimeout(d)}})},clearQueue:function(a){return this.queue(a||"fx",[])},promise:function(a,c){function m(){--h||d.resolveWith(e,[e])}typeof a!="string"&&(c=a,a=b),a=a||"fx";var d=f.Deferred(),e=this,g=e.length,h=1,i=a+"defer",j=a+"queue",k=a+"mark",l;while(g--)if(l=f.data(e[g],i,b,!0)||(f.data(e[g],j,b,!0)||f.data(e[g],k,b,!0))&&f.data(e[g],i,f.Callbacks("once memory"),!0))h++,l.add(m);m();return d.promise()}});var o=/[\n\t\r]/g,p=/\s+/,q=/\r/g,r=/^(?:button|input)$/i,s=/^(?:button|input|object|select|textarea)$/i,t=/^a(?:rea)?$/i,u=/^(?:autofocus|autoplay|async|checked|controls|defer|disabled|hidden|loop|multiple|open|readonly|required|scoped|selected)$/i,v=f.support.getSetAttribute,w,x,y;f.fn.extend({attr:function(a,b){return f.access(this,a,b,!0,f.attr)},removeAttr:function(a){return this.each(function(){f.removeAttr(this,a)})},prop:function(a,b){return f.access(this,a,b,!0,f.prop)},removeProp:function(a){a=f.propFix[a]||a;return this.each(function(){try{this[a]=b,delete this[a]}catch(c){}})},addClass:function(a){var b,c,d,e,g,h,i;if(f.isFunction(a))return this.each(function(b){f(this).addClass(a.call(this,b,this.className))});if(a&&typeof a=="string"){b=a.split(p);for(c=0,d=this.length;c<d;c++){e=this[c];if(e.nodeType===1)if(!e.className&&b.length===1)e.className=a;else{g=" "+e.className+" ";for(h=0,i=b.length;h<i;h++)~g.indexOf(" "+b[h]+" ")||(g+=b[h]+" ");e.className=f.trim(g)}}}return this},removeClass:function(a){var c,d,e,g,h,i,j;if(f.isFunction(a))return this.each(function(b){f(this).removeClass(a.call(this,b,this.className))});if(a&&typeof a=="string"||a===b){c=(a||"").split(p);for(d=0,e=this.length;d<e;d++){g=this[d];if(g.nodeType===1&&g.className)if(a){h=(" "+g.className+" ").replace(o," ");for(i=0,j=c.length;i<j;i++)h=h.replace(" "+c[i]+" "," ");g.className=f.trim(h)}else g.className=""}}return this},toggleClass:function(a,b){var c=typeof a,d=typeof b=="boolean";if(f.isFunction(a))return this.each(function(c){f(this).toggleClass(a.call(this,c,this.className,b),b)});return this.each(function(){if(c==="string"){var e,g=0,h=f(this),i=b,j=a.split(p);while(e=j[g++])i=d?i:!h.hasClass(e),h[i?"addClass":"removeClass"](e)}else if(c==="undefined"||c==="boolean")this.className&&f._data(this,"__className__",this.className),this.className=this.className||a===!1?"":f._data(this,"__className__")||""})},hasClass:function(a){var b=" "+a+" ",c=0,d=this.length;for(;c<d;c++)if(this[c].nodeType===1&&(" "+this[c].className+" ").replace(o," ").indexOf(b)>-1)return!0;return!1},val:function(a){var c,d,e,g=this[0];{if(!!arguments.length){e=f.isFunction(a);return this.each(function(d){var g=f(this),h;if(this.nodeType===1){e?h=a.call(this,d,g.val()):h=a,h==null?h="":typeof h=="number"?h+="":f.isArray(h)&&(h=f.map(h,function(a){return a==null?"":a+""})),c=f.valHooks[this.nodeName.toLowerCase()]||f.valHooks[this.type];if(!c||!("set"in 
c)||c.set(this,h,"value")===b)this.value=h}})}if(g){c=f.valHooks[g.nodeName.toLowerCase()]||f.valHooks[g.type];if(c&&"get"in c&&(d=c.get(g,"value"))!==b)return d;d=g.value;return typeof d=="string"?d.replace(q,""):d==null?"":d}}}}),f.extend({valHooks:{option:{get:function(a){var b=a.attributes.value;return!b||b.specified?a.value:a.text}},select:{get:function(a){var b,c,d,e,g=a.selectedIndex,h=[],i=a.options,j=a.type==="select-one";if(g<0)return null;c=j?g:0,d=j?g+1:i.length;for(;c<d;c++){e=i[c];if(e.selected&&(f.support.optDisabled?!e.disabled:e.getAttribute("disabled")===null)&&(!e.parentNode.disabled||!f.nodeName(e.parentNode,"optgroup"))){b=f(e).val();if(j)return b;h.push(b)}}if(j&&!h.length&&i.length)return f(i[g]).val();return h},set:function(a,b){var c=f.makeArray(b);f(a).find("option").each(function(){this.selected=f.inArray(f(this).val(),c)>=0}),c.length||(a.selectedIndex=-1);return c}}},attrFn:{val:!0,css:!0,html:!0,text:!0,data:!0,width:!0,height:!0,offset:!0},attr:function(a,c,d,e){var g,h,i,j=a.nodeType;if(!!a&&j!==3&&j!==8&&j!==2){if(e&&c in f.attrFn)return f(a)[c](d);if(typeof a.getAttribute=="undefined")return f.prop(a,c,d);i=j!==1||!f.isXMLDoc(a),i&&(c=c.toLowerCase(),h=f.attrHooks[c]||(u.test(c)?x:w));if(d!==b){if(d===null){f.removeAttr(a,c);return}if(h&&"set"in h&&i&&(g=h.set(a,d,c))!==b)return g;a.setAttribute(c,""+d);return d}if(h&&"get"in h&&i&&(g=h.get(a,c))!==null)return g;g=a.getAttribute(c);return g===null?b:g}},removeAttr:function(a,b){var c,d,e,g,h=0;if(b&&a.nodeType===1){d=b.toLowerCase().split(p),g=d.length;for(;h<g;h++)e=d[h],e&&(c=f.propFix[e]||e,f.attr(a,e,""),a.removeAttribute(v?e:c),u.test(e)&&c in a&&(a[c]=!1))}},attrHooks:{type:{set:function(a,b){if(r.test(a.nodeName)&&a.parentNode)f.error("type property can't be changed");else if(!f.support.radioValue&&b==="radio"&&f.nodeName(a,"input")){var c=a.value;a.setAttribute("type",b),c&&(a.value=c);return b}}},value:{get:function(a,b){if(w&&f.nodeName(a,"button"))return w.get(a,b);return b in a?a.value:null},set:function(a,b,c){if(w&&f.nodeName(a,"button"))return w.set(a,b,c);a.value=b}}},propFix:{tabindex:"tabIndex",readonly:"readOnly","for":"htmlFor","class":"className",maxlength:"maxLength",cellspacing:"cellSpacing",cellpadding:"cellPadding",rowspan:"rowSpan",colspan:"colSpan",usemap:"useMap",frameborder:"frameBorder",contenteditable:"contentEditable"},prop:function(a,c,d){var e,g,h,i=a.nodeType;if(!!a&&i!==3&&i!==8&&i!==2){h=i!==1||!f.isXMLDoc(a),h&&(c=f.propFix[c]||c,g=f.propHooks[c]);return d!==b?g&&"set"in g&&(e=g.set(a,d,c))!==b?e:a[c]=d:g&&"get"in g&&(e=g.get(a,c))!==null?e:a[c]}},propHooks:{tabIndex:{get:function(a){var c=a.getAttributeNode("tabindex");return c&&c.specified?parseInt(c.value,10):s.test(a.nodeName)||t.test(a.nodeName)&&a.href?0:b}}}}),f.attrHooks.tabindex=f.propHooks.tabIndex,x={get:function(a,c){var d,e=f.prop(a,c);return e===!0||typeof e!="boolean"&&(d=a.getAttributeNode(c))&&d.nodeValue!==!1?c.toLowerCase():b},set:function(a,b,c){var d;b===!1?f.removeAttr(a,c):(d=f.propFix[c]||c,d in a&&(a[d]=!0),a.setAttribute(c,c.toLowerCase()));return c}},v||(y={name:!0,id:!0},w=f.valHooks.button={get:function(a,c){var d;d=a.getAttributeNode(c);return d&&(y[c]?d.nodeValue!=="":d.specified)?d.nodeValue:b},set:function(a,b,d){var e=a.getAttributeNode(d);e||(e=c.createAttribute(d),a.setAttributeNode(e));return e.nodeValue=b+""}},f.attrHooks.tabindex.set=w.set,f.each(["width","height"],function(a,b){f.attrHooks[b]=f.extend(f.attrHooks[b],{set:function(a,c){if(c===""){a.setAttribute(b,"auto");return 
c}}})}),f.attrHooks.contenteditable={get:w.get,set:function(a,b,c){b===""&&(b="false"),w.set(a,b,c)}}),f.support.hrefNormalized||f.each(["href","src","width","height"],function(a,c){f.attrHooks[c]=f.extend(f.attrHooks[c],{get:function(a){var d=a.getAttribute(c,2);return d===null?b:d}})}),f.support.style||(f.attrHooks.style={get:function(a){return a.style.cssText.toLowerCase()||b},set:function(a,b){return a.style.cssText=""+b}}),f.support.optSelected||(f.propHooks.selected=f.extend(f.propHooks.selected,{get:function(a){var b=a.parentNode;b&&(b.selectedIndex,b.parentNode&&b.parentNode.selectedIndex);return null}})),f.support.enctype||(f.propFix.enctype="encoding"),f.support.checkOn||f.each(["radio","checkbox"],function(){f.valHooks[this]={get:function(a){return a.getAttribute("value")===null?"on":a.value}}}),f.each(["radio","checkbox"],function(){f.valHooks[this]=f.extend(f.valHooks[this],{set:function(a,b){if(f.isArray(b))return a.checked=f.inArray(f(a).val(),b)>=0}})});var z=/^(?:textarea|input|select)$/i,A=/^([^\.]*)?(?:\.(.+))?$/,B=/\bhover(\.\S+)?\b/,C=/^key/,D=/^(?:mouse|contextmenu)|click/,E=/^(?:focusinfocus|focusoutblur)$/,F=/^(\w*)(?:#([\w\-]+))?(?:\.([\w\-]+))?$/,G=function(a){var b=F.exec(a);b&&(b[1]=(b[1]||"").toLowerCase(),b[3]=b[3]&&new RegExp("(?:^|\\s)"+b[3]+"(?:\\s|$)"));return b},H=function(a,b){var c=a.attributes||{};return(!b[1]||a.nodeName.toLowerCase()===b[1])&&(!b[2]||(c.id||{}).value===b[2])&&(!b[3]||b[3].test((c["class"]||{}).value))},I=function(a){return f.event.special.hover?a:a.replace(B,"mouseenter$1 mouseleave$1")};
f.event={add:function(a,c,d,e,g){var h,i,j,k,l,m,n,o,p,q,r,s;if(!(a.nodeType===3||a.nodeType===8||!c||!d||!(h=f._data(a)))){d.handler&&(p=d,d=p.handler),d.guid||(d.guid=f.guid++),j=h.events,j||(h.events=j={}),i=h.handle,i||(h.handle=i=function(a){return typeof f!="undefined"&&(!a||f.event.triggered!==a.type)?f.event.dispatch.apply(i.elem,arguments):b},i.elem=a),c=f.trim(I(c)).split(" ");for(k=0;k<c.length;k++){l=A.exec(c[k])||[],m=l[1],n=(l[2]||"").split(".").sort(),s=f.event.special[m]||{},m=(g?s.delegateType:s.bindType)||m,s=f.event.special[m]||{},o=f.extend({type:m,origType:l[1],data:e,handler:d,guid:d.guid,selector:g,quick:G(g),namespace:n.join(".")},p),r=j[m];if(!r){r=j[m]=[],r.delegateCount=0;if(!s.setup||s.setup.call(a,e,n,i)===!1)a.addEventListener?a.addEventListener(m,i,!1):a.attachEvent&&a.attachEvent("on"+m,i)}s.add&&(s.add.call(a,o),o.handler.guid||(o.handler.guid=d.guid)),g?r.splice(r.delegateCount++,0,o):r.push(o),f.event.global[m]=!0}a=null}},global:{},remove:function(a,b,c,d,e){var g=f.hasData(a)&&f._data(a),h,i,j,k,l,m,n,o,p,q,r,s;if(!!g&&!!(o=g.events)){b=f.trim(I(b||"")).split(" ");for(h=0;h<b.length;h++){i=A.exec(b[h])||[],j=k=i[1],l=i[2];if(!j){for(j in o)f.event.remove(a,j+b[h],c,d,!0);continue}p=f.event.special[j]||{},j=(d?p.delegateType:p.bindType)||j,r=o[j]||[],m=r.length,l=l?new RegExp("(^|\\.)"+l.split(".").sort().join("\\.(?:.*\\.)?")+"(\\.|$)"):null;for(n=0;n<r.length;n++)s=r[n],(e||k===s.origType)&&(!c||c.guid===s.guid)&&(!l||l.test(s.namespace))&&(!d||d===s.selector||d==="**"&&s.selector)&&(r.splice(n--,1),s.selector&&r.delegateCount--,p.remove&&p.remove.call(a,s));r.length===0&&m!==r.length&&((!p.teardown||p.teardown.call(a,l)===!1)&&f.removeEvent(a,j,g.handle),delete o[j])}f.isEmptyObject(o)&&(q=g.handle,q&&(q.elem=null),f.removeData(a,["events","handle"],!0))}},customEvent:{getData:!0,setData:!0,changeData:!0},trigger:function(c,d,e,g){if(!e||e.nodeType!==3&&e.nodeType!==8){var h=c.type||c,i=[],j,k,l,m,n,o,p,q,r,s;if(E.test(h+f.event.triggered))return;h.indexOf("!")>=0&&(h=h.slice(0,-1),k=!0),h.indexOf(".")>=0&&(i=h.split("."),h=i.shift(),i.sort());if((!e||f.event.customEvent[h])&&!f.event.global[h])return;c=typeof c=="object"?c[f.expando]?c:new f.Event(h,c):new f.Event(h),c.type=h,c.isTrigger=!0,c.exclusive=k,c.namespace=i.join("."),c.namespace_re=c.namespace?new RegExp("(^|\\.)"+i.join("\\.(?:.*\\.)?")+"(\\.|$)"):null,o=h.indexOf(":")<0?"on"+h:"";if(!e){j=f.cache;for(l in j)j[l].events&&j[l].events[h]&&f.event.trigger(c,d,j[l].handle.elem,!0);return}c.result=b,c.target||(c.target=e),d=d!=null?f.makeArray(d):[],d.unshift(c),p=f.event.special[h]||{};if(p.trigger&&p.trigger.apply(e,d)===!1)return;r=[[e,p.bindType||h]];if(!g&&!p.noBubble&&!f.isWindow(e)){s=p.delegateType||h,m=E.test(s+h)?e:e.parentNode,n=null;for(;m;m=m.parentNode)r.push([m,s]),n=m;n&&n===e.ownerDocument&&r.push([n.defaultView||n.parentWindow||a,s])}for(l=0;l<r.length&&!c.isPropagationStopped();l++)m=r[l][0],c.type=r[l][1],q=(f._data(m,"events")||{})[c.type]&&f._data(m,"handle"),q&&q.apply(m,d),q=o&&m[o],q&&f.acceptData(m)&&q.apply(m,d)===!1&&c.preventDefault();c.type=h,!g&&!c.isDefaultPrevented()&&(!p._default||p._default.apply(e.ownerDocument,d)===!1)&&(h!=="click"||!f.nodeName(e,"a"))&&f.acceptData(e)&&o&&e[h]&&(h!=="focus"&&h!=="blur"||c.target.offsetWidth!==0)&&!f.isWindow(e)&&(n=e[o],n&&(e[o]=null),f.event.triggered=h,e[h](),f.event.triggered=b,n&&(e[o]=n));return c.result}},dispatch:function(c){c=f.event.fix(c||a.event);var 
d=(f._data(this,"events")||{})[c.type]||[],e=d.delegateCount,g=[].slice.call(arguments,0),h=!c.exclusive&&!c.namespace,i=[],j,k,l,m,n,o,p,q,r,s,t;g[0]=c,c.delegateTarget=this;if(e&&!c.target.disabled&&(!c.button||c.type!=="click")){m=f(this),m.context=this.ownerDocument||this;for(l=c.target;l!=this;l=l.parentNode||this){o={},q=[],m[0]=l;for(j=0;j<e;j++)r=d[j],s=r.selector,o[s]===b&&(o[s]=r.quick?H(l,r.quick):m.is(s)),o[s]&&q.push(r);q.length&&i.push({elem:l,matches:q})}}d.length>e&&i.push({elem:this,matches:d.slice(e)});for(j=0;j<i.length&&!c.isPropagationStopped();j++){p=i[j],c.currentTarget=p.elem;for(k=0;k<p.matches.length&&!c.isImmediatePropagationStopped();k++){r=p.matches[k];if(h||!c.namespace&&!r.namespace||c.namespace_re&&c.namespace_re.test(r.namespace))c.data=r.data,c.handleObj=r,n=((f.event.special[r.origType]||{}).handle||r.handler).apply(p.elem,g),n!==b&&(c.result=n,n===!1&&(c.preventDefault(),c.stopPropagation()))}}return c.result},props:"attrChange attrName relatedNode srcElement altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),fixHooks:{},keyHooks:{props:"char charCode key keyCode".split(" "),filter:function(a,b){a.which==null&&(a.which=b.charCode!=null?b.charCode:b.keyCode);return a}},mouseHooks:{props:"button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "),filter:function(a,d){var e,f,g,h=d.button,i=d.fromElement;a.pageX==null&&d.clientX!=null&&(e=a.target.ownerDocument||c,f=e.documentElement,g=e.body,a.pageX=d.clientX+(f&&f.scrollLeft||g&&g.scrollLeft||0)-(f&&f.clientLeft||g&&g.clientLeft||0),a.pageY=d.clientY+(f&&f.scrollTop||g&&g.scrollTop||0)-(f&&f.clientTop||g&&g.clientTop||0)),!a.relatedTarget&&i&&(a.relatedTarget=i===a.target?d.toElement:i),!a.which&&h!==b&&(a.which=h&1?1:h&2?3:h&4?2:0);return a}},fix:function(a){if(a[f.expando])return a;var d,e,g=a,h=f.event.fixHooks[a.type]||{},i=h.props?this.props.concat(h.props):this.props;a=f.Event(g);for(d=i.length;d;)e=i[--d],a[e]=g[e];a.target||(a.target=g.srcElement||c),a.target.nodeType===3&&(a.target=a.target.parentNode),a.metaKey===b&&(a.metaKey=a.ctrlKey);return h.filter?h.filter(a,g):a},special:{ready:{setup:f.bindReady},load:{noBubble:!0},focus:{delegateType:"focusin"},blur:{delegateType:"focusout"},beforeunload:{setup:function(a,b,c){f.isWindow(this)&&(this.onbeforeunload=c)},teardown:function(a,b){this.onbeforeunload===b&&(this.onbeforeunload=null)}}},simulate:function(a,b,c,d){var e=f.extend(new f.Event,c,{type:a,isSimulated:!0,originalEvent:{}});d?f.event.trigger(e,null,b):f.event.dispatch.call(b,e),e.isDefaultPrevented()&&c.preventDefault()}},f.event.handle=f.event.dispatch,f.removeEvent=c.removeEventListener?function(a,b,c){a.removeEventListener&&a.removeEventListener(b,c,!1)}:function(a,b,c){a.detachEvent&&a.detachEvent("on"+b,c)},f.Event=function(a,b){if(!(this instanceof f.Event))return new f.Event(a,b);a&&a.type?(this.originalEvent=a,this.type=a.type,this.isDefaultPrevented=a.defaultPrevented||a.returnValue===!1||a.getPreventDefault&&a.getPreventDefault()?K:J):this.type=a,b&&f.extend(this,b),this.timeStamp=a&&a.timeStamp||f.now(),this[f.expando]=!0},f.Event.prototype={preventDefault:function(){this.isDefaultPrevented=K;var a=this.originalEvent;!a||(a.preventDefault?a.preventDefault():a.returnValue=!1)},stopPropagation:function(){this.isPropagationStopped=K;var 
a=this.originalEvent;!a||(a.stopPropagation&&a.stopPropagation(),a.cancelBubble=!0)},stopImmediatePropagation:function(){this.isImmediatePropagationStopped=K,this.stopPropagation()},isDefaultPrevented:J,isPropagationStopped:J,isImmediatePropagationStopped:J},f.each({mouseenter:"mouseover",mouseleave:"mouseout"},function(a,b){f.event.special[a]={delegateType:b,bindType:b,handle:function(a){var c=this,d=a.relatedTarget,e=a.handleObj,g=e.selector,h;if(!d||d!==c&&!f.contains(c,d))a.type=e.origType,h=e.handler.apply(this,arguments),a.type=b;return h}}}),f.support.submitBubbles||(f.event.special.submit={setup:function(){if(f.nodeName(this,"form"))return!1;f.event.add(this,"click._submit keypress._submit",function(a){var c=a.target,d=f.nodeName(c,"input")||f.nodeName(c,"button")?c.form:b;d&&!d._submit_attached&&(f.event.add(d,"submit._submit",function(a){this.parentNode&&!a.isTrigger&&f.event.simulate("submit",this.parentNode,a,!0)}),d._submit_attached=!0)})},teardown:function(){if(f.nodeName(this,"form"))return!1;f.event.remove(this,"._submit")}}),f.support.changeBubbles||(f.event.special.change={setup:function(){if(z.test(this.nodeName)){if(this.type==="checkbox"||this.type==="radio")f.event.add(this,"propertychange._change",function(a){a.originalEvent.propertyName==="checked"&&(this._just_changed=!0)}),f.event.add(this,"click._change",function(a){this._just_changed&&!a.isTrigger&&(this._just_changed=!1,f.event.simulate("change",this,a,!0))});return!1}f.event.add(this,"beforeactivate._change",function(a){var b=a.target;z.test(b.nodeName)&&!b._change_attached&&(f.event.add(b,"change._change",function(a){this.parentNode&&!a.isSimulated&&!a.isTrigger&&f.event.simulate("change",this.parentNode,a,!0)}),b._change_attached=!0)})},handle:function(a){var b=a.target;if(this!==b||a.isSimulated||a.isTrigger||b.type!=="radio"&&b.type!=="checkbox")return a.handleObj.handler.apply(this,arguments)},teardown:function(){f.event.remove(this,"._change");return z.test(this.nodeName)}}),f.support.focusinBubbles||f.each({focus:"focusin",blur:"focusout"},function(a,b){var d=0,e=function(a){f.event.simulate(b,a.target,f.event.fix(a),!0)};f.event.special[b]={setup:function(){d++===0&&c.addEventListener(a,e,!0)},teardown:function(){--d===0&&c.removeEventListener(a,e,!0)}}}),f.fn.extend({on:function(a,c,d,e,g){var h,i;if(typeof a=="object"){typeof c!="string"&&(d=c,c=b);for(i in a)this.on(i,c,d,a[i],g);return this}d==null&&e==null?(e=c,d=c=b):e==null&&(typeof c=="string"?(e=d,d=b):(e=d,d=c,c=b));if(e===!1)e=J;else if(!e)return this;g===1&&(h=e,e=function(a){f().off(a);return h.apply(this,arguments)},e.guid=h.guid||(h.guid=f.guid++));return this.each(function(){f.event.add(this,a,e,d,c)})},one:function(a,b,c,d){return this.on.call(this,a,b,c,d,1)},off:function(a,c,d){if(a&&a.preventDefault&&a.handleObj){var e=a.handleObj;f(a.delegateTarget).off(e.namespace?e.type+"."+e.namespace:e.type,e.selector,e.handler);return this}if(typeof a=="object"){for(var g in a)this.off(g,c,a[g]);return this}if(c===!1||typeof c=="function")d=c,c=b;d===!1&&(d=J);return this.each(function(){f.event.remove(this,a,d,c)})},bind:function(a,b,c){return this.on(a,null,b,c)},unbind:function(a,b){return this.off(a,null,b)},live:function(a,b,c){f(this.context).on(a,this.selector,b,c);return this},die:function(a,b){f(this.context).off(a,this.selector||"**",b);return this},delegate:function(a,b,c,d){return this.on(b,a,c,d)},undelegate:function(a,b,c){return arguments.length==1?this.off(a,"**"):this.off(b,a,c)},trigger:function(a,b){return 
this.each(function(){f.event.trigger(a,b,this)})},triggerHandler:function(a,b){if(this[0])return f.event.trigger(a,b,this[0],!0)},toggle:function(a){var b=arguments,c=a.guid||f.guid++,d=0,e=function(c){var e=(f._data(this,"lastToggle"+a.guid)||0)%d;f._data(this,"lastToggle"+a.guid,e+1),c.preventDefault();return b[e].apply(this,arguments)||!1};e.guid=c;while(d<b.length)b[d++].guid=c;return this.click(e)},hover:function(a,b){return this.mouseenter(a).mouseleave(b||a)}}),f.each("blur focus focusin focusout load resize scroll unload click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup error contextmenu".split(" "),function(a,b){f.fn[b]=function(a,c){c==null&&(c=a,a=null);return arguments.length>0?this.on(b,null,a,c):this.trigger(b)},f.attrFn&&(f.attrFn[b]=!0),C.test(b)&&(f.event.fixHooks[b]=f.event.keyHooks),D.test(b)&&(f.event.fixHooks[b]=f.event.mouseHooks)}),function(){function x(a,b,c,e,f,g){for(var h=0,i=e.length;h<i;h++){var j=e[h];if(j){var k=!1;j=j[a];while(j){if(j[d]===c){k=e[j.sizset];break}if(j.nodeType===1){g||(j[d]=c,j.sizset=h);if(typeof b!="string"){if(j===b){k=!0;break}}else if(m.filter(b,[j]).length>0){k=j;break}}j=j[a]}e[h]=k}}}function w(a,b,c,e,f,g){for(var h=0,i=e.length;h<i;h++){var j=e[h];if(j){var k=!1;j=j[a];while(j){if(j[d]===c){k=e[j.sizset];break}j.nodeType===1&&!g&&(j[d]=c,j.sizset=h);if(j.nodeName.toLowerCase()===b){k=j;break}j=j[a]}e[h]=k}}}var a=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^\[\]]*\]|['"][^'"]*['"]|[^\[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,d="sizcache"+(Math.random()+"").replace(".",""),e=0,g=Object.prototype.toString,h=!1,i=!0,j=/\\/g,k=/\r\n/g,l=/\W/;[0,0].sort(function(){i=!1;return 0});var m=function(b,d,e,f){e=e||[],d=d||c;var h=d;if(d.nodeType!==1&&d.nodeType!==9)return[];if(!b||typeof b!="string")return e;var i,j,k,l,n,q,r,t,u=!0,v=m.isXML(d),w=[],x=b;do{a.exec(""),i=a.exec(x);if(i){x=i[3],w.push(i[1]);if(i[2]){l=i[3];break}}}while(i);if(w.length>1&&p.exec(b))if(w.length===2&&o.relative[w[0]])j=y(w[0]+w[1],d,f);else{j=o.relative[w[0]]?[d]:m(w.shift(),d);while(w.length)b=w.shift(),o.relative[b]&&(b+=w.shift()),j=y(b,j,f)}else{!f&&w.length>1&&d.nodeType===9&&!v&&o.match.ID.test(w[0])&&!o.match.ID.test(w[w.length-1])&&(n=m.find(w.shift(),d,v),d=n.expr?m.filter(n.expr,n.set)[0]:n.set[0]);if(d){n=f?{expr:w.pop(),set:s(f)}:m.find(w.pop(),w.length===1&&(w[0]==="~"||w[0]==="+")&&d.parentNode?d.parentNode:d,v),j=n.expr?m.filter(n.expr,n.set):n.set,w.length>0?k=s(j):u=!1;while(w.length)q=w.pop(),r=q,o.relative[q]?r=w.pop():q="",r==null&&(r=d),o.relative[q](k,r,v)}else k=w=[]}k||(k=j),k||m.error(q||b);if(g.call(k)==="[object Array]")if(!u)e.push.apply(e,k);else if(d&&d.nodeType===1)for(t=0;k[t]!=null;t++)k[t]&&(k[t]===!0||k[t].nodeType===1&&m.contains(d,k[t]))&&e.push(j[t]);else for(t=0;k[t]!=null;t++)k[t]&&k[t].nodeType===1&&e.push(j[t]);else s(k,e);l&&(m(l,h,e,f),m.uniqueSort(e));return e};m.uniqueSort=function(a){if(u){h=i,a.sort(u);if(h)for(var b=1;b<a.length;b++)a[b]===a[b-1]&&a.splice(b--,1)}return a},m.matches=function(a,b){return m(a,null,null,b)},m.matchesSelector=function(a,b){return m(b,null,null,[a]).length>0},m.find=function(a,b,c){var d,e,f,g,h,i;if(!a)return[];for(e=0,f=o.order.length;e<f;e++){h=o.order[e];if(g=o.leftMatch[h].exec(a)){i=g[1],g.splice(1,1);if(i.substr(i.length-1)!=="\\"){g[1]=(g[1]||"").replace(j,""),d=o.find[h](g,b,c);if(d!=null){a=a.replace(o.match[h],"");break}}}}d||(d=typeof 
b.getElementsByTagName!="undefined"?b.getElementsByTagName("*"):[]);return{set:d,expr:a}},m.filter=function(a,c,d,e){var f,g,h,i,j,k,l,n,p,q=a,r=[],s=c,t=c&&c[0]&&m.isXML(c[0]);while(a&&c.length){for(h in o.filter)if((f=o.leftMatch[h].exec(a))!=null&&f[2]){k=o.filter[h],l=f[1],g=!1,f.splice(1,1);if(l.substr(l.length-1)==="\\")continue;s===r&&(r=[]);if(o.preFilter[h]){f=o.preFilter[h](f,s,d,r,e,t);if(!f)g=i=!0;else if(f===!0)continue}if(f)for(n=0;(j=s[n])!=null;n++)j&&(i=k(j,f,n,s),p=e^i,d&&i!=null?p?g=!0:s[n]=!1:p&&(r.push(j),g=!0));if(i!==b){d||(s=r),a=a.replace(o.match[h],"");if(!g)return[];break}}if(a===q)if(g==null)m.error(a);else break;q=a}return s},m.error=function(a){throw new Error("Syntax error, unrecognized expression: "+a)};var n=m.getText=function(a){var b,c,d=a.nodeType,e="";if(d){if(d===1||d===9){if(typeof a.textContent=="string")return a.textContent;if(typeof a.innerText=="string")return a.innerText.replace(k,"");for(a=a.firstChild;a;a=a.nextSibling)e+=n(a)}else if(d===3||d===4)return a.nodeValue}else for(b=0;c=a[b];b++)c.nodeType!==8&&(e+=n(c));return e},o=m.selectors={order:["ID","NAME","TAG"],match:{ID:/#((?:[\w\u00c0-\uFFFF\-]|\\.)+)/,CLASS:/\.((?:[\w\u00c0-\uFFFF\-]|\\.)+)/,NAME:/\[name=['"]*((?:[\w\u00c0-\uFFFF\-]|\\.)+)['"]*\]/,ATTR:/\[\s*((?:[\w\u00c0-\uFFFF\-]|\\.)+)\s*(?:(\S?=)\s*(?:(['"])(.*?)\3|(#?(?:[\w\u00c0-\uFFFF\-]|\\.)*)|)|)\s*\]/,TAG:/^((?:[\w\u00c0-\uFFFF\*\-]|\\.)+)/,CHILD:/:(only|nth|last|first)-child(?:\(\s*(even|odd|(?:[+\-]?\d+|(?:[+\-]?\d*)?n\s*(?:[+\-]\s*\d+)?))\s*\))?/,POS:/:(nth|eq|gt|lt|first|last|even|odd)(?:\((\d*)\))?(?=[^\-]|$)/,PSEUDO:/:((?:[\w\u00c0-\uFFFF\-]|\\.)+)(?:\((['"]?)((?:\([^\)]+\)|[^\(\)]*)+)\2\))?/},leftMatch:{},attrMap:{"class":"className","for":"htmlFor"},attrHandle:{href:function(a){return a.getAttribute("href")},type:function(a){return a.getAttribute("type")}},relative:{"+":function(a,b){var c=typeof b=="string",d=c&&!l.test(b),e=c&&!d;d&&(b=b.toLowerCase());for(var f=0,g=a.length,h;f<g;f++)if(h=a[f]){while((h=h.previousSibling)&&h.nodeType!==1);a[f]=e||h&&h.nodeName.toLowerCase()===b?h||!1:h===b}e&&m.filter(b,a,!0)},">":function(a,b){var c,d=typeof b=="string",e=0,f=a.length;if(d&&!l.test(b)){b=b.toLowerCase();for(;e<f;e++){c=a[e];if(c){var g=c.parentNode;a[e]=g.nodeName.toLowerCase()===b?g:!1}}}else{for(;e<f;e++)c=a[e],c&&(a[e]=d?c.parentNode:c.parentNode===b);d&&m.filter(b,a,!0)}},"":function(a,b,c){var d,f=e++,g=x;typeof b=="string"&&!l.test(b)&&(b=b.toLowerCase(),d=b,g=w),g("parentNode",b,f,a,d,c)},"~":function(a,b,c){var d,f=e++,g=x;typeof b=="string"&&!l.test(b)&&(b=b.toLowerCase(),d=b,g=w),g("previousSibling",b,f,a,d,c)}},find:{ID:function(a,b,c){if(typeof b.getElementById!="undefined"&&!c){var d=b.getElementById(a[1]);return d&&d.parentNode?[d]:[]}},NAME:function(a,b){if(typeof b.getElementsByName!="undefined"){var c=[],d=b.getElementsByName(a[1]);for(var e=0,f=d.length;e<f;e++)d[e].getAttribute("name")===a[1]&&c.push(d[e]);return c.length===0?null:c}},TAG:function(a,b){if(typeof b.getElementsByTagName!="undefined")return b.getElementsByTagName(a[1])}},preFilter:{CLASS:function(a,b,c,d,e,f){a=" "+a[1].replace(j,"")+" ";if(f)return a;for(var g=0,h;(h=b[g])!=null;g++)h&&(e^(h.className&&(" "+h.className+" ").replace(/[\t\n\r]/g," ").indexOf(a)>=0)?c||d.push(h):c&&(b[g]=!1));return!1},ID:function(a){return a[1].replace(j,"")},TAG:function(a,b){return a[1].replace(j,"").toLowerCase()},CHILD:function(a){if(a[1]==="nth"){a[2]||m.error(a[0]),a[2]=a[2].replace(/^\+|\s*/g,"");var 
b=/(-?)(\d*)(?:n([+\-]?\d*))?/.exec(a[2]==="even"&&"2n"||a[2]==="odd"&&"2n+1"||!/\D/.test(a[2])&&"0n+"+a[2]||a[2]);a[2]=b[1]+(b[2]||1)-0,a[3]=b[3]-0}else a[2]&&m.error(a[0]);a[0]=e++;return a},ATTR:function(a,b,c,d,e,f){var g=a[1]=a[1].replace(j,"");!f&&o.attrMap[g]&&(a[1]=o.attrMap[g]),a[4]=(a[4]||a[5]||"").replace(j,""),a[2]==="~="&&(a[4]=" "+a[4]+" ");return a},PSEUDO:function(b,c,d,e,f){if(b[1]==="not")if((a.exec(b[3])||"").length>1||/^\w/.test(b[3]))b[3]=m(b[3],null,null,c);else{var g=m.filter(b[3],c,d,!0^f);d||e.push.apply(e,g);return!1}else if(o.match.POS.test(b[0])||o.match.CHILD.test(b[0]))return!0;return b},POS:function(a){a.unshift(!0);return a}},filters:{enabled:function(a){return a.disabled===!1&&a.type!=="hidden"},disabled:function(a){return a.disabled===!0},checked:function(a){return a.checked===!0},selected:function(a){a.parentNode&&a.parentNode.selectedIndex;return a.selected===!0},parent:function(a){return!!a.firstChild},empty:function(a){return!a.firstChild},has:function(a,b,c){return!!m(c[3],a).length},header:function(a){return/h\d/i.test(a.nodeName)},text:function(a){var b=a.getAttribute("type"),c=a.type;return a.nodeName.toLowerCase()==="input"&&"text"===c&&(b===c||b===null)},radio:function(a){return a.nodeName.toLowerCase()==="input"&&"radio"===a.type},checkbox:function(a){return a.nodeName.toLowerCase()==="input"&&"checkbox"===a.type},file:function(a){return a.nodeName.toLowerCase()==="input"&&"file"===a.type},password:function(a){return a.nodeName.toLowerCase()==="input"&&"password"===a.type},submit:function(a){var b=a.nodeName.toLowerCase();return(b==="input"||b==="button")&&"submit"===a.type},image:function(a){return a.nodeName.toLowerCase()==="input"&&"image"===a.type},reset:function(a){var b=a.nodeName.toLowerCase();return(b==="input"||b==="button")&&"reset"===a.type},button:function(a){var b=a.nodeName.toLowerCase();return b==="input"&&"button"===a.type||b==="button"},input:function(a){return/input|select|textarea|button/i.test(a.nodeName)},focus:function(a){return a===a.ownerDocument.activeElement}},setFilters:{first:function(a,b){return b===0},last:function(a,b,c,d){return b===d.length-1},even:function(a,b){return b%2===0},odd:function(a,b){return b%2===1},lt:function(a,b,c){return b<c[3]-0},gt:function(a,b,c){return b>c[3]-0},nth:function(a,b,c){return c[3]-0===b},eq:function(a,b,c){return c[3]-0===b}},filter:{PSEUDO:function(a,b,c,d){var e=b[1],f=o.filters[e];if(f)return f(a,c,b,d);if(e==="contains")return(a.textContent||a.innerText||n([a])||"").indexOf(b[3])>=0;if(e==="not"){var g=b[3];for(var h=0,i=g.length;h<i;h++)if(g[h]===a)return!1;return!0}m.error(e)},CHILD:function(a,b){var c,e,f,g,h,i,j,k=b[1],l=a;switch(k){case"only":case"first":while(l=l.previousSibling)if(l.nodeType===1)return!1;if(k==="first")return!0;l=a;case"last":while(l=l.nextSibling)if(l.nodeType===1)return!1;return!0;case"nth":c=b[2],e=b[3];if(c===1&&e===0)return!0;f=b[0],g=a.parentNode;if(g&&(g[d]!==f||!a.nodeIndex)){i=0;for(l=g.firstChild;l;l=l.nextSibling)l.nodeType===1&&(l.nodeIndex=++i);g[d]=f}j=a.nodeIndex-e;return c===0?j===0:j%c===0&&j/c>=0}},ID:function(a,b){return a.nodeType===1&&a.getAttribute("id")===b},TAG:function(a,b){return b==="*"&&a.nodeType===1||!!a.nodeName&&a.nodeName.toLowerCase()===b},CLASS:function(a,b){return(" "+(a.className||a.getAttribute("class"))+" ").indexOf(b)>-1},ATTR:function(a,b){var c=b[1],d=m.attr?m.attr(a,c):o.attrHandle[c]?o.attrHandle[c](a):a[c]!=null?a[c]:a.getAttribute(c),e=d+"",f=b[2],g=b[4];return 
d==null?f==="!=":!f&&m.attr?d!=null:f==="="?e===g:f==="*="?e.indexOf(g)>=0:f==="~="?(" "+e+" ").indexOf(g)>=0:g?f==="!="?e!==g:f==="^="?e.indexOf(g)===0:f==="$="?e.substr(e.length-g.length)===g:f==="|="?e===g||e.substr(0,g.length+1)===g+"-":!1:e&&d!==!1},POS:function(a,b,c,d){var e=b[2],f=o.setFilters[e];if(f)return f(a,c,b,d)}}},p=o.match.POS,q=function(a,b){return"\\"+(b-0+1)};for(var r in o.match)o.match[r]=new RegExp(o.match[r].source+/(?![^\[]*\])(?![^\(]*\))/.source),o.leftMatch[r]=new RegExp(/(^(?:.|\r|\n)*?)/.source+o.match[r].source.replace(/\\(\d+)/g,q));var s=function(a,b){a=Array.prototype.slice.call(a,0);if(b){b.push.apply(b,a);return b}return a};try{Array.prototype.slice.call(c.documentElement.childNodes,0)[0].nodeType}catch(t){s=function(a,b){var c=0,d=b||[];if(g.call(a)==="[object Array]")Array.prototype.push.apply(d,a);else if(typeof a.length=="number")for(var e=a.length;c<e;c++)d.push(a[c]);else for(;a[c];c++)d.push(a[c]);return d}}var u,v;c.documentElement.compareDocumentPosition?u=function(a,b){if(a===b){h=!0;return 0}if(!a.compareDocumentPosition||!b.compareDocumentPosition)return a.compareDocumentPosition?-1:1;return a.compareDocumentPosition(b)&4?-1:1}:(u=function(a,b){if(a===b){h=!0;return 0}if(a.sourceIndex&&b.sourceIndex)return a.sourceIndex-b.sourceIndex;var c,d,e=[],f=[],g=a.parentNode,i=b.parentNode,j=g;if(g===i)return v(a,b);if(!g)return-1;if(!i)return 1;while(j)e.unshift(j),j=j.parentNode;j=i;while(j)f.unshift(j),j=j.parentNode;c=e.length,d=f.length;for(var k=0;k<c&&k<d;k++)if(e[k]!==f[k])return v(e[k],f[k]);return k===c?v(a,f[k],-1):v(e[k],b,1)},v=function(a,b,c){if(a===b)return c;var d=a.nextSibling;while(d){if(d===b)return-1;d=d.nextSibling}return 1}),function(){var a=c.createElement("div"),d="script"+(new Date).getTime(),e=c.documentElement;a.innerHTML="<a name='"+d+"'/>",e.insertBefore(a,e.firstChild),c.getElementById(d)&&(o.find.ID=function(a,c,d){if(typeof c.getElementById!="undefined"&&!d){var e=c.getElementById(a[1]);return e?e.id===a[1]||typeof e.getAttributeNode!="undefined"&&e.getAttributeNode("id").nodeValue===a[1]?[e]:b:[]}},o.filter.ID=function(a,b){var c=typeof a.getAttributeNode!="undefined"&&a.getAttributeNode("id");return a.nodeType===1&&c&&c.nodeValue===b}),e.removeChild(a),e=a=null}(),function(){var a=c.createElement("div");a.appendChild(c.createComment("")),a.getElementsByTagName("*").length>0&&(o.find.TAG=function(a,b){var c=b.getElementsByTagName(a[1]);if(a[1]==="*"){var d=[];for(var e=0;c[e];e++)c[e].nodeType===1&&d.push(c[e]);c=d}return c}),a.innerHTML="<a href='#'></a>",a.firstChild&&typeof a.firstChild.getAttribute!="undefined"&&a.firstChild.getAttribute("href")!=="#"&&(o.attrHandle.href=function(a){return a.getAttribute("href",2)}),a=null}(),c.querySelectorAll&&function(){var a=m,b=c.createElement("div"),d="__sizzle__";b.innerHTML="<p class='TEST'></p>";if(!b.querySelectorAll||b.querySelectorAll(".TEST").length!==0){m=function(b,e,f,g){e=e||c;if(!g&&!m.isXML(e)){var h=/^(\w+$)|^\.([\w\-]+$)|^#([\w\-]+$)/.exec(b);if(h&&(e.nodeType===1||e.nodeType===9)){if(h[1])return s(e.getElementsByTagName(b),f);if(h[2]&&o.find.CLASS&&e.getElementsByClassName)return s(e.getElementsByClassName(h[2]),f)}if(e.nodeType===9){if(b==="body"&&e.body)return s([e.body],f);if(h&&h[3]){var i=e.getElementById(h[3]);if(!i||!i.parentNode)return s([],f);if(i.id===h[3])return s([i],f)}try{return s(e.querySelectorAll(b),f)}catch(j){}}else if(e.nodeType===1&&e.nodeName.toLowerCase()!=="object"){var 
k=e,l=e.getAttribute("id"),n=l||d,p=e.parentNode,q=/^\s*[+~]/.test(b);l?n=n.replace(/'/g,"\\$&"):e.setAttribute("id",n),q&&p&&(e=e.parentNode);try{if(!q||p)return s(e.querySelectorAll("[id='"+n+"'] "+b),f)}catch(r){}finally{l||k.removeAttribute("id")}}}return a(b,e,f,g)};for(var e in a)m[e]=a[e];b=null}}(),function(){var a=c.documentElement,b=a.matchesSelector||a.mozMatchesSelector||a.webkitMatchesSelector||a.msMatchesSelector;if(b){var d=!b.call(c.createElement("div"),"div"),e=!1;try{b.call(c.documentElement,"[test!='']:sizzle")}catch(f){e=!0}m.matchesSelector=function(a,c){c=c.replace(/\=\s*([^'"\]]*)\s*\]/g,"='$1']");if(!m.isXML(a))try{if(e||!o.match.PSEUDO.test(c)&&!/!=/.test(c)){var f=b.call(a,c);if(f||!d||a.document&&a.document.nodeType!==11)return f}}catch(g){}return m(c,null,null,[a]).length>0}}}(),function(){var a=c.createElement("div");a.innerHTML="<div class='test e'></div><div class='test'></div>";if(!!a.getElementsByClassName&&a.getElementsByClassName("e").length!==0){a.lastChild.className="e";if(a.getElementsByClassName("e").length===1)return;o.order.splice(1,0,"CLASS"),o.find.CLASS=function(a,b,c){if(typeof b.getElementsByClassName!="undefined"&&!c)return b.getElementsByClassName(a[1])},a=null}}(),c.documentElement.contains?m.contains=function(a,b){return a!==b&&(a.contains?a.contains(b):!0)}:c.documentElement.compareDocumentPosition?m.contains=function(a,b){return!!(a.compareDocumentPosition(b)&16)}:m.contains=function(){return!1},m.isXML=function(a){var b=(a?a.ownerDocument||a:0).documentElement;return b?b.nodeName!=="HTML":!1};var y=function(a,b,c){var d,e=[],f="",g=b.nodeType?[b]:b;while(d=o.match.PSEUDO.exec(a))f+=d[0],a=a.replace(o.match.PSEUDO,"");a=o.relative[a]?a+"*":a;for(var h=0,i=g.length;h<i;h++)m(a,g[h],e,c);return m.filter(f,e)};m.attr=f.attr,m.selectors.attrMap={},f.find=m,f.expr=m.selectors,f.expr[":"]=f.expr.filters,f.unique=m.uniqueSort,f.text=m.getText,f.isXMLDoc=m.isXML,f.contains=m.contains}();var L=/Until$/,M=/^(?:parents|prevUntil|prevAll)/,N=/,/,O=/^.[^:#\[\.,]*$/,P=Array.prototype.slice,Q=f.expr.match.POS,R={children:!0,contents:!0,next:!0,prev:!0};f.fn.extend({find:function(a){var b=this,c,d;if(typeof a!="string")return f(a).filter(function(){for(c=0,d=b.length;c<d;c++)if(f.contains(b[c],this))return!0});var e=this.pushStack("","find",a),g,h,i;for(c=0,d=this.length;c<d;c++){g=e.length,f.find(a,this[c],e);if(c>0)for(h=g;h<e.length;h++)for(i=0;i<g;i++)if(e[i]===e[h]){e.splice(h--,1);break}}return e},has:function(a){var b=f(a);return this.filter(function(){for(var a=0,c=b.length;a<c;a++)if(f.contains(this,b[a]))return!0})},not:function(a){return this.pushStack(T(this,a,!1),"not",a)},filter:function(a){return this.pushStack(T(this,a,!0),"filter",a)},is:function(a){return!!a&&(typeof a=="string"?Q.test(a)?f(a,this.context).index(this[0])>=0:f.filter(a,this).length>0:this.filter(a).length>0)},closest:function(a,b){var c=[],d,e,g=this[0];if(f.isArray(a)){var h=1;while(g&&g.ownerDocument&&g!==b){for(d=0;d<a.length;d++)f(g).is(a[d])&&c.push({selector:a[d],elem:g,level:h});g=g.parentNode,h++}return c}var i=Q.test(a)||typeof a!="string"?f(a,b||this.context):0;for(d=0,e=this.length;d<e;d++){g=this[d];while(g){if(i?i.index(g)>-1:f.find.matchesSelector(g,a)){c.push(g);break}g=g.parentNode;if(!g||!g.ownerDocument||g===b||g.nodeType===11)break}}c=c.length>1?f.unique(c):c;return this.pushStack(c,"closest",a)},index:function(a){if(!a)return this[0]&&this[0].parentNode?this.prevAll().length:-1;if(typeof a=="string")return f.inArray(this[0],f(a));return 
f.inArray(a.jquery?a[0]:a,this)},add:function(a,b){var c=typeof a=="string"?f(a,b):f.makeArray(a&&a.nodeType?[a]:a),d=f.merge(this.get(),c);return this.pushStack(S(c[0])||S(d[0])?d:f.unique(d))},andSelf:function(){return this.add(this.prevObject)}}),f.each({parent:function(a){var b=a.parentNode;return b&&b.nodeType!==11?b:null},parents:function(a){return f.dir(a,"parentNode")},parentsUntil:function(a,b,c){return f.dir(a,"parentNode",c)},next:function(a){return f.nth(a,2,"nextSibling")},prev:function(a){return f.nth(a,2,"previousSibling")},nextAll:function(a){return f.dir(a,"nextSibling")},prevAll:function(a){return f.dir(a,"previousSibling")},nextUntil:function(a,b,c){return f.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return f.dir(a,"previousSibling",c)},siblings:function(a){return f.sibling(a.parentNode.firstChild,a)},children:function(a){return f.sibling(a.firstChild)},contents:function(a){return f.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:f.makeArray(a.childNodes)}},function(a,b){f.fn[a]=function(c,d){var e=f.map(this,b,c);L.test(a)||(d=c),d&&typeof d=="string"&&(e=f.filter(d,e)),e=this.length>1&&!R[a]?f.unique(e):e,(this.length>1||N.test(d))&&M.test(a)&&(e=e.reverse());return this.pushStack(e,a,P.call(arguments).join(","))}}),f.extend({filter:function(a,b,c){c&&(a=":not("+a+")");return b.length===1?f.find.matchesSelector(b[0],a)?[b[0]]:[]:f.find.matches(a,b)},dir:function(a,c,d){var e=[],g=a[c];while(g&&g.nodeType!==9&&(d===b||g.nodeType!==1||!f(g).is(d)))g.nodeType===1&&e.push(g),g=g[c];return e},nth:function(a,b,c,d){b=b||1;var e=0;for(;a;a=a[c])if(a.nodeType===1&&++e===b)break;return a},sibling:function(a,b){var c=[];for(;a;a=a.nextSibling)a.nodeType===1&&a!==b&&c.push(a);return c}});var V="abbr|article|aside|audio|canvas|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",W=/ jQuery\d+="(?:\d+|null)"/g,X=/^\s+/,Y=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,Z=/<([\w:]+)/,$=/<tbody/i,_=/<|&#?\w+;/,ba=/<(?:script|style)/i,bb=/<(?:script|object|embed|option|style)/i,bc=new RegExp("<(?:"+V+")","i"),bd=/checked\s*(?:[^=]|=\s*.checked.)/i,be=/\/(java|ecma)script/i,bf=/^\s*<!(?:\[CDATA\[|\-\-)/,bg={option:[1,"<select multiple='multiple'>","</select>"],legend:[1,"<fieldset>","</fieldset>"],thead:[1,"<table>","</table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],col:[2,"<table><tbody></tbody><colgroup>","</colgroup></table>"],area:[1,"<map>","</map>"],_default:[0,"",""]},bh=U(c);bg.optgroup=bg.option,bg.tbody=bg.tfoot=bg.colgroup=bg.caption=bg.thead,bg.th=bg.td,f.support.htmlSerialize||(bg._default=[1,"div<div>","</div>"]),f.fn.extend({text:function(a){if(f.isFunction(a))return this.each(function(b){var c=f(this);c.text(a.call(this,b,c.text()))});if(typeof a!="object"&&a!==b)return this.empty().append((this[0]&&this[0].ownerDocument||c).createTextNode(a));return f.text(this)},wrapAll:function(a){if(f.isFunction(a))return this.each(function(b){f(this).wrapAll(a.call(this,b))});if(this[0]){var b=f(a,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstChild&&a.firstChild.nodeType===1)a=a.firstChild;return a}).append(this)}return this},wrapInner:function(a){if(f.isFunction(a))return this.each(function(b){f(this).wrapInner(a.call(this,b))});return this.each(function(){var 
b=f(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=f.isFunction(a);return this.each(function(c){f(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){f.nodeName(this,"body")||f(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(a){this.nodeType===1&&this.appendChild(a)})},prepend:function(){return this.domManip(arguments,!0,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this)});if(arguments.length){var a=f.clean(arguments);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this.nextSibling)});if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,f.clean(arguments));return a}},remove:function(a,b){for(var c=0,d;(d=this[c])!=null;c++)if(!a||f.filter(a,[d]).length)!b&&d.nodeType===1&&(f.cleanData(d.getElementsByTagName("*")),f.cleanData([d])),d.parentNode&&d.parentNode.removeChild(d);return this},empty:function()
{for(var a=0,b;(b=this[a])!=null;a++){b.nodeType===1&&f.cleanData(b.getElementsByTagName("*"));while(b.firstChild)b.removeChild(b.firstChild)}return this},clone:function(a,b){a=a==null?!1:a,b=b==null?a:b;return this.map(function(){return f.clone(this,a,b)})},html:function(a){if(a===b)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(W,""):null;if(typeof a=="string"&&!ba.test(a)&&(f.support.leadingWhitespace||!X.test(a))&&!bg[(Z.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Y,"<$1></$2>");try{for(var c=0,d=this.length;c<d;c++)this[c].nodeType===1&&(f.cleanData(this[c].getElementsByTagName("*")),this[c].innerHTML=a)}catch(e){this.empty().append(a)}}else f.isFunction(a)?this.each(function(b){var c=f(this);c.html(a.call(this,b,c.html()))}):this.empty().append(a);return this},replaceWith:function(a){if(this[0]&&this[0].parentNode){if(f.isFunction(a))return this.each(function(b){var c=f(this),d=c.html();c.replaceWith(a.call(this,b,d))});typeof a!="string"&&(a=f(a).detach());return this.each(function(){var b=this.nextSibling,c=this.parentNode;f(this).remove(),b?f(b).before(a):f(c).append(a)})}return this.length?this.pushStack(f(f.isFunction(a)?a():a),"replaceWith",a):this},detach:function(a){return this.remove(a,!0)},domManip:function(a,c,d){var e,g,h,i,j=a[0],k=[];if(!f.support.checkClone&&arguments.length===3&&typeof j=="string"&&bd.test(j))return this.each(function(){f(this).domManip(a,c,d,!0)});if(f.isFunction(j))return this.each(function(e){var g=f(this);a[0]=j.call(this,e,c?g.html():b),g.domManip(a,c,d)});if(this[0]){i=j&&j.parentNode,f.support.parentNode&&i&&i.nodeType===11&&i.childNodes.length===this.length?e={fragment:i}:e=f.buildFragment(a,this,k),h=e.fragment,h.childNodes.length===1?g=h=h.firstChild:g=h.firstChild;if(g){c=c&&f.nodeName(g,"tr");for(var l=0,m=this.length,n=m-1;l<m;l++)d.call(c?bi(this[l],g):this[l],e.cacheable||m>1&&l<n?f.clone(h,!0,!0):h)}k.length&&f.each(k,bp)}return this}}),f.buildFragment=function(a,b,d){var e,g,h,i,j=a[0];b&&b[0]&&(i=b[0].ownerDocument||b[0]),i.createDocumentFragment||(i=c),a.length===1&&typeof j=="string"&&j.length<512&&i===c&&j.charAt(0)==="<"&&!bb.test(j)&&(f.support.checkClone||!bd.test(j))&&(f.support.html5Clone||!bc.test(j))&&(g=!0,h=f.fragments[j],h&&h!==1&&(e=h)),e||(e=i.createDocumentFragment(),f.clean(a,i,e,d)),g&&(f.fragments[j]=h?e:1);return{fragment:e,cacheable:g}},f.fragments={},f.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){f.fn[a]=function(c){var d=[],e=f(c),g=this.length===1&&this[0].parentNode;if(g&&g.nodeType===11&&g.childNodes.length===1&&e.length===1){e[b](this[0]);return this}for(var h=0,i=e.length;h<i;h++){var j=(h>0?this.clone(!0):this).get();f(e[h])[b](j),d=d.concat(j)}return this.pushStack(d,a,e.selector)}}),f.extend({clone:function(a,b,c){var d,e,g,h=f.support.html5Clone||!bc.test("<"+a.nodeName)?a.cloneNode(!0):bo(a);if((!f.support.noCloneEvent||!f.support.noCloneChecked)&&(a.nodeType===1||a.nodeType===11)&&!f.isXMLDoc(a)){bk(a,h),d=bl(a),e=bl(h);for(g=0;d[g];++g)e[g]&&bk(d[g],e[g])}if(b){bj(a,h);if(c){d=bl(a),e=bl(h);for(g=0;d[g];++g)bj(d[g],e[g])}}d=e=null;return h},clean:function(a,b,d,e){var g;b=b||c,typeof b.createElement=="undefined"&&(b=b.ownerDocument||b[0]&&b[0].ownerDocument||c);var h=[],i;for(var j=0,k;(k=a[j])!=null;j++){typeof k=="number"&&(k+="");if(!k)continue;if(typeof k=="string")if(!_.test(k))k=b.createTextNode(k);else{k=k.replace(Y,"<$1></$2>");var 
l=(Z.exec(k)||["",""])[1].toLowerCase(),m=bg[l]||bg._default,n=m[0],o=b.createElement("div");b===c?bh.appendChild(o):U(b).appendChild(o),o.innerHTML=m[1]+k+m[2];while(n--)o=o.lastChild;if(!f.support.tbody){var p=$.test(k),q=l==="table"&&!p?o.firstChild&&o.firstChild.childNodes:m[1]==="<table>"&&!p?o.childNodes:[];for(i=q.length-1;i>=0;--i)f.nodeName(q[i],"tbody")&&!q[i].childNodes.length&&q[i].parentNode.removeChild(q[i])}!f.support.leadingWhitespace&&X.test(k)&&o.insertBefore(b.createTextNode(X.exec(k)[0]),o.firstChild),k=o.childNodes}var r;if(!f.support.appendChecked)if(k[0]&&typeof (r=k.length)=="number")for(i=0;i<r;i++)bn(k[i]);else bn(k);k.nodeType?h.push(k):h=f.merge(h,k)}if(d){g=function(a){return!a.type||be.test(a.type)};for(j=0;h[j];j++)if(e&&f.nodeName(h[j],"script")&&(!h[j].type||h[j].type.toLowerCase()==="text/javascript"))e.push(h[j].parentNode?h[j].parentNode.removeChild(h[j]):h[j]);else{if(h[j].nodeType===1){var s=f.grep(h[j].getElementsByTagName("script"),g);h.splice.apply(h,[j+1,0].concat(s))}d.appendChild(h[j])}}return h},cleanData:function(a){var b,c,d=f.cache,e=f.event.special,g=f.support.deleteExpando;for(var h=0,i;(i=a[h])!=null;h++){if(i.nodeName&&f.noData[i.nodeName.toLowerCase()])continue;c=i[f.expando];if(c){b=d[c];if(b&&b.events){for(var j in b.events)e[j]?f.event.remove(i,j):f.removeEvent(i,j,b.handle);b.handle&&(b.handle.elem=null)}g?delete i[f.expando]:i.removeAttribute&&i.removeAttribute(f.expando),delete d[c]}}}});var bq=/alpha\([^)]*\)/i,br=/opacity=([^)]*)/,bs=/([A-Z]|^ms)/g,bt=/^-?\d+(?:px)?$/i,bu=/^-?\d/,bv=/^([\-+])=([\-+.\de]+)/,bw={position:"absolute",visibility:"hidden",display:"block"},bx=["Left","Right"],by=["Top","Bottom"],bz,bA,bB;f.fn.css=function(a,c){if(arguments.length===2&&c===b)return this;return f.access(this,a,c,!0,function(a,c,d){return d!==b?f.style(a,c,d):f.css(a,c)})},f.extend({cssHooks:{opacity:{get:function(a,b){if(b){var c=bz(a,"opacity","opacity");return c===""?"1":c}return a.style.opacity}}},cssNumber:{fillOpacity:!0,fontWeight:!0,lineHeight:!0,opacity:!0,orphans:!0,widows:!0,zIndex:!0,zoom:!0},cssProps:{"float":f.support.cssFloat?"cssFloat":"styleFloat"},style:function(a,c,d,e){if(!!a&&a.nodeType!==3&&a.nodeType!==8&&!!a.style){var g,h,i=f.camelCase(c),j=a.style,k=f.cssHooks[i];c=f.cssProps[i]||i;if(d===b){if(k&&"get"in k&&(g=k.get(a,!1,e))!==b)return g;return j[c]}h=typeof d,h==="string"&&(g=bv.exec(d))&&(d=+(g[1]+1)*+g[2]+parseFloat(f.css(a,c)),h="number");if(d==null||h==="number"&&isNaN(d))return;h==="number"&&!f.cssNumber[i]&&(d+="px");if(!k||!("set"in k)||(d=k.set(a,d))!==b)try{j[c]=d}catch(l){}}},css:function(a,c,d){var e,g;c=f.camelCase(c),g=f.cssHooks[c],c=f.cssProps[c]||c,c==="cssFloat"&&(c="float");if(g&&"get"in g&&(e=g.get(a,!0,d))!==b)return e;if(bz)return bz(a,c)},swap:function(a,b,c){var d={};for(var e in b)d[e]=a.style[e],a.style[e]=b[e];c.call(a);for(e in b)a.style[e]=d[e]}}),f.curCSS=f.css,f.each(["height","width"],function(a,b){f.cssHooks[b]={get:function(a,c,d){var e;if(c){if(a.offsetWidth!==0)return bC(a,b,d);f.swap(a,bw,function(){e=bC(a,b,d)});return e}},set:function(a,b){if(!bt.test(b))return b;b=parseFloat(b);if(b>=0)return b+"px"}}}),f.support.opacity||(f.cssHooks.opacity={get:function(a,b){return br.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?parseFloat(RegExp.$1)/100+"":b?"1":""},set:function(a,b){var 
c=a.style,d=a.currentStyle,e=f.isNumeric(b)?"alpha(opacity="+b*100+")":"",g=d&&d.filter||c.filter||"";c.zoom=1;if(b>=1&&f.trim(g.replace(bq,""))===""){c.removeAttribute("filter");if(d&&!d.filter)return}c.filter=bq.test(g)?g.replace(bq,e):g+" "+e}}),f(function(){f.support.reliableMarginRight||(f.cssHooks.marginRight={get:function(a,b){var c;f.swap(a,{display:"inline-block"},function(){b?c=bz(a,"margin-right","marginRight"):c=a.style.marginRight});return c}})}),c.defaultView&&c.defaultView.getComputedStyle&&(bA=function(a,b){var c,d,e;b=b.replace(bs,"-$1").toLowerCase(),(d=a.ownerDocument.defaultView)&&(e=d.getComputedStyle(a,null))&&(c=e.getPropertyValue(b),c===""&&!f.contains(a.ownerDocument.documentElement,a)&&(c=f.style(a,b)));return c}),c.documentElement.currentStyle&&(bB=function(a,b){var c,d,e,f=a.currentStyle&&a.currentStyle[b],g=a.style;f===null&&g&&(e=g[b])&&(f=e),!bt.test(f)&&bu.test(f)&&(c=g.left,d=a.runtimeStyle&&a.runtimeStyle.left,d&&(a.runtimeStyle.left=a.currentStyle.left),g.left=b==="fontSize"?"1em":f||0,f=g.pixelLeft+"px",g.left=c,d&&(a.runtimeStyle.left=d));return f===""?"auto":f}),bz=bA||bB,f.expr&&f.expr.filters&&(f.expr.filters.hidden=function(a){var b=a.offsetWidth,c=a.offsetHeight;return b===0&&c===0||!f.support.reliableHiddenOffsets&&(a.style&&a.style.display||f.css(a,"display"))==="none"},f.expr.filters.visible=function(a){return!f.expr.filters.hidden(a)});var bD=/%20/g,bE=/\[\]$/,bF=/\r?\n/g,bG=/#.*$/,bH=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,bI=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,bJ=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,bK=/^(?:GET|HEAD)$/,bL=/^\/\//,bM=/\?/,bN=/<script\b[^<]*(?:(?!<\/script>)<[^<]*)*<\/script>/gi,bO=/^(?:select|textarea)/i,bP=/\s+/,bQ=/([?&])_=[^&]*/,bR=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+))?)?/,bS=f.fn.load,bT={},bU={},bV,bW,bX=["*/"]+["*"];try{bV=e.href}catch(bY){bV=c.createElement("a"),bV.href="",bV=bV.href}bW=bR.exec(bV.toLowerCase())||[],f.fn.extend({load:function(a,c,d){if(typeof a!="string"&&bS)return bS.apply(this,arguments);if(!this.length)return this;var e=a.indexOf(" ");if(e>=0){var g=a.slice(e,a.length);a=a.slice(0,e)}var h="GET";c&&(f.isFunction(c)?(d=c,c=b):typeof c=="object"&&(c=f.param(c,f.ajaxSettings.traditional),h="POST"));var i=this;f.ajax({url:a,type:h,dataType:"html",data:c,complete:function(a,b,c){c=a.responseText,a.isResolved()&&(a.done(function(a){c=a}),i.html(g?f("<div>").append(c.replace(bN,"")).find(g):c)),d&&i.each(d,[c,b,a])}});return this},serialize:function(){return f.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?f.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||bO.test(this.nodeName)||bI.test(this.type))}).map(function(a,b){var c=f(this).val();return c==null?null:f.isArray(c)?f.map(c,function(a,c){return{name:b.name,value:a.replace(bF,"\r\n")}}):{name:b.name,value:c.replace(bF,"\r\n")}}).get()}}),f.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){f.fn[b]=function(a){return this.on(b,a)}}),f.each(["get","post"],function(a,c){f[c]=function(a,d,e,g){f.isFunction(d)&&(g=g||e,e=d,d=b);return f.ajax({type:c,url:a,data:d,success:e,dataType:g})}}),f.extend({getScript:function(a,c){return f.get(a,b,c,"script")},getJSON:function(a,b,c){return f.get(a,b,c,"json")},ajaxSetup:function(a,b){b?b_(a,f.ajaxSettings):(b=a,a=f.ajaxSettings),b_(a,b);return 
a},ajaxSettings:{url:bV,isLocal:bJ.test(bW[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":bX},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":a.String,"text html":!0,"text json":f.parseJSON,"text xml":f.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:bZ(bT),ajaxTransport:bZ(bU),ajax:function(a,c){function w(a,c,l,m){if(s!==2){s=2,q&&clearTimeout(q),p=b,n=m||"",v.readyState=a>0?4:0;var o,r,u,w=c,x=l?cb(d,v,l):b,y,z;if(a>=200&&a<300||a===304){if(d.ifModified){if(y=v.getResponseHeader("Last-Modified"))f.lastModified[k]=y;if(z=v.getResponseHeader("Etag"))f.etag[k]=z}if(a===304)w="notmodified",o=!0;else try{r=cc(d,x),w="success",o=!0}catch(A){w="parsererror",u=A}}else{u=w;if(!w||a)w="error",a<0&&(a=0)}v.status=a,v.statusText=""+(c||w),o?h.resolveWith(e,[r,w,v]):h.rejectWith(e,[v,w,u]),v.statusCode(j),j=b,t&&g.trigger("ajax"+(o?"Success":"Error"),[v,d,o?r:u]),i.fireWith(e,[v,w]),t&&(g.trigger("ajaxComplete",[v,d]),--f.active||f.event.trigger("ajaxStop"))}}typeof a=="object"&&(c=a,a=b),c=c||{};var d=f.ajaxSetup({},c),e=d.context||d,g=e!==d&&(e.nodeType||e instanceof f)?f(e):f.event,h=f.Deferred(),i=f.Callbacks("once memory"),j=d.statusCode||{},k,l={},m={},n,o,p,q,r,s=0,t,u,v={readyState:0,setRequestHeader:function(a,b){if(!s){var c=a.toLowerCase();a=m[c]=m[c]||a,l[a]=b}return this},getAllResponseHeaders:function(){return s===2?n:null},getResponseHeader:function(a){var c;if(s===2){if(!o){o={};while(c=bH.exec(n))o[c[1].toLowerCase()]=c[2]}c=o[a.toLowerCase()]}return c===b?null:c},overrideMimeType:function(a){s||(d.mimeType=a);return this},abort:function(a){a=a||"abort",p&&p.abort(a),w(0,a);return this}};h.promise(v),v.success=v.done,v.error=v.fail,v.complete=i.add,v.statusCode=function(a){if(a){var b;if(s<2)for(b in a)j[b]=[j[b],a[b]];else b=a[v.status],v.then(b,b)}return this},d.url=((a||d.url)+"").replace(bG,"").replace(bL,bW[1]+"//"),d.dataTypes=f.trim(d.dataType||"*").toLowerCase().split(bP),d.crossDomain==null&&(r=bR.exec(d.url.toLowerCase()),d.crossDomain=!(!r||r[1]==bW[1]&&r[2]==bW[2]&&(r[3]||(r[1]==="http:"?80:443))==(bW[3]||(bW[1]==="http:"?80:443)))),d.data&&d.processData&&typeof d.data!="string"&&(d.data=f.param(d.data,d.traditional)),b$(bT,d,c,v);if(s===2)return!1;t=d.global,d.type=d.type.toUpperCase(),d.hasContent=!bK.test(d.type),t&&f.active++===0&&f.event.trigger("ajaxStart");if(!d.hasContent){d.data&&(d.url+=(bM.test(d.url)?"&":"?")+d.data,delete d.data),k=d.url;if(d.cache===!1){var x=f.now(),y=d.url.replace(bQ,"$1_="+x);d.url=y+(y===d.url?(bM.test(d.url)?"&":"?")+"_="+x:"")}}(d.data&&d.hasContent&&d.contentType!==!1||c.contentType)&&v.setRequestHeader("Content-Type",d.contentType),d.ifModified&&(k=k||d.url,f.lastModified[k]&&v.setRequestHeader("If-Modified-Since",f.lastModified[k]),f.etag[k]&&v.setRequestHeader("If-None-Match",f.etag[k])),v.setRequestHeader("Accept",d.dataTypes[0]&&d.accepts[d.dataTypes[0]]?d.accepts[d.dataTypes[0]]+(d.dataTypes[0]!=="*"?", "+bX+"; q=0.01":""):d.accepts["*"]);for(u in d.headers)v.setRequestHeader(u,d.headers[u]);if(d.beforeSend&&(d.beforeSend.call(e,v,d)===!1||s===2)){v.abort();return!1}for(u in{success:1,error:1,complete:1})v[u](d[u]);p=b$(bU,d,c,v);if(!p)w(-1,"No 
Transport");else{v.readyState=1,t&&g.trigger("ajaxSend",[v,d]),d.async&&d.timeout>0&&(q=setTimeout(function(){v.abort("timeout")},d.timeout));try{s=1,p.send(l,w)}catch(z){if(s<2)w(-1,z);else throw z}}return v},param:function(a,c){var d=[],e=function(a,b){b=f.isFunction(b)?b():b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};c===b&&(c=f.ajaxSettings.traditional);if(f.isArray(a)||a.jquery&&!f.isPlainObject(a))f.each(a,function(){e(this.name,this.value)});else for(var g in a)ca(g,a[g],c,e);return d.join("&").replace(bD,"+")}}),f.extend({active:0,lastModified:{},etag:{}});var cd=f.now(),ce=/(\=)\?(&|$)|\?\?/i;f.ajaxSetup({jsonp:"callback",jsonpCallback:function(){return f.expando+"_"+cd++}}),f.ajaxPrefilter("json jsonp",function(b,c,d){var e=b.contentType==="application/x-www-form-urlencoded"&&typeof b.data=="string";if(b.dataTypes[0]==="jsonp"||b.jsonp!==!1&&(ce.test(b.url)||e&&ce.test(b.data))){var g,h=b.jsonpCallback=f.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,i=a[h],j=b.url,k=b.data,l="$1"+h+"$2";b.jsonp!==!1&&(j=j.replace(ce,l),b.url===j&&(e&&(k=k.replace(ce,l)),b.data===k&&(j+=(/\?/.test(j)?"&":"?")+b.jsonp+"="+h))),b.url=j,b.data=k,a[h]=function(a){g=[a]},d.always(function(){a[h]=i,g&&f.isFunction(i)&&a[h](g[0])}),b.converters["script json"]=function(){g||f.error(h+" was not called");return g[0]},b.dataTypes[0]="json";return"script"}}),f.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(a){f.globalEval(a);return a}}}),f.ajaxPrefilter("script",function(a){a.cache===b&&(a.cache=!1),a.crossDomain&&(a.type="GET",a.global=!1)}),f.ajaxTransport("script",function(a){if(a.crossDomain){var d,e=c.head||c.getElementsByTagName("head")[0]||c.documentElement;return{send:function(f,g){d=c.createElement("script"),d.async="async",a.scriptCharset&&(d.charset=a.scriptCharset),d.src=a.url,d.onload=d.onreadystatechange=function(a,c){if(c||!d.readyState||/loaded|complete/.test(d.readyState))d.onload=d.onreadystatechange=null,e&&d.parentNode&&e.removeChild(d),d=b,c||g(200,"success")},e.insertBefore(d,e.firstChild)},abort:function(){d&&d.onload(0,1)}}}});var cf=a.ActiveXObject?function(){for(var a in ch)ch[a](0,1)}:!1,cg=0,ch;f.ajaxSettings.xhr=a.ActiveXObject?function(){return!this.isLocal&&ci()||cj()}:ci,function(a){f.extend(f.support,{ajax:!!a,cors:!!a&&"withCredentials"in a})}(f.ajaxSettings.xhr()),f.support.ajax&&f.ajaxTransport(function(c){if(!c.crossDomain||f.support.cors){var d;return{send:function(e,g){var h=c.xhr(),i,j;c.username?h.open(c.type,c.url,c.async,c.username,c.password):h.open(c.type,c.url,c.async);if(c.xhrFields)for(j in c.xhrFields)h[j]=c.xhrFields[j];c.mimeType&&h.overrideMimeType&&h.overrideMimeType(c.mimeType),!c.crossDomain&&!e["X-Requested-With"]&&(e["X-Requested-With"]="XMLHttpRequest");try{for(j in e)h.setRequestHeader(j,e[j])}catch(k){}h.send(c.hasContent&&c.data||null),d=function(a,e){var j,k,l,m,n;try{if(d&&(e||h.readyState===4)){d=b,i&&(h.onreadystatechange=f.noop,cf&&delete 
ch[i]);if(e)h.readyState!==4&&h.abort();else{j=h.status,l=h.getAllResponseHeaders(),m={},n=h.responseXML,n&&n.documentElement&&(m.xml=n),m.text=h.responseText;try{k=h.statusText}catch(o){k=""}!j&&c.isLocal&&!c.crossDomain?j=m.text?200:404:j===1223&&(j=204)}}}catch(p){e||g(-1,p)}m&&g(j,k,m,l)},!c.async||h.readyState===4?d():(i=++cg,cf&&(ch||(ch={},f(a).unload(cf)),ch[i]=d),h.onreadystatechange=d)},abort:function(){d&&d(0,1)}}}});var ck={},cl,cm,cn=/^(?:toggle|show|hide)$/,co=/^([+\-]=)?([\d+.\-]+)([a-z%]*)$/i,cp,cq=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]],cr;f.fn.extend({show:function(a,b,c){var d,e;if(a||a===0)return this.animate(cu("show",3),a,b,c);for(var g=0,h=this.length;g<h;g++)d=this[g],d.style&&(e=d.style.display,!f._data(d,"olddisplay")&&e==="none"&&(e=d.style.display=""),e===""&&f.css(d,"display")==="none"&&f._data(d,"olddisplay",cv(d.nodeName)));for(g=0;g<h;g++){d=this[g];if(d.style){e=d.style.display;if(e===""||e==="none")d.style.display=f._data(d,"olddisplay")||""}}return this},hide:function(a,b,c){if(a||a===0)return this.animate(cu("hide",3),a,b,c);var d,e,g=0,h=this.length;for(;g<h;g++)d=this[g],d.style&&(e=f.css(d,"display"),e!=="none"&&!f._data(d,"olddisplay")&&f._data(d,"olddisplay",e));for(g=0;g<h;g++)this[g].style&&(this[g].style.display="none");return this},_toggle:f.fn.toggle,toggle:function(a,b,c){var d=typeof a=="boolean";f.isFunction(a)&&f.isFunction(b)?this._toggle.apply(this,arguments):a==null||d?this.each(function(){var b=d?a:f(this).is(":hidden");f(this)[b?"show":"hide"]()}):this.animate(cu("toggle",3),a,b,c);return this},fadeTo:function(a,b,c,d){return this.filter(":hidden").css("opacity",0).show().end().animate({opacity:b},a,c,d)},animate:function(a,b,c,d){function g(){e.queue===!1&&f._mark(this);var b=f.extend({},e),c=this.nodeType===1,d=c&&f(this).is(":hidden"),g,h,i,j,k,l,m,n,o;b.animatedProperties={};for(i in a){g=f.camelCase(i),i!==g&&(a[g]=a[i],delete a[i]),h=a[g],f.isArray(h)?(b.animatedProperties[g]=h[1],h=a[g]=h[0]):b.animatedProperties[g]=b.specialEasing&&b.specialEasing[g]||b.easing||"swing";if(h==="hide"&&d||h==="show"&&!d)return b.complete.call(this);c&&(g==="height"||g==="width")&&(b.overflow=[this.style.overflow,this.style.overflowX,this.style.overflowY],f.css(this,"display")==="inline"&&f.css(this,"float")==="none"&&(!f.support.inlineBlockNeedsLayout||cv(this.nodeName)==="inline"?this.style.display="inline-block":this.style.zoom=1))}b.overflow!=null&&(this.style.overflow="hidden");for(i in a)j=new f.fx(this,b,i),h=a[i],cn.test(h)?(o=f._data(this,"toggle"+i)||(h==="toggle"?d?"show":"hide":0),o?(f._data(this,"toggle"+i,o==="show"?"hide":"show"),j[o]()):j[h]()):(k=co.exec(h),l=j.cur(),k?(m=parseFloat(k[2]),n=k[3]||(f.cssNumber[i]?"":"px"),n!=="px"&&(f.style(this,i,(m||1)+n),l=(m||1)/j.cur()*l,f.style(this,i,l+n)),k[1]&&(m=(k[1]==="-="?-1:1)*m+l),j.custom(l,m,n)):j.custom(l,h,""));return!0}var e=f.speed(b,c,d);if(f.isEmptyObject(a))return this.each(e.complete,[!1]);a=f.extend({},a);return e.queue===!1?this.each(g):this.queue(e.queue,g)},stop:function(a,c,d){typeof a!="string"&&(d=c,c=a,a=b),c&&a!==!1&&this.queue(a||"fx",[]);return this.each(function(){function h(a,b,c){var e=b[c];f.removeData(a,c,!0),e.stop(d)}var b,c=!1,e=f.timers,g=f._data(this);d||f._unmark(!0,this);if(a==null)for(b in g)g[b]&&g[b].stop&&b.indexOf(".run")===b.length-4&&h(this,g,b);else 
g[b=a+".run"]&&g[b].stop&&h(this,g,b);for(b=e.length;b--;)e[b].elem===this&&(a==null||e[b].queue===a)&&(d?e[b](!0):e[b].saveState(),c=!0,e.splice(b,1));(!d||!c)&&f.dequeue(this,a)})}}),f.each({slideDown:cu("show",1),slideUp:cu("hide",1),slideToggle:cu("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"},fadeToggle:{opacity:"toggle"}},function(a,b){f.fn[a]=function(a,c,d){return this.animate(b,a,c,d)}}),f.extend({speed:function(a,b,c){var d=a&&typeof a=="object"?f.extend({},a):{complete:c||!c&&b||f.isFunction(a)&&a,duration:a,easing:c&&b||b&&!f.isFunction(b)&&b};d.duration=f.fx.off?0:typeof d.duration=="number"?d.duration:d.duration in f.fx.speeds?f.fx.speeds[d.duration]:f.fx.speeds._default;if(d.queue==null||d.queue===!0)d.queue="fx";d.old=d.complete,d.complete=function(a){f.isFunction(d.old)&&d.old.call(this),d.queue?f.dequeue(this,d.queue):a!==!1&&f._unmark(this)};return d},easing:{linear:function(a,b,c,d){return c+d*a},swing:function(a,b,c,d){return(-Math.cos(a*Math.PI)/2+.5)*d+c}},timers:[],fx:function(a,b,c){this.options=b,this.elem=a,this.prop=c,b.orig=b.orig||{}}}),f.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this),(f.fx.step[this.prop]||f.fx.step._default)(this)},cur:function(){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];var a,b=f.css(this.elem,this.prop);return isNaN(a=parseFloat(b))?!b||b==="auto"?0:b:a},custom:function(a,c,d){function h(a){return e.step(a)}var e=this,g=f.fx;this.startTime=cr||cs(),this.end=c,this.now=this.start=a,this.pos=this.state=0,this.unit=d||this.unit||(f.cssNumber[this.prop]?"":"px"),h.queue=this.options.queue,h.elem=this.elem,h.saveState=function(){e.options.hide&&f._data(e.elem,"fxshow"+e.prop)===b&&f._data(e.elem,"fxshow"+e.prop,e.start)},h()&&f.timers.push(h)&&!cp&&(cp=setInterval(g.tick,g.interval))},show:function(){var a=f._data(this.elem,"fxshow"+this.prop);this.options.orig[this.prop]=a||f.style(this.elem,this.prop),this.options.show=!0,a!==b?this.custom(this.cur(),a):this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur()),f(this.elem).show()},hide:function(){this.options.orig[this.prop]=f._data(this.elem,"fxshow"+this.prop)||f.style(this.elem,this.prop),this.options.hide=!0,this.custom(this.cur(),0)},step:function(a){var b,c,d,e=cr||cs(),g=!0,h=this.elem,i=this.options;if(a||e>=i.duration+this.startTime){this.now=this.end,this.pos=this.state=1,this.update(),i.animatedProperties[this.prop]=!0;for(b in i.animatedProperties)i.animatedProperties[b]!==!0&&(g=!1);if(g){i.overflow!=null&&!f.support.shrinkWrapBlocks&&f.each(["","X","Y"],function(a,b){h.style["overflow"+b]=i.overflow[a]}),i.hide&&f(h).hide();if(i.hide||i.show)for(b in i.animatedProperties)f.style(h,b,i.orig[b]),f.removeData(h,"fxshow"+b,!0),f.removeData(h,"toggle"+b,!0);d=i.complete,d&&(i.complete=!1,d.call(h))}return!1}i.duration==Infinity?this.now=e:(c=e-this.startTime,this.state=c/i.duration,this.pos=f.easing[i.animatedProperties[this.prop]](this.state,c,0,1,i.duration),this.now=this.start+(this.end-this.start)*this.pos),this.update();return!0}},f.extend(f.fx,{tick:function(){var 
a,b=f.timers,c=0;for(;c<b.length;c++)a=b[c],!a()&&b[c]===a&&b.splice(c--,1);b.length||f.fx.stop()},interval:13,stop:function(){clearInterval(cp),cp=null},speeds:{slow:600,fast:200,_default:400},step:{opacity:function(a){f.style(a.elem,"opacity",a.now)},_default:function(a){a.elem.style&&a.elem.style[a.prop]!=null?a.elem.style[a.prop]=a.now+a.unit:a.elem[a.prop]=a.now}}}),f.each(["width","height"],function(a,b){f.fx.step[b]=function(a){f.style(a.elem,b,Math.max(0,a.now)+a.unit)}}),f.expr&&f.expr.filters&&(f.expr.filters.animated=function(a){return f.grep(f.timers,function(b){return a===b.elem}).length});var cw=/^t(?:able|d|h)$/i,cx=/^(?:body|html)$/i;"getBoundingClientRect"in c.documentElement?f.fn.offset=function(a){var b=this[0],c;if(a)return this.each(function(b){f.offset.setOffset(this,a,b)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return f.offset.bodyOffset(b);try{c=b.getBoundingClientRect()}catch(d){}var e=b.ownerDocument,g=e.documentElement;if(!c||!f.contains(g,b))return c?{top:c.top,left:c.left}:{top:0,left:0};var h=e.body,i=cy(e),j=g.clientTop||h.clientTop||0,k=g.clientLeft||h.clientLeft||0,l=i.pageYOffset||f.support.boxModel&&g.scrollTop||h.scrollTop,m=i.pageXOffset||f.support.boxModel&&g.scrollLeft||h.scrollLeft,n=c.top+l-j,o=c.left+m-k;return{top:n,left:o}}:f.fn.offset=function(a){var b=this[0];if(a)return this.each(function(b){f.offset.setOffset(this,a,b)});if(!b||!b.ownerDocument)return null;if(b===b.ownerDocument.body)return f.offset.bodyOffset(b);var c,d=b.offsetParent,e=b,g=b.ownerDocument,h=g.documentElement,i=g.body,j=g.defaultView,k=j?j.getComputedStyle(b,null):b.currentStyle,l=b.offsetTop,m=b.offsetLeft;while((b=b.parentNode)&&b!==i&&b!==h){if(f.support.fixedPosition&&k.position==="fixed")break;c=j?j.getComputedStyle(b,null):b.currentStyle,l-=b.scrollTop,m-=b.scrollLeft,b===d&&(l+=b.offsetTop,m+=b.offsetLeft,f.support.doesNotAddBorder&&(!f.support.doesAddBorderForTableAndCells||!cw.test(b.nodeName))&&(l+=parseFloat(c.borderTopWidth)||0,m+=parseFloat(c.borderLeftWidth)||0),e=d,d=b.offsetParent),f.support.subtractsBorderForOverflowNotVisible&&c.overflow!=="visible"&&(l+=parseFloat(c.borderTopWidth)||0,m+=parseFloat(c.borderLeftWidth)||0),k=c}if(k.position==="relative"||k.position==="static")l+=i.offsetTop,m+=i.offsetLeft;f.support.fixedPosition&&k.position==="fixed"&&(l+=Math.max(h.scrollTop,i.scrollTop),m+=Math.max(h.scrollLeft,i.scrollLeft));return{top:l,left:m}},f.offset={bodyOffset:function(a){var b=a.offsetTop,c=a.offsetLeft;f.support.doesNotIncludeMarginInBodyOffset&&(b+=parseFloat(f.css(a,"marginTop"))||0,c+=parseFloat(f.css(a,"marginLeft"))||0);return{top:b,left:c}},setOffset:function(a,b,c){var d=f.css(a,"position");d==="static"&&(a.style.position="relative");var e=f(a),g=e.offset(),h=f.css(a,"top"),i=f.css(a,"left"),j=(d==="absolute"||d==="fixed")&&f.inArray("auto",[h,i])>-1,k={},l={},m,n;j?(l=e.position(),m=l.top,n=l.left):(m=parseFloat(h)||0,n=parseFloat(i)||0),f.isFunction(b)&&(b=b.call(a,c,g)),b.top!=null&&(k.top=b.top-g.top+m),b.left!=null&&(k.left=b.left-g.left+n),"using"in b?b.using.call(a,k):e.css(k)}},f.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),c=this.offset(),d=cx.test(b[0].nodeName)?{top:0,left:0}:b.offset();c.top-=parseFloat(f.css(a,"marginTop"))||0,c.left-=parseFloat(f.css(a,"marginLeft"))||0,d.top+=parseFloat(f.css(b[0],"borderTopWidth"))||0,d.left+=parseFloat(f.css(b[0],"borderLeftWidth"))||0;return{top:c.top-d.top,left:c.left-d.left}},offsetParent:function(){return 
this.map(function(){var a=this.offsetParent||c.body;while(a&&!cx.test(a.nodeName)&&f.css(a,"position")==="static")a=a.offsetParent;return a})}}),f.each(["Left","Top"],function(a,c){var d="scroll"+c;f.fn[d]=function(c){var e,g;if(c===b){e=this[0];if(!e)return null;g=cy(e);return g?"pageXOffset"in g?g[a?"pageYOffset":"pageXOffset"]:f.support.boxModel&&g.document.documentElement[d]||g.document.body[d]:e[d]}return this.each(function(){g=cy(this),g?g.scrollTo(a?f(g).scrollLeft():c,a?c:f(g).scrollTop()):this[d]=c})}}),f.each(["Height","Width"],function(a,c){var d=c.toLowerCase();f.fn["inner"+c]=function(){var a=this[0];return a?a.style?parseFloat(f.css(a,d,"padding")):this[d]():null},f.fn["outer"+c]=function(a){var b=this[0];return b?b.style?parseFloat(f.css(b,d,a?"margin":"border")):this[d]():null},f.fn[d]=function(a){var e=this[0];if(!e)return a==null?null:this;if(f.isFunction(a))return this.each(function(b){var c=f(this);c[d](a.call(this,b,c[d]()))});if(f.isWindow(e)){var g=e.document.documentElement["client"+c],h=e.document.body;return e.document.compatMode==="CSS1Compat"&&g||h&&h["client"+c]||g}if(e.nodeType===9)return Math.max(e.documentElement["client"+c],e.body["scroll"+c],e.documentElement["scroll"+c],e.body["offset"+c],e.documentElement["offset"+c]);if(a===b){var i=f.css(e,d),j=parseFloat(i);return f.isNumeric(j)?j:i}return this.css(d,typeof a=="string"?a:a+"px")}}),a.jQuery=a.$=f,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return f})})(window);
\ No newline at end of file
Binary file DVN-web/installer/dvninstall/doc/guides/_static/logo.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_static/minus.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_static/plus.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/_static/pygments.css	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,62 @@
+.highlight .hll { background-color: #ffffcc }
+.highlight  { background: #eeffcc; }
+.highlight .c { color: #408090; font-style: italic } /* Comment */
+.highlight .err { border: 1px solid #FF0000 } /* Error */
+.highlight .k { color: #007020; font-weight: bold } /* Keyword */
+.highlight .o { color: #666666 } /* Operator */
+.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */
+.highlight .cp { color: #007020 } /* Comment.Preproc */
+.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */
+.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */
+.highlight .gd { color: #A00000 } /* Generic.Deleted */
+.highlight .ge { font-style: italic } /* Generic.Emph */
+.highlight .gr { color: #FF0000 } /* Generic.Error */
+.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
+.highlight .gi { color: #00A000 } /* Generic.Inserted */
+.highlight .go { color: #333333 } /* Generic.Output */
+.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */
+.highlight .gs { font-weight: bold } /* Generic.Strong */
+.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
+.highlight .gt { color: #0044DD } /* Generic.Traceback */
+.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */
+.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */
+.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */
+.highlight .kp { color: #007020 } /* Keyword.Pseudo */
+.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */
+.highlight .kt { color: #902000 } /* Keyword.Type */
+.highlight .m { color: #208050 } /* Literal.Number */
+.highlight .s { color: #4070a0 } /* Literal.String */
+.highlight .na { color: #4070a0 } /* Name.Attribute */
+.highlight .nb { color: #007020 } /* Name.Builtin */
+.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */
+.highlight .no { color: #60add5 } /* Name.Constant */
+.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */
+.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */
+.highlight .ne { color: #007020 } /* Name.Exception */
+.highlight .nf { color: #06287e } /* Name.Function */
+.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */
+.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */
+.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */
+.highlight .nv { color: #bb60d5 } /* Name.Variable */
+.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */
+.highlight .w { color: #bbbbbb } /* Text.Whitespace */
+.highlight .mf { color: #208050 } /* Literal.Number.Float */
+.highlight .mh { color: #208050 } /* Literal.Number.Hex */
+.highlight .mi { color: #208050 } /* Literal.Number.Integer */
+.highlight .mo { color: #208050 } /* Literal.Number.Oct */
+.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */
+.highlight .sc { color: #4070a0 } /* Literal.String.Char */
+.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */
+.highlight .s2 { color: #4070a0 } /* Literal.String.Double */
+.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */
+.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */
+.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */
+.highlight .sx { color: #c65d09 } /* Literal.String.Other */
+.highlight .sr { color: #235388 } /* Literal.String.Regex */
+.highlight .s1 { color: #4070a0 } /* Literal.String.Single */
+.highlight .ss { color: #517918 } /* Literal.String.Symbol */
+.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */
+.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */
+.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */
+.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */
+.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/_static/searchtools.js	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,622 @@
+/*
+ * searchtools.js_t
+ * ~~~~~~~~~~~~~~~~
+ *
+ * Sphinx JavaScript utilities for the full-text search.
+ *
+ * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+
+/**
+ * Porter Stemmer
+ */
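+// Illustrative usage (comment only, not part of the shipped file): the
+// stemmer folds inflected forms onto one stem, so a query for "installing"
+// also matches "installer" in the index:
+//
+//   var stemmer = new Stemmer();
+//   stemmer.stemWord("installing");  // -> "instal"
+//   stemmer.stemWord("installer");   // -> "instal"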
+var Stemmer = function() {
+
+  var step2list = {
+    ational: 'ate',
+    tional: 'tion',
+    enci: 'ence',
+    anci: 'ance',
+    izer: 'ize',
+    bli: 'ble',
+    alli: 'al',
+    entli: 'ent',
+    eli: 'e',
+    ousli: 'ous',
+    ization: 'ize',
+    ation: 'ate',
+    ator: 'ate',
+    alism: 'al',
+    iveness: 'ive',
+    fulness: 'ful',
+    ousness: 'ous',
+    aliti: 'al',
+    iviti: 'ive',
+    biliti: 'ble',
+    logi: 'log'
+  };
+
+  var step3list = {
+    icate: 'ic',
+    ative: '',
+    alize: 'al',
+    iciti: 'ic',
+    ical: 'ic',
+    ful: '',
+    ness: ''
+  };
+
+  var c = "[^aeiou]";          // consonant
+  var v = "[aeiouy]";          // vowel
+  var C = c + "[^aeiouy]*";    // consonant sequence
+  var V = v + "[aeiou]*";      // vowel sequence
+
+  var mgr0 = "^(" + C + ")?" + V + C;                      // [C]VC... is m>0
+  var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$";    // [C]VC[V] is m=1
+  var mgr1 = "^(" + C + ")?" + V + C + V + C;              // [C]VCVC... is m>1
+  var s_v   = "^(" + C + ")?" + v;                         // vowel in stem
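+  // Illustrative: the measure m counts VC runs in [C](VC){m}[V]; per Porter's
+  // paper, "tree" has m=0, "trouble" has m=1 and "private" has m=2.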
+
+  this.stemWord = function (w) {
+    var stem;
+    var suffix;
+    var firstch;
+    var origword = w;
+
+    if (w.length < 3)
+      return w;
+
+    var re;
+    var re2;
+    var re3;
+    var re4;
+
+    firstch = w.substr(0,1);
+    if (firstch == "y")
+      w = firstch.toUpperCase() + w.substr(1);
+
+    // Step 1a
+    re = /^(.+?)(ss|i)es$/;
+    re2 = /^(.+?)([^s])s$/;
+
+    if (re.test(w))
+      w = w.replace(re,"$1$2");
+    else if (re2.test(w))
+      w = w.replace(re2,"$1$2");
+
+    // Step 1b
+    re = /^(.+?)eed$/;
+    re2 = /^(.+?)(ed|ing)$/;
+    if (re.test(w)) {
+      var fp = re.exec(w);
+      re = new RegExp(mgr0);
+      if (re.test(fp[1])) {
+        re = /.$/;
+        w = w.replace(re,"");
+      }
+    }
+    else if (re2.test(w)) {
+      var fp = re2.exec(w);
+      stem = fp[1];
+      re2 = new RegExp(s_v);
+      if (re2.test(stem)) {
+        w = stem;
+        re2 = /(at|bl|iz)$/;
+        re3 = new RegExp("([^aeiouylsz])\\1$");
+        re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+        if (re2.test(w))
+          w = w + "e";
+        else if (re3.test(w)) {
+          re = /.$/;
+          w = w.replace(re,"");
+        }
+        else if (re4.test(w))
+          w = w + "e";
+      }
+    }
+
+    // Step 1c
+    re = /^(.+?)y$/;
+    if (re.test(w)) {
+      var fp = re.exec(w);
+      stem = fp[1];
+      re = new RegExp(s_v);
+      if (re.test(stem))
+        w = stem + "i";
+    }
+
+    // Step 2
+    re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
+    if (re.test(w)) {
+      var fp = re.exec(w);
+      stem = fp[1];
+      suffix = fp[2];
+      re = new RegExp(mgr0);
+      if (re.test(stem))
+        w = stem + step2list[suffix];
+    }
+
+    // Step 3
+    re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
+    if (re.test(w)) {
+      var fp = re.exec(w);
+      stem = fp[1];
+      suffix = fp[2];
+      re = new RegExp(mgr0);
+      if (re.test(stem))
+        w = stem + step3list[suffix];
+    }
+
+    // Step 4
+    re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
+    re2 = /^(.+?)(s|t)(ion)$/;
+    if (re.test(w)) {
+      var fp = re.exec(w);
+      stem = fp[1];
+      re = new RegExp(mgr1);
+      if (re.test(stem))
+        w = stem;
+    }
+    else if (re2.test(w)) {
+      var fp = re2.exec(w);
+      stem = fp[1] + fp[2];
+      re2 = new RegExp(mgr1);
+      if (re2.test(stem))
+        w = stem;
+    }
+
+    // Step 5
+    re = /^(.+?)e$/;
+    if (re.test(w)) {
+      var fp = re.exec(w);
+      stem = fp[1];
+      re = new RegExp(mgr1);
+      re2 = new RegExp(meq1);
+      re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
+      if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
+        w = stem;
+    }
+    re = /ll$/;
+    re2 = new RegExp(mgr1);
+    if (re.test(w) && re2.test(w)) {
+      re = /.$/;
+      w = w.replace(re,"");
+    }
+
+    // and turn initial Y back to y
+    if (firstch == "y")
+      w = firstch.toLowerCase() + w.substr(1);
+    return w;
+  }
+}
+
+
+
+/**
+ * Simple result scoring code.
+ */
+var Scorer = {
+  // Implement the following function to further tweak the score for each result
+  // The function takes a result array [filename, title, anchor, descr, score]
+  // and returns the new score.
+  /*
+  score: function(result) {
+    return result[4];
+  },
+  */
+
+  // query matches the full name of an object
+  objNameMatch: 11,
+  // or matches in the last dotted part of the object name
+  objPartialMatch: 6,
+  // Additive scores depending on the priority of the object
+  objPrio: {0:  15,   // used to be importantResults
+            1:  5,   // used to be objectResults
+            2: -5},  // used to be unimportantResults
+  //  Used when the priority is not in the mapping.
+  objPrioDefault: 0,
+
+  // query found in title
+  title: 15,
+  // query found in terms
+  term: 5
+};
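+// Worked example (illustrative, not part of the shipped file): an exact
+// full-name match on a priority-1 object scores
+// objNameMatch + objPrio[1] = 11 + 5 = 16, while a query term found in a
+// page title contributes Scorer.title = 15.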
+
+
+/**
+ * Search Module
+ */
+var Search = {
+
+  _index : null,
+  _queued_query : null,
+  _pulse_status : -1,
+
+  init : function() {
+      var params = $.getQueryParameters();
+      if (params.q) {
+          var query = params.q[0];
+          $('input[name="q"]')[0].value = query;
+          this.performSearch(query);
+      }
+  },
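+  // e.g. (illustrative) opening search.html?q=installer fills the search box
+  // and immediately calls performSearch('installer').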
+
+  loadIndex : function(url) {
+    $.ajax({type: "GET", url: url, data: null,
+            dataType: "script", cache: true,
+            complete: function(jqxhr, textstatus) {
+              if (textstatus != "success") {
+                document.getElementById("searchindexloader").src = url;
+              }
+            }});
+  },
+
+  setIndex : function(index) {
+    var q;
+    this._index = index;
+    if ((q = this._queued_query) !== null) {
+      this._queued_query = null;
+      Search.query(q);
+    }
+  },
+
+  hasIndex : function() {
+      return this._index !== null;
+  },
+
+  deferQuery : function(query) {
+      this._queued_query = query;
+  },
+
+  stopPulse : function() {
+      this._pulse_status = 0;
+  },
+
+  startPulse : function() {
+    if (this._pulse_status >= 0)
+        return;
+    function pulse() {
+      var i;
+      Search._pulse_status = (Search._pulse_status + 1) % 4;
+      var dotString = '';
+      for (i = 0; i < Search._pulse_status; i++)
+        dotString += '.';
+      Search.dots.text(dotString);
+      if (Search._pulse_status > -1)
+        window.setTimeout(pulse, 500);
+    }
+    pulse();
+  },
+
+  /**
+   * perform a search for something (or wait until index is loaded)
+   */
+  performSearch : function(query) {
+    // create the required interface elements
+    this.out = $('#search-results');
+    this.title = $('<h2>' + _('Searching') + '</h2>').appendTo(this.out);
+    this.dots = $('<span></span>').appendTo(this.title);
+    this.status = $('<p style="display: none"></p>').appendTo(this.out);
+    this.output = $('<ul class="search"/>').appendTo(this.out);
+
+    $('#search-progress').text(_('Preparing search...'));
+    this.startPulse();
+
+    // index already loaded, the browser was quick!
+    if (this.hasIndex())
+      this.query(query);
+    else
+      this.deferQuery(query);
+  },
+
+  /**
+   * execute search (requires search index to be loaded)
+   */
+  query : function(query) {
+    var i;
+    var stopwords = ["a","and","are","as","at","be","but","by","for","if","in","into","is","it","near","no","not","of","on","or","such","that","the","their","then","there","these","they","this","to","was","will","with"];
+
+    // stem the searchterms and add them to the correct list
+    var stemmer = new Stemmer();
+    var searchterms = [];
+    var excluded = [];
+    var hlterms = [];
+    var tmp = query.split(/\s+/);
+    var objectterms = [];
+    for (i = 0; i < tmp.length; i++) {
+      if (tmp[i] !== "") {
+          objectterms.push(tmp[i].toLowerCase());
+      }
+
+      if ($u.indexOf(stopwords, tmp[i]) != -1 || tmp[i].match(/^\d+$/) ||
+          tmp[i] === "") {
+        // skip this "word"
+        continue;
+      }
+      // stem the word
+      var word = stemmer.stemWord(tmp[i]).toLowerCase();
+      var toAppend;
+      // select the correct list
+      if (word[0] == '-') {
+        toAppend = excluded;
+        word = word.substr(1);
+      }
+      else {
+        toAppend = searchterms;
+        hlterms.push(tmp[i].toLowerCase());
+      }
+      // only add if not already in the list
+      if (!$u.contains(toAppend, word))
+        toAppend.push(word);
+    }
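+    // e.g. (illustrative) the query "installer -windows" yields
+    // searchterms = ['instal'] and excluded = ['window'] after stemming.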
+    var highlightstring = '?highlight=' + $.urlencode(hlterms.join(" "));
+
+    // console.debug('SEARCH: searching for:');
+    // console.info('required: ', searchterms);
+    // console.info('excluded: ', excluded);
+
+    // prepare search
+    var terms = this._index.terms;
+    var titleterms = this._index.titleterms;
+
+    // array of [filename, title, anchor, descr, score]
+    var results = [];
+    $('#search-progress').empty();
+
+    // lookup as object
+    for (i = 0; i < objectterms.length; i++) {
+      var others = [].concat(objectterms.slice(0, i),
+                             objectterms.slice(i+1, objectterms.length));
+      results = results.concat(this.performObjectSearch(objectterms[i], others));
+    }
+
+    // lookup as search terms in fulltext
+    results = results.concat(this.performTermsSearch(searchterms, excluded, terms, Scorer.term))
+                     .concat(this.performTermsSearch(searchterms, excluded, titleterms, Scorer.title));
+
+    // let the scorer override scores with a custom scoring function
+    if (Scorer.score) {
+      for (i = 0; i < results.length; i++)
+        results[i][4] = Scorer.score(results[i]);
+    }
+
+    // now sort the results by score, ascending (the display function below
+    // uses pop() to retrieve items, so the best match is printed first) and
+    // then alphabetically
+    results.sort(function(a, b) {
+      var left = a[4];
+      var right = b[4];
+      if (left > right) {
+        return 1;
+      } else if (left < right) {
+        return -1;
+      } else {
+        // same score: sort alphabetically
+        left = a[1].toLowerCase();
+        right = b[1].toLowerCase();
+        return (left > right) ? -1 : ((left < right) ? 1 : 0);
+      }
+    });
+
+    // for debugging
+    //Search.lastresults = results.slice();  // a copy
+    //console.info('search results:', Search.lastresults);
+
+    // print the results
+    var resultCount = results.length;
+    function displayNextItem() {
+      // results left, load the summary and display it
+      if (results.length) {
+        var item = results.pop();
+        var listItem = $('<li style="display:none"></li>');
+        if (DOCUMENTATION_OPTIONS.FILE_SUFFIX === '') {
+          // dirhtml builder
+          var dirname = item[0] + '/';
+          if (dirname.match(/\/index\/$/)) {
+            dirname = dirname.substring(0, dirname.length-6);
+          } else if (dirname == 'index/') {
+            dirname = '';
+          }
+          listItem.append($('<a/>').attr('href',
+            DOCUMENTATION_OPTIONS.URL_ROOT + dirname +
+            highlightstring + item[2]).html(item[1]));
+        } else {
+          // normal html builders
+          listItem.append($('<a/>').attr('href',
+            item[0] + DOCUMENTATION_OPTIONS.FILE_SUFFIX +
+            highlightstring + item[2]).html(item[1]));
+        }
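+        // e.g. (illustrative) the dirhtml builder links docname
+        // 'dataverse-installer-main' as 'dataverse-installer-main/', while
+        // the default html builder appends FILE_SUFFIX, typically '.html'.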
+        if (item[3]) {
+          listItem.append($('<span> (' + item[3] + ')</span>'));
+          Search.output.append(listItem);
+          listItem.slideDown(5, function() {
+            displayNextItem();
+          });
+        } else if (DOCUMENTATION_OPTIONS.HAS_SOURCE) {
+          $.ajax({url: DOCUMENTATION_OPTIONS.URL_ROOT + '_sources/' + item[0] + '.txt',
+                  dataType: "text",
+                  complete: function(jqxhr, textstatus) {
+                    var data = jqxhr.responseText;
+                    if (data !== '') {
+                      listItem.append(Search.makeSearchSummary(data, searchterms, hlterms));
+                    }
+                    Search.output.append(listItem);
+                    listItem.slideDown(5, function() {
+                      displayNextItem();
+                    });
+                  }});
+        } else {
+          // no source available, just display title
+          Search.output.append(listItem);
+          listItem.slideDown(5, function() {
+            displayNextItem();
+          });
+        }
+      }
+      // search finished, update title and status message
+      else {
+        Search.stopPulse();
+        Search.title.text(_('Search Results'));
+        if (!resultCount)
+          Search.status.text(_('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.'));
+        else
+            Search.status.text(_('Search finished, found %s page(s) matching the search query.').replace('%s', resultCount));
+        Search.status.fadeIn(500);
+      }
+    }
+    displayNextItem();
+  },
+
+  /**
+   * search for object names
+   */
+  performObjectSearch : function(object, otherterms) {
+    var filenames = this._index.filenames;
+    var objects = this._index.objects;
+    var objnames = this._index.objnames;
+    var titles = this._index.titles;
+
+    var i;
+    var results = [];
+
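+    // (inferred from the lookups below) each index entry has the shape
+    // objects[prefix][name] = [fileIndex, objTypeIndex, priority, anchor].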
+    for (var prefix in objects) {
+      for (var name in objects[prefix]) {
+        var fullname = (prefix ? prefix + '.' : '') + name;
+        if (fullname.toLowerCase().indexOf(object) > -1) {
+          var score = 0;
+          var parts = fullname.split('.');
+          // check for different match types: exact matches of full name or
+          // "last name" (i.e. last dotted part)
+          if (fullname == object || parts[parts.length - 1] == object) {
+            score += Scorer.objNameMatch;
+          // matches in last name
+          } else if (parts[parts.length - 1].indexOf(object) > -1) {
+            score += Scorer.objPartialMatch;
+          }
+          var match = objects[prefix][name];
+          var objname = objnames[match[1]][2];
+          var title = titles[match[0]];
+          // If more than one term searched for, we require other words to be
+          // found in the name/title/description
+          if (otherterms.length > 0) {
+            var haystack = (prefix + ' ' + name + ' ' +
+                            objname + ' ' + title).toLowerCase();
+            var allfound = true;
+            for (i = 0; i < otherterms.length; i++) {
+              if (haystack.indexOf(otherterms[i]) == -1) {
+                allfound = false;
+                break;
+              }
+            }
+            if (!allfound) {
+              continue;
+            }
+          }
+          var descr = objname + _(', in ') + title;
+
+          var anchor = match[3];
+          if (anchor === '')
+            anchor = fullname;
+          else if (anchor == '-')
+            anchor = objnames[match[1]][1] + '-' + fullname;
+          // add custom score for some objects according to scorer
+          if (Scorer.objPrio.hasOwnProperty(match[2])) {
+            score += Scorer.objPrio[match[2]];
+          } else {
+            score += Scorer.objPrioDefault;
+          }
+          results.push([filenames[match[0]], fullname, '#'+anchor, descr, score]);
+        }
+      }
+    }
+
+    return results;
+  },
+
+  /**
+   * search for full-text terms in the index
+   */
+  performTermsSearch : function(searchterms, excluded, terms, score) {
+    var filenames = this._index.filenames;
+    var titles = this._index.titles;
+
+    var i, j, file, files;
+    var fileMap = {};
+    var results = [];
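+    // (inferred from the normalisation below) terms maps a stemmed word to a
+    // single file index or to an array of file indices.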
+
+    // perform the search on the required terms
+    for (i = 0; i < searchterms.length; i++) {
+      var word = searchterms[i];
+      // no match but word was a required one
+      if (!(files = terms[word]))
+        break;
+      if (files.length === undefined) {
+        files = [files];
+      }
+      // create the mapping
+      for (j = 0; j < files.length; j++) {
+        file = files[j];
+        if (file in fileMap)
+          fileMap[file].push(word);
+        else
+          fileMap[file] = [word];
+      }
+    }
+
+    // now check if the files don't contain excluded terms
+    for (file in fileMap) {
+      var valid = true;
+
+      // check if all requirements are matched
+      if (fileMap[file].length != searchterms.length)
+          continue;
+
+      // ensure that none of the excluded terms is in the search result
+      for (i = 0; i < excluded.length; i++) {
+        if (terms[excluded[i]] == file ||
+          $u.contains(terms[excluded[i]] || [], file)) {
+          valid = false;
+          break;
+        }
+      }
+
+      // if we have still a valid result we can add it to the result list
+      if (valid) {
+        results.push([filenames[file], titles[file], '', null, score]);
+      }
+    }
+    return results;
+  },
+
+  /**
+   * helper function to return a node containing the
+   * search summary for a given text. keywords is a list
+   * of stemmed words, hlwords is the list of normal, unstemmed
+ * words. the former are used to find the occurrence, the
+   * latter for highlighting it.
+   */
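+  // Illustrative call: makeSearchSummary(pageText, ['instal'], ['installer'])
+  // finds the stemmed term in pageText and highlights the raw query word.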
+  makeSearchSummary : function(text, keywords, hlwords) {
+    var textLower = text.toLowerCase();
+    var start = 0;
+    $.each(keywords, function() {
+      var i = textLower.indexOf(this.toLowerCase());
+      if (i > -1)
+        start = i;
+    });
+    start = Math.max(start - 120, 0);
+    var excerpt = ((start > 0) ? '...' : '') +
+      $.trim(text.substr(start, 240)) +
+      ((start + 240 < text.length) ? '...' : '');  // '...' only if truncated
+    var rv = $('<div class="context"></div>').text(excerpt);
+    $.each(hlwords, function() {
+      rv = rv.highlightText(this, 'highlighted');
+    });
+    return rv;
+  }
+};
+
+$(document).ready(function() {
+  Search.init();
+});
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/_static/underscore.js	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,31 @@
+// Underscore.js 1.3.1
+// (c) 2009-2012 Jeremy Ashkenas, DocumentCloud Inc.
+// Underscore is freely distributable under the MIT license.
+// Portions of Underscore are inspired or borrowed from Prototype,
+// Oliver Steele's Functional, and John Resig's Micro-Templating.
+// For all details and documentation:
+// http://documentcloud.github.com/underscore
+(function(){function q(a,c,d){if(a===c)return a!==0||1/a==1/c;if(a==null||c==null)return a===c;if(a._chain)a=a._wrapped;if(c._chain)c=c._wrapped;if(a.isEqual&&b.isFunction(a.isEqual))return a.isEqual(c);if(c.isEqual&&b.isFunction(c.isEqual))return c.isEqual(a);var e=l.call(a);if(e!=l.call(c))return false;switch(e){case "[object String]":return a==String(c);case "[object Number]":return a!=+a?c!=+c:a==0?1/a==1/c:a==+c;case "[object Date]":case "[object Boolean]":return+a==+c;case "[object RegExp]":return a.source==
+c.source&&a.global==c.global&&a.multiline==c.multiline&&a.ignoreCase==c.ignoreCase}if(typeof a!="object"||typeof c!="object")return false;for(var f=d.length;f--;)if(d[f]==a)return true;d.push(a);var f=0,g=true;if(e=="[object Array]"){if(f=a.length,g=f==c.length)for(;f--;)if(!(g=f in a==f in c&&q(a[f],c[f],d)))break}else{if("constructor"in a!="constructor"in c||a.constructor!=c.constructor)return false;for(var h in a)if(b.has(a,h)&&(f++,!(g=b.has(c,h)&&q(a[h],c[h],d))))break;if(g){for(h in c)if(b.has(c,
+h)&&!f--)break;g=!f}}d.pop();return g}var r=this,G=r._,n={},k=Array.prototype,o=Object.prototype,i=k.slice,H=k.unshift,l=o.toString,I=o.hasOwnProperty,w=k.forEach,x=k.map,y=k.reduce,z=k.reduceRight,A=k.filter,B=k.every,C=k.some,p=k.indexOf,D=k.lastIndexOf,o=Array.isArray,J=Object.keys,s=Function.prototype.bind,b=function(a){return new m(a)};if(typeof exports!=="undefined"){if(typeof module!=="undefined"&&module.exports)exports=module.exports=b;exports._=b}else r._=b;b.VERSION="1.3.1";var j=b.each=
+b.forEach=function(a,c,d){if(a!=null)if(w&&a.forEach===w)a.forEach(c,d);else if(a.length===+a.length)for(var e=0,f=a.length;e<f;e++){if(e in a&&c.call(d,a[e],e,a)===n)break}else for(e in a)if(b.has(a,e)&&c.call(d,a[e],e,a)===n)break};b.map=b.collect=function(a,c,b){var e=[];if(a==null)return e;if(x&&a.map===x)return a.map(c,b);j(a,function(a,g,h){e[e.length]=c.call(b,a,g,h)});if(a.length===+a.length)e.length=a.length;return e};b.reduce=b.foldl=b.inject=function(a,c,d,e){var f=arguments.length>2;a==
+null&&(a=[]);if(y&&a.reduce===y)return e&&(c=b.bind(c,e)),f?a.reduce(c,d):a.reduce(c);j(a,function(a,b,i){f?d=c.call(e,d,a,b,i):(d=a,f=true)});if(!f)throw new TypeError("Reduce of empty array with no initial value");return d};b.reduceRight=b.foldr=function(a,c,d,e){var f=arguments.length>2;a==null&&(a=[]);if(z&&a.reduceRight===z)return e&&(c=b.bind(c,e)),f?a.reduceRight(c,d):a.reduceRight(c);var g=b.toArray(a).reverse();e&&!f&&(c=b.bind(c,e));return f?b.reduce(g,c,d,e):b.reduce(g,c)};b.find=b.detect=
+function(a,c,b){var e;E(a,function(a,g,h){if(c.call(b,a,g,h))return e=a,true});return e};b.filter=b.select=function(a,c,b){var e=[];if(a==null)return e;if(A&&a.filter===A)return a.filter(c,b);j(a,function(a,g,h){c.call(b,a,g,h)&&(e[e.length]=a)});return e};b.reject=function(a,c,b){var e=[];if(a==null)return e;j(a,function(a,g,h){c.call(b,a,g,h)||(e[e.length]=a)});return e};b.every=b.all=function(a,c,b){var e=true;if(a==null)return e;if(B&&a.every===B)return a.every(c,b);j(a,function(a,g,h){if(!(e=
+e&&c.call(b,a,g,h)))return n});return e};var E=b.some=b.any=function(a,c,d){c||(c=b.identity);var e=false;if(a==null)return e;if(C&&a.some===C)return a.some(c,d);j(a,function(a,b,h){if(e||(e=c.call(d,a,b,h)))return n});return!!e};b.include=b.contains=function(a,c){var b=false;if(a==null)return b;return p&&a.indexOf===p?a.indexOf(c)!=-1:b=E(a,function(a){return a===c})};b.invoke=function(a,c){var d=i.call(arguments,2);return b.map(a,function(a){return(b.isFunction(c)?c||a:a[c]).apply(a,d)})};b.pluck=
+function(a,c){return b.map(a,function(a){return a[c]})};b.max=function(a,c,d){if(!c&&b.isArray(a))return Math.max.apply(Math,a);if(!c&&b.isEmpty(a))return-Infinity;var e={computed:-Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b>=e.computed&&(e={value:a,computed:b})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a))return Math.min.apply(Math,a);if(!c&&b.isEmpty(a))return Infinity;var e={computed:Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b<e.computed&&(e={value:a,computed:b})});
+return e.value};b.shuffle=function(a){var b=[],d;j(a,function(a,f){f==0?b[0]=a:(d=Math.floor(Math.random()*(f+1)),b[f]=b[d],b[d]=a)});return b};b.sortBy=function(a,c,d){return b.pluck(b.map(a,function(a,b,g){return{value:a,criteria:c.call(d,a,b,g)}}).sort(function(a,b){var c=a.criteria,d=b.criteria;return c<d?-1:c>d?1:0}),"value")};b.groupBy=function(a,c){var d={},e=b.isFunction(c)?c:function(a){return a[c]};j(a,function(a,b){var c=e(a,b);(d[c]||(d[c]=[])).push(a)});return d};b.sortedIndex=function(a,
+c,d){d||(d=b.identity);for(var e=0,f=a.length;e<f;){var g=e+f>>1;d(a[g])<d(c)?e=g+1:f=g}return e};b.toArray=function(a){return!a?[]:a.toArray?a.toArray():b.isArray(a)?i.call(a):b.isArguments(a)?i.call(a):b.values(a)};b.size=function(a){return b.toArray(a).length};b.first=b.head=function(a,b,d){return b!=null&&!d?i.call(a,0,b):a[0]};b.initial=function(a,b,d){return i.call(a,0,a.length-(b==null||d?1:b))};b.last=function(a,b,d){return b!=null&&!d?i.call(a,Math.max(a.length-b,0)):a[a.length-1]};b.rest=
+b.tail=function(a,b,d){return i.call(a,b==null||d?1:b)};b.compact=function(a){return b.filter(a,function(a){return!!a})};b.flatten=function(a,c){return b.reduce(a,function(a,e){if(b.isArray(e))return a.concat(c?e:b.flatten(e));a[a.length]=e;return a},[])};b.without=function(a){return b.difference(a,i.call(arguments,1))};b.uniq=b.unique=function(a,c,d){var d=d?b.map(a,d):a,e=[];b.reduce(d,function(d,g,h){if(0==h||(c===true?b.last(d)!=g:!b.include(d,g)))d[d.length]=g,e[e.length]=a[h];return d},[]);
+return e};b.union=function(){return b.uniq(b.flatten(arguments,true))};b.intersection=b.intersect=function(a){var c=i.call(arguments,1);return b.filter(b.uniq(a),function(a){return b.every(c,function(c){return b.indexOf(c,a)>=0})})};b.difference=function(a){var c=b.flatten(i.call(arguments,1));return b.filter(a,function(a){return!b.include(c,a)})};b.zip=function(){for(var a=i.call(arguments),c=b.max(b.pluck(a,"length")),d=Array(c),e=0;e<c;e++)d[e]=b.pluck(a,""+e);return d};b.indexOf=function(a,c,
+d){if(a==null)return-1;var e;if(d)return d=b.sortedIndex(a,c),a[d]===c?d:-1;if(p&&a.indexOf===p)return a.indexOf(c);for(d=0,e=a.length;d<e;d++)if(d in a&&a[d]===c)return d;return-1};b.lastIndexOf=function(a,b){if(a==null)return-1;if(D&&a.lastIndexOf===D)return a.lastIndexOf(b);for(var d=a.length;d--;)if(d in a&&a[d]===b)return d;return-1};b.range=function(a,b,d){arguments.length<=1&&(b=a||0,a=0);for(var d=arguments[2]||1,e=Math.max(Math.ceil((b-a)/d),0),f=0,g=Array(e);f<e;)g[f++]=a,a+=d;return g};
+var F=function(){};b.bind=function(a,c){var d,e;if(a.bind===s&&s)return s.apply(a,i.call(arguments,1));if(!b.isFunction(a))throw new TypeError;e=i.call(arguments,2);return d=function(){if(!(this instanceof d))return a.apply(c,e.concat(i.call(arguments)));F.prototype=a.prototype;var b=new F,g=a.apply(b,e.concat(i.call(arguments)));return Object(g)===g?g:b}};b.bindAll=function(a){var c=i.call(arguments,1);c.length==0&&(c=b.functions(a));j(c,function(c){a[c]=b.bind(a[c],a)});return a};b.memoize=function(a,
+c){var d={};c||(c=b.identity);return function(){var e=c.apply(this,arguments);return b.has(d,e)?d[e]:d[e]=a.apply(this,arguments)}};b.delay=function(a,b){var d=i.call(arguments,2);return setTimeout(function(){return a.apply(a,d)},b)};b.defer=function(a){return b.delay.apply(b,[a,1].concat(i.call(arguments,1)))};b.throttle=function(a,c){var d,e,f,g,h,i=b.debounce(function(){h=g=false},c);return function(){d=this;e=arguments;var b;f||(f=setTimeout(function(){f=null;h&&a.apply(d,e);i()},c));g?h=true:
+a.apply(d,e);i();g=true}};b.debounce=function(a,b){var d;return function(){var e=this,f=arguments;clearTimeout(d);d=setTimeout(function(){d=null;a.apply(e,f)},b)}};b.once=function(a){var b=false,d;return function(){if(b)return d;b=true;return d=a.apply(this,arguments)}};b.wrap=function(a,b){return function(){var d=[a].concat(i.call(arguments,0));return b.apply(this,d)}};b.compose=function(){var a=arguments;return function(){for(var b=arguments,d=a.length-1;d>=0;d--)b=[a[d].apply(this,b)];return b[0]}};
+b.after=function(a,b){return a<=0?b():function(){if(--a<1)return b.apply(this,arguments)}};b.keys=J||function(a){if(a!==Object(a))throw new TypeError("Invalid object");var c=[],d;for(d in a)b.has(a,d)&&(c[c.length]=d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=b.methods=function(a){var c=[],d;for(d in a)b.isFunction(a[d])&&c.push(d);return c.sort()};b.extend=function(a){j(i.call(arguments,1),function(b){for(var d in b)a[d]=b[d]});return a};b.defaults=function(a){j(i.call(arguments,
+1),function(b){for(var d in b)a[d]==null&&(a[d]=b[d])});return a};b.clone=function(a){return!b.isObject(a)?a:b.isArray(a)?a.slice():b.extend({},a)};b.tap=function(a,b){b(a);return a};b.isEqual=function(a,b){return q(a,b,[])};b.isEmpty=function(a){if(b.isArray(a)||b.isString(a))return a.length===0;for(var c in a)if(b.has(a,c))return false;return true};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=o||function(a){return l.call(a)=="[object Array]"};b.isObject=function(a){return a===Object(a)};
+b.isArguments=function(a){return l.call(a)=="[object Arguments]"};if(!b.isArguments(arguments))b.isArguments=function(a){return!(!a||!b.has(a,"callee"))};b.isFunction=function(a){return l.call(a)=="[object Function]"};b.isString=function(a){return l.call(a)=="[object String]"};b.isNumber=function(a){return l.call(a)=="[object Number]"};b.isNaN=function(a){return a!==a};b.isBoolean=function(a){return a===true||a===false||l.call(a)=="[object Boolean]"};b.isDate=function(a){return l.call(a)=="[object Date]"};
+b.isRegExp=function(a){return l.call(a)=="[object RegExp]"};b.isNull=function(a){return a===null};b.isUndefined=function(a){return a===void 0};b.has=function(a,b){return I.call(a,b)};b.noConflict=function(){r._=G;return this};b.identity=function(a){return a};b.times=function(a,b,d){for(var e=0;e<a;e++)b.call(d,e)};b.escape=function(a){return(""+a).replace(/&/g,"&amp;").replace(/</g,"&lt;").replace(/>/g,"&gt;").replace(/"/g,"&quot;").replace(/'/g,"&#x27;").replace(/\//g,"&#x2F;")};b.mixin=function(a){j(b.functions(a),
+function(c){K(c,b[c]=a[c])})};var L=0;b.uniqueId=function(a){var b=L++;return a?a+b:b};b.templateSettings={evaluate:/<%([\s\S]+?)%>/g,interpolate:/<%=([\s\S]+?)%>/g,escape:/<%-([\s\S]+?)%>/g};var t=/.^/,u=function(a){return a.replace(/\\\\/g,"\\").replace(/\\'/g,"'")};b.template=function(a,c){var d=b.templateSettings,d="var __p=[],print=function(){__p.push.apply(__p,arguments);};with(obj||{}){__p.push('"+a.replace(/\\/g,"\\\\").replace(/'/g,"\\'").replace(d.escape||t,function(a,b){return"',_.escape("+
+u(b)+"),'"}).replace(d.interpolate||t,function(a,b){return"',"+u(b)+",'"}).replace(d.evaluate||t,function(a,b){return"');"+u(b).replace(/[\r\n\t]/g," ")+";__p.push('"}).replace(/\r/g,"\\r").replace(/\n/g,"\\n").replace(/\t/g,"\\t")+"');}return __p.join('');",e=new Function("obj","_",d);return c?e(c,b):function(a){return e.call(this,a,b)}};b.chain=function(a){return b(a).chain()};var m=function(a){this._wrapped=a};b.prototype=m.prototype;var v=function(a,c){return c?b(a).chain():a},K=function(a,c){m.prototype[a]=
+function(){var a=i.call(arguments);H.call(a,this._wrapped);return v(c.apply(b,a),this._chain)}};b.mixin(b);j("pop,push,reverse,shift,sort,splice,unshift".split(","),function(a){var b=k[a];m.prototype[a]=function(){var d=this._wrapped;b.apply(d,arguments);var e=d.length;(a=="shift"||a=="splice")&&e===0&&delete d[0];return v(d,this._chain)}});j(["concat","join","slice"],function(a){var b=k[a];m.prototype[a]=function(){return v(b.apply(this._wrapped,arguments),this._chain)}});m.prototype.chain=function(){this._chain=
+true;return this};m.prototype.value=function(){return this._wrapped}}).call(this);
Binary file DVN-web/installer/dvninstall/doc/guides/_static/up-pressed.png has changed
Binary file DVN-web/installer/dvninstall/doc/guides/_static/up.png has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/_static/websupport.js	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,808 @@
+/*
+ * websupport.js
+ * ~~~~~~~~~~~~~
+ *
+ * sphinx.websupport utilities for all documentation.
+ *
+ * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+(function($) {
+  $.fn.autogrow = function() {
+    return this.each(function() {
+    var textarea = this;
+
+    $.fn.autogrow.resize(textarea);
+
+    $(textarea)
+      .focus(function() {
+        textarea.interval = setInterval(function() {
+          $.fn.autogrow.resize(textarea);
+        }, 500);
+      })
+      .blur(function() {
+        clearInterval(textarea.interval);
+      });
+    });
+  };
+
+  $.fn.autogrow.resize = function(textarea) {
+    var lineHeight = parseInt($(textarea).css('line-height'), 10);
+    var lines = textarea.value.split('\n');
+    var columns = textarea.cols;
+    var lineCount = 0;
+    $.each(lines, function() {
+      lineCount += Math.ceil(this.length / columns) || 1;
+    });
+    var height = lineHeight * (lineCount + 1);
+    $(textarea).css('height', height);
+  };
+})(jQuery);
+
+(function($) {
+  var comp, by;
+
+  function init() {
+    initEvents();
+    initComparator();
+  }
+
+  function initEvents() {
+    $('a.comment-close').live("click", function(event) {
+      event.preventDefault();
+      hide($(this).attr('id').substring(2));
+    });
+    $('a.vote').live("click", function(event) {
+      event.preventDefault();
+      handleVote($(this));
+    });
+    $('a.reply').live("click", function(event) {
+      event.preventDefault();
+      openReply($(this).attr('id').substring(2));
+    });
+    $('a.close-reply').live("click", function(event) {
+      event.preventDefault();
+      closeReply($(this).attr('id').substring(2));
+    });
+    $('a.sort-option').live("click", function(event) {
+      event.preventDefault();
+      handleReSort($(this));
+    });
+    $('a.show-proposal').live("click", function(event) {
+      event.preventDefault();
+      showProposal($(this).attr('id').substring(2));
+    });
+    $('a.hide-proposal').live("click", function(event) {
+      event.preventDefault();
+      hideProposal($(this).attr('id').substring(2));
+    });
+    $('a.show-propose-change').live("click", function(event) {
+      event.preventDefault();
+      showProposeChange($(this).attr('id').substring(2));
+    });
+    $('a.hide-propose-change').live("click", function(event) {
+      event.preventDefault();
+      hideProposeChange($(this).attr('id').substring(2));
+    });
+    $('a.accept-comment').live("click", function(event) {
+      event.preventDefault();
+      acceptComment($(this).attr('id').substring(2));
+    });
+    $('a.delete-comment').live("click", function(event) {
+      event.preventDefault();
+      deleteComment($(this).attr('id').substring(2));
+    });
+    $('a.comment-markup').live("click", function(event) {
+      event.preventDefault();
+      toggleCommentMarkupBox($(this).attr('id').substring(2));
+    });
+  }
+
+  /**
+   * Set comp, which is a comparator function used for sorting and
+   * inserting comments into the list.
+   */
+  function setComparator() {
+    // If the first three letters are "asc", sort in ascending order
+    // and remove the prefix.
+    if (by.substring(0,3) == 'asc') {
+      var i = by.substring(3);
+      comp = function(a, b) { return a[i] - b[i]; };
+    } else {
+      // Otherwise sort in descending order.
+      comp = function(a, b) { return b[by] - a[by]; };
+    }
+
+    // Reset link styles and format the selected sort option.
+    $('a.sel').attr('href', '#').removeClass('sel');
+    $('a.by' + by).removeAttr('href').addClass('sel');
+  }
+
+  /**
+   * Create a comp function. If the user has preferences stored in
+   * the sortBy cookie, use those, otherwise use the default.
+   */
+  function initComparator() {
+    by = 'rating'; // Default to sort by rating.
+    // If the sortBy cookie is set, use that instead.
+    if (document.cookie.length > 0) {
+      var start = document.cookie.indexOf('sortBy=');
+      if (start != -1) {
+        start = start + 7;
+        var end = document.cookie.indexOf(";", start);
+        if (end == -1) {
+          end = document.cookie.length;
+        }
+        // Read the stored value even when other cookies follow it.
+        by = unescape(document.cookie.substring(start, end));
+      }
+    }
+    setComparator();
+  }
+
+  /**
+   * Show a comment div.
+   */
+  function show(id) {
+    $('#ao' + id).hide();
+    $('#ah' + id).show();
+    var context = $.extend({id: id}, opts);
+    var popup = $(renderTemplate(popupTemplate, context)).hide();
+    popup.find('textarea[name="proposal"]').hide();
+    popup.find('a.by' + by).addClass('sel');
+    var form = popup.find('#cf' + id);
+    form.submit(function(event) {
+      event.preventDefault();
+      addComment(form);
+    });
+    $('#s' + id).after(popup);
+    popup.slideDown('fast', function() {
+      getComments(id);
+    });
+  }
+
+  /**
+   * Hide a comment div.
+   */
+  function hide(id) {
+    $('#ah' + id).hide();
+    $('#ao' + id).show();
+    var div = $('#sc' + id);
+    div.slideUp('fast', function() {
+      div.remove();
+    });
+  }
+
+  /**
+   * Perform an ajax request to get comments for a node
+   * and insert the comments into the comments tree.
+   */
+  function getComments(id) {
+    $.ajax({
+     type: 'GET',
+     url: opts.getCommentsURL,
+     data: {node: id},
+     success: function(data, textStatus, request) {
+       var ul = $('#cl' + id);
+       var speed = 100;
+       $('#cf' + id)
+         .find('textarea[name="proposal"]')
+         .data('source', data.source);
+
+       if (data.comments.length === 0) {
+         ul.html('<li>No comments yet.</li>');
+         ul.data('empty', true);
+       } else {
+         // If there are comments, sort them and put them in the list.
+         var comments = sortComments(data.comments);
+         speed = data.comments.length * 100;
+         appendComments(comments, ul);
+         ul.data('empty', false);
+       }
+       $('#cn' + id).slideUp(speed + 200);
+       ul.slideDown(speed);
+     },
+     error: function(request, textStatus, error) {
+       showError('Oops, there was a problem retrieving the comments.');
+     },
+     dataType: 'json'
+    });
+  }
+
+  /**
+   * Add a comment via ajax and insert the comment into the comment tree.
+   */
+  function addComment(form) {
+    var node_id = form.find('input[name="node"]').val();
+    var parent_id = form.find('input[name="parent"]').val();
+    var text = form.find('textarea[name="comment"]').val();
+    var proposal = form.find('textarea[name="proposal"]').val();
+
+    if (text == '') {
+      showError('Please enter a comment.');
+      return;
+    }
+
+    // Disable the form that is being submitted.
+    form.find('textarea,input').attr('disabled', 'disabled');
+
+    // Send the comment to the server.
+    $.ajax({
+      type: "POST",
+      url: opts.addCommentURL,
+      dataType: 'json',
+      data: {
+        node: node_id,
+        parent: parent_id,
+        text: text,
+        proposal: proposal
+      },
+      success: function(data, textStatus, error) {
+        // Reset the form.
+        if (node_id) {
+          hideProposeChange(node_id);
+        }
+        form.find('textarea')
+          .val('')
+          .add(form.find('input'))
+          .removeAttr('disabled');
+        var ul = $('#cl' + (node_id || parent_id));
+        if (ul.data('empty')) {
+          $(ul).empty();
+          ul.data('empty', false);
+        }
+        insertComment(data.comment);
+        var ao = $('#ao' + node_id);
+        ao.find('img').attr({'src': opts.commentBrightImage});
+        if (node_id) {
+          // if this was a "root" comment, remove the commenting box
+          // (the user can get it back by reopening the comment popup)
+          $('#ca' + node_id).slideUp();
+        }
+      },
+      error: function(request, textStatus, error) {
+        form.find('textarea,input').removeAttr('disabled');
+        showError('Oops, there was a problem adding the comment.');
+      }
+    });
+  }
+
+  /**
+   * Recursively append comments to the main comment list and children
+   * lists, creating the comment tree.
+   */
+  function appendComments(comments, ul) {
+    $.each(comments, function() {
+      var div = createCommentDiv(this);
+      ul.append($(document.createElement('li')).html(div));
+      appendComments(this.children, div.find('ul.comment-children'));
+      // To avoid stale data, don't store the comment's children in data.
+      this.children = null;
+      div.data('comment', this);
+    });
+  }
+
+  /**
+   * After adding a new comment, it must be inserted in the correct
+   * location in the comment tree.
+   */
+  function insertComment(comment) {
+    var div = createCommentDiv(comment);
+
+    // To avoid stale data, don't store the comment's children in data.
+    comment.children = null;
+    div.data('comment', comment);
+
+    var ul = $('#cl' + (comment.node || comment.parent));
+    var siblings = getChildren(ul);
+
+    var li = $(document.createElement('li'));
+    li.hide();
+
+    // Determine where in the parent's children list to insert this comment.
+    for (var i = 0; i < siblings.length; i++) {
+      if (comp(comment, siblings[i]) <= 0) {
+        $('#cd' + siblings[i].id)
+          .parent()
+          .before(li.html(div));
+        li.slideDown('fast');
+        return;
+      }
+    }
+
+    // If we get here, this comment rates lower than all the others,
+    // or it is the only comment in the list.
+    ul.append(li.html(div));
+    li.slideDown('fast');
+  }
+
+  function acceptComment(id) {
+    $.ajax({
+      type: 'POST',
+      url: opts.acceptCommentURL,
+      data: {id: id},
+      success: function(data, textStatus, request) {
+        $('#cm' + id).fadeOut('fast');
+        $('#cd' + id).removeClass('moderate');
+      },
+      error: function(request, textStatus, error) {
+        showError('Oops, there was a problem accepting the comment.');
+      }
+    });
+  }
+
+  function deleteComment(id) {
+    $.ajax({
+      type: 'POST',
+      url: opts.deleteCommentURL,
+      data: {id: id},
+      success: function(data, textStatus, request) {
+        var div = $('#cd' + id);
+        if (data == 'delete') {
+          // Moderator mode: remove the comment and all children immediately
+          div.slideUp('fast', function() {
+            div.remove();
+          });
+          return;
+        }
+        // User mode: only mark the comment as deleted
+        div
+          .find('span.user-id:first')
+          .text('[deleted]').end()
+          .find('div.comment-text:first')
+          .text('[deleted]').end()
+          .find('#cm' + id + ', #dc' + id + ', #ac' + id + ', #rc' + id +
+                ', #sp' + id + ', #hp' + id + ', #cr' + id + ', #rl' + id)
+          .remove();
+        var comment = div.data('comment');
+        comment.username = '[deleted]';
+        comment.text = '[deleted]';
+        div.data('comment', comment);
+      },
+      error: function(request, textStatus, error) {
+        showError('Oops, there was a problem deleting the comment.');
+      }
+    });
+  }
+
+  function showProposal(id) {
+    $('#sp' + id).hide();
+    $('#hp' + id).show();
+    $('#pr' + id).slideDown('fast');
+  }
+
+  function hideProposal(id) {
+    $('#hp' + id).hide();
+    $('#sp' + id).show();
+    $('#pr' + id).slideUp('fast');
+  }
+
+  function showProposeChange(id) {
+    $('#pc' + id).hide();
+    $('#hc' + id).show();
+    var textarea = $('#pt' + id);
+    textarea.val(textarea.data('source'));
+    $.fn.autogrow.resize(textarea[0]);
+    textarea.slideDown('fast');
+  }
+
+  function hideProposeChange(id) {
+    $('#hc' + id).hide();
+    $('#pc' + id).show();
+    var textarea = $('#pt' + id);
+    textarea.val('').removeAttr('disabled');
+    textarea.slideUp('fast');
+  }
+
+  function toggleCommentMarkupBox(id) {
+    $('#mb' + id).toggle();
+  }
+
+  /** Handle when the user clicks on a sort by link. */
+  function handleReSort(link) {
+    var classes = link.attr('class').split(/\s+/);
+    for (var i=0; i<classes.length; i++) {
+      if (classes[i] != 'sort-option') {
+        by = classes[i].substring(2);
+      }
+    }
+    setComparator();
+    // Save/update the sortBy cookie.
+    var expiration = new Date();
+    expiration.setDate(expiration.getDate() + 365);
+    document.cookie= 'sortBy=' + escape(by) +
+                     ';expires=' + expiration.toUTCString();
+    $('ul.comment-ul').each(function(index, ul) {
+      var comments = getChildren($(ul), true);
+      comments = sortComments(comments);
+      appendComments(comments, $(ul).empty());
+    });
+  }
+
+  /**
+   * Function to process a vote when a user clicks an arrow.
+   */
+  function handleVote(link) {
+    if (!opts.voting) {
+      showError("You'll need to login to vote.");
+      return;
+    }
+
+    var id = link.attr('id');
+    if (!id) {
+      // Didn't click on one of the voting arrows.
+      return;
+    }
+    // If it is an unvote, the new vote value is 0,
+    // Otherwise it's 1 for an upvote, or -1 for a downvote.
+    var value = 0;
+    if (id.charAt(1) != 'u') {
+      value = id.charAt(0) == 'u' ? 1 : -1;
+    }
+    // The data to be sent to the server.
+    var d = {
+      comment_id: id.substring(2),
+      value: value
+    };
+
+    // Swap the vote and unvote links.
+    link.hide();
+    $('#' + id.charAt(0) + (id.charAt(1) == 'u' ? 'v' : 'u') + d.comment_id)
+      .show();
+
+    // The div the comment is displayed in.
+    var div = $('div#cd' + d.comment_id);
+    var data = div.data('comment');
+
+    // If this is not an unvote, and the other vote arrow has
+    // already been pressed, unpress it.
+    if ((d.value !== 0) && (data.vote === d.value * -1)) {
+      $('#' + (d.value == 1 ? 'd' : 'u') + 'u' + d.comment_id).hide();
+      $('#' + (d.value == 1 ? 'd' : 'u') + 'v' + d.comment_id).show();
+    }
+
+    // Update the comments rating in the local data.
+    data.rating += (data.vote === 0) ? d.value : (d.value - data.vote);
+    data.vote = d.value;
+    div.data('comment', data);
+
+    // Change the rating text.
+    div.find('.rating:first')
+      .text(data.rating + ' point' + (data.rating == 1 ? '' : 's'));
+
+    // Send the vote information to the server.
+    $.ajax({
+      type: "POST",
+      url: opts.processVoteURL,
+      data: d,
+      error: function(request, textStatus, error) {
+        showError('Oops, there was a problem casting that vote.');
+      }
+    });
+  }
+
+  /**
+   * Open a reply form used to reply to an existing comment.
+   */
+  function openReply(id) {
+    // Swap out the reply link for the hide link
+    $('#rl' + id).hide();
+    $('#cr' + id).show();
+
+    // Add the reply li to the children ul.
+    var div = $(renderTemplate(replyTemplate, {id: id})).hide();
+    $('#cl' + id)
+      .prepend(div)
+      // Setup the submit handler for the reply form.
+      .find('#rf' + id)
+      .submit(function(event) {
+        event.preventDefault();
+        addComment($('#rf' + id));
+        closeReply(id);
+      })
+      .find('input[type=button]')
+      .click(function() {
+        closeReply(id);
+      });
+    div.slideDown('fast', function() {
+      $('#rf' + id).find('textarea').focus();
+    });
+  }
+
+  /**
+   * Close the reply form opened with openReply.
+   */
+  function closeReply(id) {
+    // Remove the reply div from the DOM.
+    $('#rd' + id).slideUp('fast', function() {
+      $(this).remove();
+    });
+
+    // Swap out the hide link for the reply link
+    $('#cr' + id).hide();
+    $('#rl' + id).show();
+  }
+
+  /**
+   * Recursively sort a tree of comments using the comp comparator.
+   */
+  function sortComments(comments) {
+    comments.sort(comp);
+    $.each(comments, function() {
+      this.children = sortComments(this.children);
+    });
+    return comments;
+  }
+
+  /**
+   * Get the children comments from a ul. If recursive is true,
+   * recursively include children's children.
+   */
+  function getChildren(ul, recursive) {
+    var children = [];
+    ul.children().children("[id^='cd']")
+      .each(function() {
+        var comment = $(this).data('comment');
+        if (recursive)
+          comment.children = getChildren($(this).find('#cl' + comment.id), true);
+        children.push(comment);
+      });
+    return children;
+  }
+
+  /** Create a div to display a comment in. */
+  function createCommentDiv(comment) {
+    if (!comment.displayed && !opts.moderator) {
+      return $('<div class="moderate">Thank you!  Your comment will show up '
+               + 'once it is has been approved by a moderator.</div>');
+    }
+    // Prettify the comment rating.
+    comment.pretty_rating = comment.rating + ' point' +
+      (comment.rating == 1 ? '' : 's');
+    // Make a class (for displaying not yet moderated comments differently)
+    comment.css_class = comment.displayed ? '' : ' moderate';
+    // Create a div for this comment.
+    var context = $.extend({}, opts, comment);
+    var div = $(renderTemplate(commentTemplate, context));
+
+    // If the user has voted on this comment, highlight the correct arrow.
+    if (comment.vote) {
+      var direction = (comment.vote == 1) ? 'u' : 'd';
+      div.find('#' + direction + 'v' + comment.id).hide();
+      div.find('#' + direction + 'u' + comment.id).show();
+    }
+
+    if (opts.moderator || comment.text != '[deleted]') {
+      div.find('a.reply').show();
+      if (comment.proposal_diff)
+        div.find('#sp' + comment.id).show();
+      if (opts.moderator && !comment.displayed)
+        div.find('#cm' + comment.id).show();
+      if (opts.moderator || (opts.username == comment.username))
+        div.find('#dc' + comment.id).show();
+    }
+    return div;
+  }
+
+  /**
+   * A simple template renderer. Placeholders such as <%id%> are replaced
+   * by context['id'] with items being escaped. Placeholders such as <#id#>
+   * are not escaped.
+   */
+  function renderTemplate(template, context) {
+    var esc = $(document.createElement('div'));
+
+    function handle(ph, escape) {
+      var cur = context;
+      $.each(ph.split('.'), function() {
+        cur = cur[this];
+      });
+      return escape ? esc.text(cur || "").html() : cur;
+    }
+
+    return template.replace(/<([%#])([\w\.]*)\1>/g, function() {
+      return handle(arguments[2], arguments[1] == '%' ? true : false);
+    });
+  }
+
+  /** Flash an error message briefly. */
+  function showError(message) {
+    $(document.createElement('div')).attr({'class': 'popup-error'})
+      .append($(document.createElement('div'))
+               .attr({'class': 'error-message'}).text(message))
+      .appendTo('body')
+      .fadeIn("slow")
+      .delay(2000)
+      .fadeOut("slow");
+  }
+
+  /** Add a link the user uses to open the comments popup. */
+  $.fn.comment = function() {
+    return this.each(function() {
+      var id = $(this).attr('id').substring(1);
+      var count = COMMENT_METADATA[id];
+      var title = count + ' comment' + (count == 1 ? '' : 's');
+      var image = count > 0 ? opts.commentBrightImage : opts.commentImage;
+      var addcls = count == 0 ? ' nocomment' : '';
+      $(this)
+        .append(
+          $(document.createElement('a')).attr({
+            href: '#',
+            'class': 'sphinx-comment-open' + addcls,
+            id: 'ao' + id
+          })
+            .append($(document.createElement('img')).attr({
+              src: image,
+              alt: 'comment',
+              title: title
+            }))
+            .click(function(event) {
+              event.preventDefault();
+              show($(this).attr('id').substring(2));
+            })
+        )
+        .append(
+          $(document.createElement('a')).attr({
+            href: '#',
+            'class': 'sphinx-comment-close hidden',
+            id: 'ah' + id
+          })
+            .append($(document.createElement('img')).attr({
+              src: opts.closeCommentImage,
+              alt: 'close',
+              title: 'close'
+            }))
+            .click(function(event) {
+              event.preventDefault();
+              hide($(this).attr('id').substring(2));
+            })
+        );
+    });
+  };
+
+  var opts = {
+    processVoteURL: '/_process_vote',
+    addCommentURL: '/_add_comment',
+    getCommentsURL: '/_get_comments',
+    acceptCommentURL: '/_accept_comment',
+    deleteCommentURL: '/_delete_comment',
+    commentImage: '/static/_static/comment.png',
+    closeCommentImage: '/static/_static/comment-close.png',
+    loadingImage: '/static/_static/ajax-loader.gif',
+    commentBrightImage: '/static/_static/comment-bright.png',
+    upArrow: '/static/_static/up.png',
+    downArrow: '/static/_static/down.png',
+    upArrowPressed: '/static/_static/up-pressed.png',
+    downArrowPressed: '/static/_static/down-pressed.png',
+    voting: false,
+    moderator: false
+  };
+
+  if (typeof COMMENT_OPTIONS != "undefined") {
+    opts = jQuery.extend(opts, COMMENT_OPTIONS);
+  }
+
+  var popupTemplate = '\
+    <div class="sphinx-comments" id="sc<%id%>">\
+      <p class="sort-options">\
+        Sort by:\
+        <a href="#" class="sort-option byrating">best rated</a>\
+        <a href="#" class="sort-option byascage">newest</a>\
+        <a href="#" class="sort-option byage">oldest</a>\
+      </p>\
+      <div class="comment-header">Comments</div>\
+      <div class="comment-loading" id="cn<%id%>">\
+        loading comments... <img src="<%loadingImage%>" alt="" /></div>\
+      <ul id="cl<%id%>" class="comment-ul"></ul>\
+      <div id="ca<%id%>">\
+      <p class="add-a-comment">Add a comment\
+        (<a href="#" class="comment-markup" id="ab<%id%>">markup</a>):</p>\
+      <div class="comment-markup-box" id="mb<%id%>">\
+        reStructuredText markup: <i>*emph*</i>, <b>**strong**</b>, \
+        <tt>``code``</tt>, \
+        code blocks: <tt>::</tt> and an indented block after blank line</div>\
+      <form method="post" id="cf<%id%>" class="comment-form" action="">\
+        <textarea name="comment" cols="80"></textarea>\
+        <p class="propose-button">\
+          <a href="#" id="pc<%id%>" class="show-propose-change">\
+            Propose a change &#9657;\
+          </a>\
+          <a href="#" id="hc<%id%>" class="hide-propose-change">\
+            Propose a change &#9663;\
+          </a>\
+        </p>\
+        <textarea name="proposal" id="pt<%id%>" cols="80"\
+                  spellcheck="false"></textarea>\
+        <input type="submit" value="Add comment" />\
+        <input type="hidden" name="node" value="<%id%>" />\
+        <input type="hidden" name="parent" value="" />\
+      </form>\
+      </div>\
+    </div>';
+
+  var commentTemplate = '\
+    <div id="cd<%id%>" class="sphinx-comment<%css_class%>">\
+      <div class="vote">\
+        <div class="arrow">\
+          <a href="#" id="uv<%id%>" class="vote" title="vote up">\
+            <img src="<%upArrow%>" />\
+          </a>\
+          <a href="#" id="uu<%id%>" class="un vote" title="vote up">\
+            <img src="<%upArrowPressed%>" />\
+          </a>\
+        </div>\
+        <div class="arrow">\
+          <a href="#" id="dv<%id%>" class="vote" title="vote down">\
+            <img src="<%downArrow%>" id="da<%id%>" />\
+          </a>\
+          <a href="#" id="du<%id%>" class="un vote" title="vote down">\
+            <img src="<%downArrowPressed%>" />\
+          </a>\
+        </div>\
+      </div>\
+      <div class="comment-content">\
+        <p class="tagline comment">\
+          <span class="user-id"><%username%></span>\
+          <span class="rating"><%pretty_rating%></span>\
+          <span class="delta"><%time.delta%></span>\
+        </p>\
+        <div class="comment-text comment"><#text#></div>\
+        <p class="comment-opts comment">\
+          <a href="#" class="reply hidden" id="rl<%id%>">reply &#9657;</a>\
+          <a href="#" class="close-reply" id="cr<%id%>">reply &#9663;</a>\
+          <a href="#" id="sp<%id%>" class="show-proposal">proposal &#9657;</a>\
+          <a href="#" id="hp<%id%>" class="hide-proposal">proposal &#9663;</a>\
+          <a href="#" id="dc<%id%>" class="delete-comment hidden">delete</a>\
+          <span id="cm<%id%>" class="moderation hidden">\
+            <a href="#" id="ac<%id%>" class="accept-comment">accept</a>\
+          </span>\
+        </p>\
+        <pre class="proposal" id="pr<%id%>">\
+<#proposal_diff#>\
+        </pre>\
+          <ul class="comment-children" id="cl<%id%>"></ul>\
+        </div>\
+        <div class="clearleft"></div>\
+      </div>\
+    </div>';
+
+  var replyTemplate = '\
+    <li>\
+      <div class="reply-div" id="rd<%id%>">\
+        <form id="rf<%id%>">\
+          <textarea name="comment" cols="80"></textarea>\
+          <input type="submit" value="Add reply" />\
+          <input type="button" value="Cancel" />\
+          <input type="hidden" name="parent" value="<%id%>" />\
+          <input type="hidden" name="node" value="" />\
+        </form>\
+      </div>\
+    </li>';
+
+  $(document).ready(function() {
+    init();
+  });
+})(jQuery);
+
+$(document).ready(function() {
+  // add comment anchors for all paragraphs that are commentable
+  $('.sphinx-has-comment').comment();
+
+  // highlight search words in search results
+  $("div.context").each(function() {
+    var params = $.getQueryParameters();
+    var terms = (params.q) ? params.q[0].split(/\s+/) : [];
+    var result = $(this);
+    $.each(terms, function() {
+      result.highlightText(this.toLowerCase(), 'highlighted');
+    });
+  });
+
+  // directly open comment window if requested
+  var anchor = document.location.hash;
+  if (anchor.substring(0, 9) == '#comment-') {
+    $('#ao' + anchor.substring(9)).click();
+    document.location.hash = '#s' + anchor.substring(9);
+  }
+});
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/dataverse-R-ingest.html	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,270 @@
+
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+    
+    <title>Ingest of R (.RData) files &mdash; The Harvard Dataverse Network 3.6.1 documentation</title>
+    
+    <link rel="stylesheet" href="_static/agogo.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    
+    <script type="text/javascript">
+      var DOCUMENTATION_OPTIONS = {
+        URL_ROOT:    './',
+        VERSION:     '3.6.1',
+        COLLAPSE_INDEX: false,
+        FILE_SUFFIX: '.html',
+        HAS_SOURCE:  true
+      };
+    </script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
+    <link rel="top" title="The Harvard Dataverse Network 3.6.1 documentation" href="index.html" /> 
+  </head>
+  <body>
+    <div class="header-wrapper">
+      <div class="header">
+        <div class="headertitle"><a
+          href="index.html">The Harvard Dataverse Network 3.6.1 documentation</a></div>
+        <div class="rel">
+          <a href="genindex.html" title="General Index"
+             accesskey="I">index</a>
+        </div>
+       </div>
+    </div>
+
+    <div class="content-wrapper">
+      <div class="content">
+        <div class="document">
+            
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body">
+            
+  <div class="section" id="ingest-of-r-rdata-files">
+<h1>Ingest of R (.RData) files<a class="headerlink" href="#ingest-of-r-rdata-files" title="Permalink to this headline">¶</a></h1>
+<div class="section" id="overview">
+<h2>Overview.<a class="headerlink" href="#overview" title="Permalink to this headline">¶</a></h2>
+<p>Support for ingesting R data files was added in version 3.5. R
+has become increasingly popular in the research/academic community,
+owing to the fact that it is free and open-source (unlike SPSS and
+Stata). Consequently, more and more data is becoming available
+exclusively as R data files. This long-awaited feature makes it
+possible to ingest such data into the DVN as &#8220;subsettable&#8221; files.</p>
+</div>
+<div class="section" id="requirements">
+<h2>Requirements.<a class="headerlink" href="#requirements" title="Permalink to this headline">¶</a></h2>
+<p>R ingest relies on R having been installed, configured and made
+available to the DVN application via RServe (see the Installers
+Guide). This is in contrast to SPSS and Stata ingest, which can
+be performed without R present (though R is still needed to perform
+most subsetting/analysis tasks on the resulting data files).</p>
+<p>The data must be formatted as an R data frame (<tt class="docutils literal"><span class="pre">data.frame()</span></tt>). If an
+.RData file contains multiple data frames, only the first one will be
+ingested.</p>
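+<p>For example, a minimal R session that prepares such a file for
+ingest might look like this (the file and variable names below are
+arbitrary placeholders):</p>
+<pre class="literal-block">
+# build a data frame and save it as a single-object .RData file
+mydata &lt;- data.frame(id    = 1:3,
+                     name  = c("a", "b", "c"),
+                     score = c(1.5, 2.0, 2.5))
+save(mydata, file = "mydata.RData")
+</pre>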
+</div>
+<div class="section" id="data-types-compared-to-other-supported-formats-stat-spss">
+<h2>Data Types, compared to other supported formats (Stat, SPSS)<a class="headerlink" href="#data-types-compared-to-other-supported-formats-stat-spss" title="Permalink to this headline">¶</a></h2>
+<div class="section" id="integers-doubles-character-strings">
+<h3>Integers, Doubles, Character strings<a class="headerlink" href="#integers-doubles-character-strings" title="Permalink to this headline">¶</a></h3>
+<p>The handling of these types is intuitive and straightforward. The
+resulting tab file columns, summary statistics and UNF signatures
+should be identical to those produced by ingesting the same vectors
+from SPSS and Stata.</p>
+<p><strong>A couple of things that are unique to R/new in DVN:</strong></p>
+<p>R explicitly supports Missing Values for all of the types above;
+Missing Values encoded in R vectors will be recognized and preserved
+in the TAB files (as &#8216;NA&#8217;), and accounted for in the generated summary
+statistics and data analysis.</p>
+<p>In addition to Missing Values, R recognizes &#8220;Not a Number&#8221; (NaN) and
+positive and negative infinity for floating point variables. These
+are now properly supported by the DVN.</p>
+<p>Also note that, unlike Stata, which recognizes &#8220;float&#8221; and &#8220;double&#8221;
+as distinct data types, all floating point values in R are in fact
+double precision.</p>
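+<p>A quick illustration (any numeric vector behaves the same way):</p>
+<pre class="literal-block">
+# NA, NaN and the infinities are all legal in a double vector;
+# NA is preserved as "NA" in the ingested TAB file
+x &lt;- c(1.5, NA, NaN, Inf, -Inf)
+is.na(x)    # TRUE for both NA and NaN
+is.nan(x)   # TRUE only for NaN
+</pre>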
+</div>
+<div class="section" id="r-factors">
+<h3>R Factors<a class="headerlink" href="#r-factors" title="Permalink to this headline">¶</a></h3>
+<p>These are ingested as &#8220;Categorical Values&#8221; in the DVN.</p>
+<p>One thing to keep in mind: in both Stata and SPSS, the actual value of
+a categorical variable can be both character and numeric. In R, all
+factor values are strings, even if they are string representations of
+numbers. So the values of the resulting categoricals in the DVN will
+always be of string type too.</p>
+<div class="line-block">
+<div class="line"><strong>New:</strong> To properly handle <em>ordered factors</em> in R, the DVN now supports the concept of an &#8220;Ordered Categorical&#8221; - a categorical value where an explicit order is assigned to the list of value labels.</div>
+</div>
+</div>
+<div class="section" id="new-boolean-values">
+<h3>(New!) Boolean values<a class="headerlink" href="#new-boolean-values" title="Permalink to this headline">¶</a></h3>
+<p>R Boolean (logical) values are supported.</p>
+</div>
+<div class="section" id="limitations-of-r-as-compared-to-spss-and-stata">
+<h3>Limitations of R, as compared to SPSS and Stata.<a class="headerlink" href="#limitations-of-r-as-compared-to-spss-and-stata" title="Permalink to this headline">¶</a></h3>
+<p>Most noticeably, R lacks a standard mechanism for defining descriptive
+labels for the data frame variables.  In the DVN, similarly to
+both Stata and SPSS, variables have distinct names and labels, with
+the latter reserved for longer, descriptive text.
+For variables ingested from R data frames, the variable name will be
+used for both the &#8220;name&#8221; and the &#8220;label&#8221;.</p>
+<div class="line-block">
+<div class="line"><em>Optional R packages exist for providing descriptive variable labels;
+in one of the future versions support may be added for such a
+mechanism. It would of course work only for R files that were
+created with such optional packages</em>.</div>
+</div>
+<p>Similarly, R categorical values (factors) lack descriptive labels too.
+<strong>Note:</strong> This is potentially confusing, since R factors do
+actually have &#8220;labels&#8221;.  This is a matter of terminology - an R
+factor&#8217;s label is in fact the same thing as the &#8220;value&#8221; of a
+categorical variable in SPSS or Stata and DVN; it contains the actual
+meaningful data for the given observation. It is NOT a field reserved
+for explanatory, human-readable text, as is the case with the
+SPSS/Stata &#8220;label&#8221;.</p>
+<p>Ingesting an R factor with the level labels &#8220;MALE&#8221; and &#8220;FEMALE&#8221; will
+produce a categorical variable with &#8220;MALE&#8221; and &#8220;FEMALE&#8221; in the
+values and labels both.</p>
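+<p>As a sketch, the factor from this example - and an ordered variant -
+could be created as follows:</p>
+<pre class="literal-block">
+# factor levels are strings, even when they look numeric
+sex &lt;- factor(c("MALE", "FEMALE", "FEMALE"))
+levels(sex)    # "FEMALE" "MALE"
+# an ordered factor is ingested as an "Ordered Categorical"
+grade &lt;- factor(c("low", "high", "mid"),
+                levels = c("low", "mid", "high"), ordered = TRUE)
+</pre>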
+</div>
+</div>
+<div class="section" id="time-values-in-r">
+<h2>Time values in R<a class="headerlink" href="#time-values-in-r" title="Permalink to this headline">¶</a></h2>
+<p>This warrants a dedicated section of its own, because of some unique
+ways in which time values are handled in R.</p>
+<p>R makes an effort to treat a time value as a real time instance. This
+is in contrast with either SPSS or Stata, where time value
+representations such as &#8220;Sep-23-2013 14:57:21&#8221; are allowed; note that
+in the absence of an explicitly defined time zone, this value cannot
+be mapped to an exact point in real time.  R handles times in the
+&#8220;Unix-style&#8221; way: the value is converted to the
+&#8220;seconds-since-the-Epoch&#8221; Greenwich time (GMT or UTC) and the
+resulting numeric value is stored in the data file; time zone
+adjustments are made in real time as needed.</p>
+<p>Things still get ambiguous and confusing when R <strong>displays</strong> this time
+value: unless the time zone was explicitly defined, R will adjust the
+value to the current time zone. The resulting behavior is often
+counter-intuitive: if you create a time value, for example:</p>
+<blockquote>
+<div>timevalue &lt;- as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS");</div></blockquote>
+<p>on a computer configured for the San Francisco time zone, the value
+will be displayed differently on computers in different time zones;
+for example, as &#8220;12:57 PST&#8221; while still on the West Coast, but as
+&#8220;15:57 EST&#8221; in Boston.</p>
+<p>If it is important that the values are always displayed the same way,
+regardless of the current time zone, it is recommended that the time
+zone be explicitly defined. For example:</p>
+<blockquote>
+<div>attr(timevalue, "tzone") &lt;- "PST"</div></blockquote>
+<dl class="docutils">
+<dt>or</dt>
+<dd>timevalue &lt;- as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS", tz = "PST");</dd>
+</dl>
+<p>Now the value will always be displayed as &#8220;12:57 PST&#8221;, regardless of
+the time zone that is current for the OS ... <strong>BUT ONLY</strong> if the OS
+where R is installed actually understands the time zone &#8220;PST&#8221;, which
+is not by any means guaranteed! Otherwise, it will <strong>quietly adjust</strong>
+the stored GMT value to <strong>the current time zone</strong>, yet it will still
+display it with the &#8220;PST&#8221; tag attached! One way to rephrase this is
+that R does a fairly decent job of <strong>storing</strong> time values in a
+non-ambiguous, platform-independent manner - but gives you no guarantee that
+the values will be displayed in any way that is predictable or intuitive.</p>
+<p>In practical terms, it is recommended to use the long/descriptive
+forms of time zones, as they are more likely to be properly recognized
+on most computers. For example, &#8220;Japan&#8221; instead of &#8220;JST&#8221;.  Another possible
+solution is to explicitly use GMT or UTC (since it is very likely to be
+properly recognized on any system), or the &#8220;UTC+&lt;OFFSET&gt;&#8221; notation. Still, none of the above
+<strong>guarantees</strong> proper, non-ambiguous handling of time values in R data
+sets. The fact that R <strong>quietly</strong> modifies time values when it doesn&#8217;t
+recognize the supplied timezone attribute, yet still appends it to the
+<strong>changed</strong> time value, makes it quite difficult. (These issues are
+discussed in depth on R-related forums, and no attempt is made to
+summarize it all in any depth here; this is just to make you aware
+that this is a potentially complex issue!)</p>
+<p>An important thing to keep in mind, in connection with the DVN ingest
+of R files, is that it will <strong>reject</strong> an R data file with any time
+values that have time zones that we can&#8217;t recognize. This is done in
+order to avoid (some) of the potential issues outlined above.</p>
+<p>It is also recommended that any vectors containing time values
+ingested into the DVN are reviewed, and the resulting entries in the
+TAB files are compared against the original values in the R data
+frame, to make sure they have been ingested as expected.</p>
+<p>Another <strong>potential issue</strong> here is the <strong>UNF</strong>. The way the UNF
+algorithm works, the same date/time values with and without the
+timezone (e.g. &#8220;12:45&#8221; vs. &#8220;12:45 EST&#8221;) <strong>produce different
+UNFs</strong>. Considering that time values in Stata/SPSS do not have time
+zones, but ALL time values in R do (yes, they all do - if the timezone
+wasn&#8217;t defined explicitly, it implicitly becomes a time value in the
+&#8220;UTC&#8221; zone!), this means that it is <strong>impossible</strong> to have 2 time
+value vectors, in Stata/SPSS and R, that produce the same UNF.</p>
+<div class="line-block">
+<div class="line"><strong>A pro tip:</strong> if it is important to produce SPSS/Stata and R versions of</div>
+</div>
+<p>the same data set that result in the same UNF when ingested, you may
+define the time variables as <strong>strings</strong> in the R data frame, and use
+the &#8220;YYYY-MM-DD HH:mm:ss&#8221; formatting notation. This is the formatting used by the UNF
+algorithm to normalize time values, so doing the above will result in
+the same UNF as the vector of the same time values in Stata.</p>
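+<p>A minimal sketch of that approach (assuming <tt class="docutils literal"><span class="pre">timevalue</span></tt> is a POSIXct
+vector as in the examples above, and <tt class="docutils literal"><span class="pre">mydata</span></tt> is the data frame to be
+ingested):</p>
+<pre class="literal-block">
+# store the time variable as character strings, normalized the
+# way the UNF algorithm expects them
+mydata$time &lt;- format(timevalue, format = "%Y-%m-%d %H:%M:%S")
+</pre>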
+<p>Note: date values (dates only, without time) should be handled the
+exact same way as those in SPSS and Stata, and should produce the same
+UNFs.</p>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+        </div>
+        <div class="sidebar">
+          <h3>Table Of Contents</h3>
+          <ul>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-user-main.html">User Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-installer-main.html">Installers Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-developer-main.html">DVN Developers Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-api-main.html">APIs Guide</a></li>
+</ul>
+
+          <h3 style="margin-top: 1.5em;">Search</h3>
+          <form class="search" action="search.html" method="get">
+            <input type="text" name="q" />
+            <input type="submit" value="Go" />
+            <input type="hidden" name="check_keywords" value="yes" />
+            <input type="hidden" name="area" value="default" />
+          </form>
+          <p class="searchtip" style="font-size: 90%">
+            Enter search terms.
+          </p>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+    <div class="footer-wrapper">
+      <div class="footer">
+        <div class="left">
+          <a href="genindex.html" title="General Index"
+             >index</a>
+            <br/>
+            <a href="_sources/dataverse-R-ingest.txt"
+               rel="nofollow">Show Source</a>
+        </div>
+
+        <div class="right">
+          
+    <div class="footer">
+        &copy; Copyright 1997-2013, President &amp; Fellows Harvard University.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2b1.
+    </div>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+  </body>
+</html>
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/dataverse-api-main.html	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,684 @@
+
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+    
+    <title>APIs Guide &mdash; The Harvard Dataverse Network 3.6.1 documentation</title>
+    
+    <link rel="stylesheet" href="_static/agogo.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    
+    <script type="text/javascript">
+      var DOCUMENTATION_OPTIONS = {
+        URL_ROOT:    './',
+        VERSION:     '3.6.1',
+        COLLAPSE_INDEX: false,
+        FILE_SUFFIX: '.html',
+        HAS_SOURCE:  true
+      };
+    </script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
+    <link rel="top" title="The Harvard Dataverse Network 3.6.1 documentation" href="index.html" />
+    <link rel="prev" title="DVN Developers Guide" href="dataverse-developer-main.html" /> 
+  </head>
+  <body>
+    <div class="header-wrapper">
+      <div class="header">
+        <div class="headertitle"><a
+          href="index.html">The Harvard Dataverse Network 3.6.1 documentation</a></div>
+        <div class="rel">
+          <a href="dataverse-developer-main.html" title="DVN Developers Guide"
+             accesskey="P">previous</a> |
+          <a href="genindex.html" title="General Index"
+             accesskey="I">index</a>
+        </div>
+       </div>
+    </div>
+
+    <div class="content-wrapper">
+      <div class="content">
+        <div class="document">
+            
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body">
+            
+  <div class="section" id="apis-guide">
+<h1>APIs Guide<a class="headerlink" href="#apis-guide" title="Permalink to this headline">¶</a></h1>
+<p id="api"><strong>Introduction</strong></p>
+<p>We strongly encourage anyone interested in building tools to
+interoperate with the Dataverse Network to utilize our open source
+APIs. Please visit our <a class="reference external" href="http://thedata.org/book/apps">website</a>  for
+examples of external apps that have been built to work with our APIs.</p>
+<div class="section" id="data-sharing-api">
+<span id="id1"></span><h2>Data Sharing API<a class="headerlink" href="#data-sharing-api" title="Permalink to this headline">¶</a></h2>
+<p>As of version 3.0, a new API for programmatic access to the DVN data and
+metadata has been added. The API allows a remote, non-DVN
+archive/application to search the holdings and download files from a
+Dataverse Network.</p>
+<p>The Data Sharing API documentation is available below:</p>
+<div class="section" id="api-urls">
+<h3>API URLs<a class="headerlink" href="#api-urls" title="Permalink to this headline">¶</a></h3>
+<p>The URLs for the Data Sharing API resources are of the form:</p>
+<p><tt class="docutils literal"><span class="pre">/dvn/api/{/arg}{?{{arg}&amp;...}}</span></tt></p>
+<p>Generally, mandatory arguments are embedded in the URL and optional
+arguments are supplied as query parameters, in the <tt class="docutils literal"><span class="pre">?param=...</span></tt> notation.
+See the documentation for the individual resources below for details.</p>
+<p>The API supports basic HTTP Authentication. So that the access
+credentials are not transmitted in the clear, the API verbs (methods)
+below are <strong>only accessible over HTTPS</strong>.</p>
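+<p>For instance, a search-fields request could be made from R with the
+<tt class="docutils literal"><span class="pre">httr</span></tt> package (the host name and credentials below are placeholders):</p>
+<pre class="literal-block">
+library(httr)
+# basic HTTP authentication, over HTTPS only
+r &lt;- GET("https://dvn.example.edu/dvn/api/metadataSearchFields/",
+         authenticate("myuser", "mypassword"))
+content(r, as = "text")   # the XML record described below
+</pre>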
+</div>
+<div class="section" id="metadata-api">
+<h3>Metadata API<a class="headerlink" href="#metadata-api" title="Permalink to this headline">¶</a></h3>
+<p>The API for accessing Dataverse Network metadata is implemented in 4 verbs
+(resources):</p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">metadataSearchFields</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">metadataSearch</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">metadataFormatsAvailable</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">metadata</span></tt></div>
+</div>
+<div class="section" id="metadatasearchfields">
+<h4>metadataSearchFields<a class="headerlink" href="#metadatasearchfields" title="Permalink to this headline">¶</a></h4>
+<p><strong>Arguments:</strong></p>
+<p><tt class="docutils literal"><span class="pre">none</span></tt></p>
+<p><strong>URL example:</strong></p>
+<p><tt class="docutils literal"><span class="pre">/dvn/api/metadataSearchFields/</span></tt></p>
+<p><strong>Output:</strong></p>
+<p>XML record in the format below:</p>
+<div class="highlight-guess"><div class="highlight"><pre><span class="nt">&lt;MetadataSearchFields&gt;</span>
+<span class="nt">&lt;SearchableField&gt;</span>
+<span class="nt">&lt;fieldName&gt;</span>title<span class="nt">&lt;/fieldName&gt;</span>
+<span class="nt">&lt;fieldDescription&gt;</span>title<span class="nt">&lt;/fieldDescription&gt;</span>
+<span class="nt">&lt;/SearchableField&gt;</span>
+<span class="nt">&lt;SearchableField&gt;</span>
+<span class="nt">&lt;fieldName&gt;</span>authorName<span class="nt">&lt;/fieldName&gt;</span>
+<span class="nt">&lt;fieldDescription&gt;</span>authorName<span class="nt">&lt;/fieldDescription&gt;</span>
+<span class="nt">&lt;/SearchableField&gt;</span>
+<span class="nt">&lt;SearchableField&gt;</span>
+<span class="nt">&lt;fieldName&gt;</span>otherId<span class="nt">&lt;/fieldName&gt;</span>
+<span class="nt">&lt;fieldDescription&gt;</span>otherId<span class="nt">&lt;/fieldDescription&gt;</span>
+<span class="nt">&lt;/SearchableField&gt;</span>
+...
+<span class="nt">&lt;/MetadataSearchableFields&gt;</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="metadatasearch">
+<h4>metadataSearch<a class="headerlink" href="#metadatasearch" title="Permalink to this headline">¶</a></h4>
+<p><strong>Arguments:</strong></p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">queryString:</span> <span class="pre">mandatory,</span> <span class="pre">embedded.</span></tt></div>
+<div class="line"><em>Standard Lucene-style search queries are supported; (same query format currently used to define OAI sets, etc.)</em></div>
+</div>
+<p><strong>URL examples:</strong></p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">/dvn/api/metadataSearch/title:test</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">/dvn/api/metadataSearch/title:test</span> <span class="pre">AND</span> <span class="pre">authorName:leonid</span></tt></div>
+</div>
+<p><strong>Output:</strong></p>
+<p>XML record in the format below:</p>
+<div class="highlight-guess"><div class="highlight"><pre><span class="nt">&lt;MetadataSearchResults&gt;</span>
+<span class="nt">&lt;searchQuery&gt;</span>title:test<span class="nt">&lt;/searchQuery&gt;</span>
+<span class="nt">&lt;searchHits&gt;</span>
+<span class="nt">&lt;study</span> <span class="na">ID=</span><span class="s">&quot;hdl:TEST/10007&quot;</span><span class="nt">/&gt;</span>
+...
+<span class="nt">&lt;/searchHits&gt;</span>
+<span class="nt">&lt;/MetadataSearchResults&gt;</span>
+</pre></div>
+</div>
+<p><strong>Error Conditions:</strong></p>
+<p>Note that when the query does not produce any results, the resource returns an XML record
+with an empty <tt class="docutils literal"><span class="pre">&lt;searchHits&gt;</span></tt> list, NOT a 404.</p>
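+<p>A sketch of consuming the result in R with the <tt class="docutils literal"><span class="pre">XML</span></tt> package
+(assuming the response has been saved to <tt class="docutils literal"><span class="pre">results.xml</span></tt>):</p>
+<pre class="literal-block">
+library(XML)
+doc &lt;- xmlParse("results.xml")
+ids &lt;- xpathSApply(doc, "//searchHits/study/@ID")
+length(ids)   # 0 for an empty &lt;searchHits&gt; list - not an error
+</pre>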
+</div>
+<div class="section" id="metadataformatsavailable">
+<h4>metadataFormatsAvailable<a class="headerlink" href="#metadataformatsavailable" title="Permalink to this headline">¶</a></h4>
+<p><strong>Arguments:</strong></p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">objectId:</span> <span class="pre">mandatory,</span> <span class="pre">embedded.</span></tt></div>
+<div class="line"><em>Both global and local (database) IDs are supported.</em></div>
+</div>
+<p><strong>URL examples:</strong></p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">/dvn/api/metadataFormatsAvailable/hdl:1902.1/6635</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">/dvn/api/metadataFormatsAvailable/9956</span></tt></div>
+</div>
+<p><strong>Output:</strong></p>
+<p>XML record in the format below:</p>
+<div class="highlight-guess"><div class="highlight"><pre><span class="nt">&lt;MetadataFormatsAvailable</span> <span class="na">studyId=</span><span class="s">&quot;hdl:TEST/10007&quot;</span><span class="nt">&gt;</span>
+<span class="nt">&lt;formatAvailable</span> <span class="na">selectSupported=</span><span class="s">&quot;true&quot;</span> <span class="na">excludeSupported=</span><span class="s">&quot;true&quot;</span><span class="nt">&gt;</span>
+<span class="nt">&lt;formatName&gt;</span>ddi<span class="nt">&lt;/formatName&gt;</span>
+<span class="nt">&lt;formatSchema&gt;</span>http://www.icpsr.umich.edu/DDI/Version2-0.xsd<span class="nt">&lt;/formatSchema&gt;</span>
+<span class="nt">&lt;formatMime&gt;</span>application/xml<span class="nt">&lt;/formatMime&gt;</span>
+<span class="nt">&lt;/formatAvailable&gt;</span>
+<span class="nt">&lt;formatAvailable&gt;</span>
+<span class="nt">&lt;formatName&gt;</span>oai_dc<span class="nt">&lt;/formatName&gt;</span>
+<span class="nt">&lt;formatSchema&gt;</span>http://www.openarchives.org/OAI/2.0/oai_dc.xsd<span class="nt">&lt;/formatSchema&gt;</span>
+<span class="nt">&lt;formatMime&gt;</span>application/xml<span class="nt">&lt;/formatMime&gt;</span>
+<span class="nt">&lt;/formatAvailable&gt;</span>
+<span class="nt">&lt;/MetadataFormatsAvailable&gt;</span>
+</pre></div>
+</div>
+<p>(<strong>Note</strong> the <tt class="docutils literal"><span class="pre">selectSupported</span></tt> and <tt class="docutils literal"><span class="pre">excludeSupported</span></tt> attributes above!)</p>
+<p><strong>Error Conditions:</strong></p>
+<p><tt class="docutils literal"><span class="pre">404</span> <span class="pre">NOT</span> <span class="pre">FOUND</span></tt> if study does not exist</p>
+</div>
+<div class="section" id="metadata">
+<h4>metadata<a class="headerlink" href="#metadata" title="Permalink to this headline">¶</a></h4>
+<p><strong>Arguments:</strong></p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">objectId:</span> <span class="pre">mandatory,</span> <span class="pre">embedded.</span></tt></div>
+<div class="line"><em>Both global and local (database) IDs are supported.</em></div>
+</div>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">formatType:</span> <span class="pre">optional,</span> <span class="pre">query.</span></tt></div>
+<div class="line"><em>Defaults to DDI if not supplied.</em></div>
+</div>
+<p><strong>URL examples:</strong></p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">/dvn/api/metadata/hdl:1902.1/6635</span> <span class="pre">/dvn/api/metadata/9956</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">/dvn/api/metadata/hdl:1902.1/6635?formatType=ddi</span></tt></div>
+</div>
+<p><strong>Output:</strong></p>
+<p>Metadata record in the format requested, if available. No extra
+headers, etc.</p>
+<p><strong>Partial selection of metadata sections:</strong></p>
+<p>Where requesting partial records is supported (see
+<tt class="docutils literal"><span class="pre">metadataFormatsAvailable</span></tt> above for more info), the following additional parameters can be supplied:</p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">partialExclude:</span> <span class="pre">optional,</span> <span class="pre">query.</span></tt></div>
+<div class="line"><em>Xpath query representing metadata section to drop, where supported.</em></div>
+</div>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">partialInclude:</span> <span class="pre">optional,</span> <span class="pre">query.</span></tt></div>
+<div class="line"><em>Xpath query representing metadata section to include, where supported.</em></div>
+</div>
+<p><strong>Examples:</strong></p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">/dvn/api/metadata/hdl:1902.1/6635?formatType=ddi&amp;partialExclude=codeBook/dataDscr</span></tt></div>
+<div class="line">will produce a DDI without the dataDscr section.</div>
+<div class="line"><em>[I’m expecting this to be the single most useful and common real-life application of thisfeature - L.A.]</em></div>
+</div>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">/dvn/api/metadata/hdl:1902.1/6635?formatType=ddi&amp;partialInclude=codeBook/stdyDscr</span></tt></div>
+<div class="line">will produce a DDI with the stdyDscr section only.</div>
+</div>
+<p>(<strong>Note</strong>: for now, only simple top-level XPath queries like the above are supported).</p>
+<p>One other limitation of the current implementation: it does not validate the supplied <tt class="docutils literal"><span class="pre">partialExclude</span></tt> and <tt class="docutils literal"><span class="pre">partialInclude</span></tt> arguments; no error messages or diagnostics are given if the XPath queries are not part of the metadata schema. For example, requesting <tt class="docutils literal"><span class="pre">partialInclude=foobar</span></tt> will quietly produce an empty DDI, and <tt class="docutils literal"><span class="pre">partialExclude=foobar</span></tt> will not exclude anything (you will get a complete DDI).</p>
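+<p>For example, illustrating the quiet failure described above (hypothetical queries against the sample study):</p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">/dvn/api/metadata/hdl:1902.1/6635?formatType=ddi&amp;partialInclude=foobar</span></tt></div>
+<div class="line">will quietly return an empty DDI;</div>
+<div class="line"><tt class="docutils literal"><span class="pre">/dvn/api/metadata/hdl:1902.1/6635?formatType=ddi&amp;partialExclude=foobar</span></tt></div>
+<div class="line">will return the complete DDI.</div>
+</div>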
+<p><strong>Error Conditions:</strong></p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">404</span> <span class="pre">NOT</span> <span class="pre">FOUND</span></tt></div>
+<div class="line">if study does not exist</div>
+</div>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">503</span> <span class="pre">SERVICE</span> <span class="pre">UNAVAILABLE</span></tt></div>
+<div class="line">if study exists, but the format requested is not available;</div>
+<div class="line">also, when partial exclude or include is requested, if it’s not supported by the service (see the documenation for metadataFormatsAvailable above).</div>
+</div>
+<p><strong>Notes:</strong></p>
+<p>A real-life workflow scenario may go as follows:</p>
+<ol class="loweralpha simple">
+<li>Find the searchable index fields on this DVN (metadataSearchFields)</li>
+<li>Run a search (metadataSearch)</li>
+<li>For [select] studies returned, find what metadata formats are available (metadataFormatsAvailable)</li>
+<li>Retrieve the metadata in the desired format (metadata)</li>
+</ol>
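+<p>A minimal <cite>curl</cite> sketch of this workflow (the server name, study ID, and search query are illustrative; see the sections above for the exact argument syntax of each verb):</p>
+<div class="highlight-guess"><div class="highlight"><pre># a. list the searchable index fields
+curl http://$DVN_SERVER/dvn/api/metadataSearchFields
+
+# b. run a search (argument syntax as documented under metadataSearch)
+curl http://$DVN_SERVER/dvn/api/metadataSearch/title:coffee
+
+# c. for a study returned, list the metadata formats available
+curl http://$DVN_SERVER/dvn/api/metadataFormatsAvailable/hdl:1902.1/6635
+
+# d. retrieve the metadata in the desired format
+curl http://$DVN_SERVER/dvn/api/metadata/hdl:1902.1/6635?formatType=ddi
+</pre></div></div>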
+</div>
+</div>
+<div class="section" id="file-access-api">
+<h3>File Access API<a class="headerlink" href="#file-access-api" title="Permalink to this headline">¶</a></h3>
+<p>The Dataverse Network API for downloading digital objects (files) is implemented as two
+verbs (resources):</p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">downloadInfo</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">download</span></tt></div>
+</div>
+<div class="section" id="downloadinfo">
+<h4>downloadInfo<a class="headerlink" href="#downloadinfo" title="Permalink to this headline">¶</a></h4>
+<p><strong>Arguments:</strong></p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">objectId:</span> <span class="pre">mandatory,</span> <span class="pre">embedded.</span></tt></div>
+<div class="line">Database ID of the Dataverse Network Study File.</div>
+</div>
+<p><strong>URL example:</strong></p>
+<p><tt class="docutils literal"><span class="pre">/dvn/api/downloadInfo/9956</span></tt></p>
+<p><strong>Output:</strong></p>
+<p>XML record in the format below:</p>
+<p><em>(Note: the record below is only an example; we will provide full schema/documentation of the FileDownloadInfo record format below)</em></p>
+<div class="highlight-guess"><div class="highlight"><pre><span class="nt">&lt;FileDownloadInfo&gt;</span>
+<span class="nt">&lt;studyFile</span> <span class="na">fileId=</span><span class="s">&quot;9956&quot;</span><span class="nt">&gt;</span>
+
+<span class="nt">&lt;fileName&gt;</span>prettypicture.jpg<span class="nt">&lt;/fileName&gt;</span>
+<span class="nt">&lt;fileMimeType&gt;</span>image/jpeg<span class="nt">&lt;/fileMimeType&gt;</span>
+<span class="nt">&lt;fileSize&gt;</span>52825<span class="nt">&lt;/fileSize&gt;</span>
+
+<span class="nt">&lt;Authentication&gt;</span>
+        <span class="nt">&lt;authUser&gt;</span>testUser<span class="nt">&lt;/authUser&gt;</span>
+        <span class="nt">&lt;authMethod&gt;</span>password<span class="nt">&lt;/authMethod&gt;</span>
+<span class="nt">&lt;/Authentication&gt;</span>
+
+<span class="nt">&lt;Authorization</span> <span class="na">directAccess=</span><span class="s">&quot;true&quot;</span><span class="nt">/&gt;</span>
+
+<span class="nt">&lt;accessPermissions</span> <span class="na">accessGranted=</span><span class="s">&quot;true&quot;</span><span class="nt">&gt;</span>Authorized Access only<span class="nt">&lt;/accessPermissions&gt;</span>
+
+<span class="nt">&lt;accessRestrictions</span> <span class="na">accessGranted=</span><span class="s">&quot;true&quot;</span><span class="nt">&gt;</span>Terms of Use<span class="nt">&lt;/accessRestrictions&gt;</span>
+
+<span class="nt">&lt;accessServicesSupported&gt;</span>
+
+        <span class="nt">&lt;accessService&gt;</span>
+                <span class="nt">&lt;serviceName&gt;</span>thumbnail<span class="nt">&lt;/serviceName&gt;</span>
+                <span class="nt">&lt;serviceArgs&gt;</span>imageThumb=true<span class="nt">&lt;/serviceArgs&gt;</span>
+                <span class="nt">&lt;contentType&gt;</span>image/png<span class="nt">&lt;/contentType&gt;</span>
+                <span class="nt">&lt;serviceDesc&gt;</span>Image Thumbnail<span class="nt">&lt;/serviceDesc&gt;</span>
+        <span class="nt">&lt;/accessService&gt;</span>
+
+<span class="nt">&lt;/accessServicesSupported&gt;</span>
+<span class="nt">&lt;/studyFile&gt;</span>
+<span class="nt">&lt;/FileDownloadInfo&gt;</span>
+</pre></div>
+</div>
+<p><strong>Error Conditions:</strong></p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">404</span> <span class="pre">NOT</span> <span class="pre">FOUND</span></tt></div>
+<div class="line">Study file does not exist.</div>
+</div>
+</div>
+<div class="section" id="download">
+<h4>download<a class="headerlink" href="#download" title="Permalink to this headline">¶</a></h4>
+<p><strong>Arguments:</strong></p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">objectId:</span> <span class="pre">mandatory,</span> <span class="pre">embedded.</span></tt></div>
+<div class="line">Database ID of the DVN Study File.</div>
+</div>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">Optional</span> <span class="pre">Query</span> <span class="pre">args:</span></tt></div>
+<div class="line">As specified in the output of downloadInfo, above.</div>
+</div>
+<p><strong>URL examples:</strong></p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">/dvn/api/download/9956</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">/dvn/api/download/9956?imageThumb=true</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">/dvn/api/download/9957?fileFormat=stata</span></tt></div>
+</div>
+<p><strong>Output:</strong></p>
+<p>Byte stream (with proper HTTP headers specifying the content
+type, file name, and so on)</p>
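+<p>For example, a minimal <cite>curl</cite> sketch saving the stream to a local file (the output file names are illustrative):</p>
+<div class="highlight-guess"><div class="highlight"><pre># download file 9956 and save it locally
+curl -o prettypicture.jpg http://$DVN_SERVER/dvn/api/download/9956
+
+# the same file as a PNG thumbnail (see the accessService entry in downloadInfo)
+curl -o thumb.png http://$DVN_SERVER/dvn/api/download/9956?imageThumb=true
+</pre></div></div>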
+<p><strong>Error Conditions:</strong></p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">404</span> <span class="pre">NOT</span> <span class="pre">FOUND</span></tt></div>
+<div class="line">Study file does not exist.</div>
+</div>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">401</span> <span class="pre">AUTHORIZATION</span> <span class="pre">REQUIRED</span></tt></div>
+<div class="line">Access to restricted object attempted without HTTP Authorization header supplied.</div>
+</div>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">403</span> <span class="pre">PERMISSION</span> <span class="pre">DENIED</span> <span class="pre">HTTP</span></tt></div>
+<div class="line">Authorization header supplied, but the authenticated user is not</div>
+<div class="line">authorized to directly access the object protected by Access</div>
+<div class="line">Permissions and/or Access Restrictions (“Terms of Use”).</div>
+</div>
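+<p>For restricted objects, a sketch of supplying credentials via HTTP Basic authentication (assuming password authentication, as reported by <tt class="docutils literal"><span class="pre">downloadInfo</span></tt>):</p>
+<div class="highlight-guess"><div class="highlight"><pre># send an HTTP Authorization header to avoid the 401 response above
+curl -u $USERNAME:$PASSWORD -o restrictedfile.dat http://$DVN_SERVER/dvn/api/download/9956
+</pre></div></div>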
+</div>
+</div>
+</div>
+<div class="section" id="data-deposit-api">
+<span id="id2"></span><h2>Data Deposit API<a class="headerlink" href="#data-deposit-api" title="Permalink to this headline">¶</a></h2>
+<p>As of version 3.6, a new API for programmatic deposit of data and metadata to the Dataverse Network has been added. The API allows a remote, non-Dataverse Network archive/application to deposit files and metadata to a Dataverse Network installation.</p>
+<div class="section" id="overview-of-data-deposit-api">
+<h3>Overview of Data Deposit API<a class="headerlink" href="#overview-of-data-deposit-api" title="Permalink to this headline">¶</a></h3>
+<p>&#8220;v1&#8221; of the DVN Data Deposit API is a partial implementation of the SWORDv2 protocol, the specification for which is available at <a class="reference external" href="http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html">http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html</a></p>
+<p>Please reference the SWORDv2 specification for expected HTTP status codes (e.g. 201, 204, 404), headers (e.g. &#8220;Location&#8221;), etc.</p>
+<div class="section" id="data-deposit-api-v1-curl-examples">
+<h4>Data Deposit API v1 <cite>curl</cite> examples<a class="headerlink" href="#data-deposit-api-v1-curl-examples" title="Permalink to this headline">¶</a></h4>
+<p>The following <cite>curl</cite> commands demonstrate supported operations:</p>
+<div class="section" id="retrieve-sword-service-document">
+<h5>Retrieve SWORD service document<a class="headerlink" href="#retrieve-sword-service-document" title="Permalink to this headline">¶</a></h5>
+<p>The service document enumerates the dataverses (&#8220;collections&#8221; from a SWORD perspective) the user can deposit data into. The &#8220;collectionPolicy&#8221; element for each dataverse contains the deposit terms of use for the network and dataverse.</p>
+<p><tt class="docutils literal"><span class="pre">curl</span> <span class="pre">https://$USERNAME:$PASSWORD&#64;$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/service-document</span></tt></p>
+</div>
+<div class="section" id="create-a-study-with-an-atom-entry-xml-file">
+<h5>Create a study with an Atom entry (XML file)<a class="headerlink" href="#create-a-study-with-an-atom-entry-xml-file" title="Permalink to this headline">¶</a></h5>
+<p><tt class="docutils literal"><span class="pre">curl</span> <span class="pre">--data-binary</span> <span class="pre">&quot;&#64;atom-entry-study.xml&quot;</span> <span class="pre">-H</span> <span class="pre">&quot;Content-Type:</span> <span class="pre">application/atom+xml&quot;</span> <span class="pre">https://$USERNAME:$PASSWORD&#64;$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS</span></tt></p>
+<div class="highlight-guess"><div class="highlight"><pre><span class="cp">&lt;?xml version=&quot;1.0&quot;?&gt;</span>
+<span class="c">&lt;!--</span>
+<span class="c">modified from http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html#protocoloperations_editingcontent_metadata</span>
+<span class="c">--&gt;</span>
+<span class="nt">&lt;entry</span> <span class="na">xmlns=</span><span class="s">&quot;http://www.w3.org/2005/Atom&quot;</span>
+       <span class="na">xmlns:dcterms=</span><span class="s">&quot;http://purl.org/dc/terms/&quot;</span><span class="nt">&gt;</span>
+   <span class="c">&lt;!-- some embedded metadata --&gt;</span>
+   <span class="nt">&lt;dcterms:title&gt;</span>Roasting at Home<span class="nt">&lt;/dcterms:title&gt;</span>
+   <span class="nt">&lt;dcterms:creator&gt;</span>Peets, John<span class="nt">&lt;/dcterms:creator&gt;</span>
+   <span class="nt">&lt;dcterms:creator&gt;</span>Stumptown, Jane<span class="nt">&lt;/dcterms:creator&gt;</span>
+   <span class="c">&lt;!-- Producer with financial or admin responsibility of the data --&gt;</span>
+   <span class="nt">&lt;dcterms:publisher&gt;</span>Coffee Bean State University<span class="nt">&lt;/dcterms:publisher&gt;</span>
+   <span class="c">&lt;!-- related publications --&gt;</span>
+   <span class="nt">&lt;dcterms:isReferencedBy</span> <span class="na">holdingsURI=</span><span class="s">&quot;http://dx.doi.org/10.1038/dvn333&quot;</span> <span class="na">agency=</span><span class="s">&quot;DOI&quot;</span>
+       <span class="na">IDNo=</span><span class="s">&quot;10.1038/dvn333&quot;</span><span class="nt">&gt;</span>Peets, J., <span class="ni">&amp;amp;</span> Stumptown, J. (2013). Roasting at Home. New England Journal of Coffee, 3(1), 22-34.<span class="nt">&lt;/dcterms:isReferencedBy&gt;</span>
+   <span class="c">&lt;!-- production date --&gt;</span>
+   <span class="nt">&lt;dcterms:date&gt;</span>2013-07-11<span class="nt">&lt;/dcterms:date&gt;</span>
+   <span class="c">&lt;!-- Other Identifier for the data in this study (or potentially global id if unused) --&gt;</span>
+   <span class="c">&lt;!--</span>
+<span class="c">   &lt;dcterms:identifier&gt;hdl:1XXZY.1/XYXZ&lt;/dcterms:identifier&gt;</span>
+<span class="c">   --&gt;</span>
+   <span class="nt">&lt;dcterms:description&gt;</span>Considerations before you start roasting your own coffee at home.<span class="nt">&lt;/dcterms:description&gt;</span>
+   <span class="c">&lt;!-- keywords --&gt;</span>
+   <span class="nt">&lt;dcterms:subject&gt;</span>coffee<span class="nt">&lt;/dcterms:subject&gt;</span>
+   <span class="nt">&lt;dcterms:subject&gt;</span>beverage<span class="nt">&lt;/dcterms:subject&gt;</span>
+   <span class="nt">&lt;dcterms:subject&gt;</span>caffeine<span class="nt">&lt;/dcterms:subject&gt;</span>
+   <span class="c">&lt;!-- geographic coverage --&gt;</span>
+   <span class="nt">&lt;dcterms:coverage&gt;</span>United States<span class="nt">&lt;/dcterms:coverage&gt;</span>
+   <span class="nt">&lt;dcterms:coverage&gt;</span>Canada<span class="nt">&lt;/dcterms:coverage&gt;</span>
+   <span class="c">&lt;!-- kind of data --&gt;</span>
+   <span class="nt">&lt;dcterms:type&gt;</span>aggregate data<span class="nt">&lt;/dcterms:type&gt;</span>
+   <span class="c">&lt;!-- List of sources of the data collection--&gt;</span>
+   <span class="nt">&lt;dcterms:source&gt;</span>Stumptown, Jane. 2011. Home Roasting. Coffeemill Press.<span class="nt">&lt;/dcterms:source&gt;</span>
+   <span class="c">&lt;!-- restrictions --&gt;</span>
+   <span class="nt">&lt;dcterms:rights&gt;</span>Creative Commons CC-BY 3.0 (unported) http://creativecommons.org/licenses/by/3.0/<span class="nt">&lt;/dcterms:rights&gt;</span>
+   <span class="c">&lt;!-- related materials --&gt;</span>
+   <span class="nt">&lt;dcterms:relation&gt;</span>Peets, John. 2010. Roasting Coffee at the Coffee Shop. Coffeemill Press<span class="nt">&lt;/dcterms:relation&gt;</span>
+<span class="nt">&lt;/entry&gt;</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="dublin-core-dc-qualified-mapping-ddi-dataverse-network-db-element-crosswalk">
+<h5>Dublin Core (DC) Qualified Mapping - DDI - Dataverse Network DB Element Crosswalk<a class="headerlink" href="#dublin-core-dc-qualified-mapping-ddi-dataverse-network-db-element-crosswalk" title="Permalink to this headline">¶</a></h5>
+<table border="1" class="docutils">
+<colgroup>
+<col width="12%" />
+<col width="19%" />
+<col width="13%" />
+<col width="57%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">DC (terms: namespace)</th>
+<th class="head">DVN DB Element</th>
+<th class="head">DDI Element 2.x</th>
+<th class="head">Note</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td>dcterms:title</td>
+<td>title</td>
+<td>2.1.1.1 title</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>dcterms:creator</td>
+<td>author (LastName, FirstName)</td>
+<td>2.1.2.1 AuthEnty</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-even"><td>dcterms:subject</td>
+<td>keyword</td>
+<td>2.2.1.1. keyword</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>dcterms:description</td>
+<td>abstract</td>
+<td>2.2.2 abstract</td>
+<td>Describing the purpose, scope or nature of the data collection...</td>
+</tr>
+<tr class="row-even"><td>dcterms:publisher</td>
+<td>producer</td>
+<td>2.1.3.1 producer</td>
+<td>person or agency financially or administratively responsible for the dataset</td>
+</tr>
+<tr class="row-odd"><td>dcterms:contributor</td>
+<td>n/a</td>
+<td>n/a</td>
+<td>see dcterms:creator above</td>
+</tr>
+<tr class="row-even"><td>dcterms:date</td>
+<td>productionDate (YYYY-MM-DD or YYYY-MM or YYYY)</td>
+<td>2.1.3.3 prodDate</td>
+<td>production or published date of dataset</td>
+</tr>
+<tr class="row-odd"><td>dcterms:type</td>
+<td>kindOfData</td>
+<td>2.2.3.10 dataKind</td>
+<td>Type of data included in the file: survey data, census/enumeration data, aggregate data, clinical</td>
+</tr>
+<tr class="row-even"><td>dcterms:format</td>
+<td>n/a</td>
+<td>n/a</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-odd"><td>dcterms:identifier</td>
+<td>otherID</td>
+<td>2.1.1.5 IDNo</td>
+<td>Don&#8217;t use this field to map a journal article ID. Only IDs that directly belong to the dataset</td>
+</tr>
+<tr class="row-even"><td>dcterms:source</td>
+<td>dataSources</td>
+<td>2.3.1.8.1 dataSrc</td>
+<td>List of books, articles, data files if any that served as the sources for the data collection</td>
+</tr>
+<tr class="row-odd"><td>dcterms:language</td>
+<td>n/a</td>
+<td>n/a</td>
+<td>&nbsp;</td>
+</tr>
+<tr class="row-even"><td>dcterms:relation</td>
+<td>relatedMaterial</td>
+<td>2.5.1 relMat</td>
+<td>any related material (journal article is not included here - see: dcterms:isReferencedBy below)</td>
+</tr>
+<tr class="row-odd"><td>dcterms:coverage</td>
+<td>geographicCoverage</td>
+<td>2.2.3.4 geogCover</td>
+<td>Info on the geographic coverage of the data</td>
+</tr>
+<tr class="row-even"><td>dcterms:rights</td>
+<td>restrictions</td>
+<td>2.4.2.3 restrctn</td>
+<td>any restrictions on the access or use of the dataset</td>
+</tr>
+<tr class="row-odd"><td>dcterms:bibliographicCitation</td>
+<td>dataCitation</td>
+<td>? (2.1.7 biblCit)</td>
+<td>data citation for the study in the Dataverse Network</td>
+</tr>
+<tr class="row-even"><td>dcterms:isReferencedBy</td>
+<td>studyRelPublications</td>
+<td>? (not set by DDI community yet)</td>
+<td>the publication (journal article, book, other work) that uses this dataset (include citation, permanent identifier (DOI), and permanent URL)</td>
+</tr>
+</tbody>
+</table>
+</div>
+<div class="section" id="add-files-to-a-study-with-a-zip-file">
+<h5>Add files to a study with a zip file<a class="headerlink" href="#add-files-to-a-study-with-a-zip-file" title="Permalink to this headline">¶</a></h5>
+<p><tt class="docutils literal"><span class="pre">curl</span> <span class="pre">--data-binary</span> <span class="pre">&#64;example.zip</span> <span class="pre">-H</span> <span class="pre">&quot;Content-Disposition:</span> <span class="pre">filename=example.zip&quot;</span> <span class="pre">-H</span> <span class="pre">&quot;Content-Type:</span> <span class="pre">application/zip&quot;</span> <span class="pre">-H</span> <span class="pre">&quot;Packaging:</span> <span class="pre">http://purl.org/net/sword/package/SimpleZip&quot;</span> <span class="pre">https://$USERNAME:$PASSWORD&#64;$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/study/hdl:TEST/12345</span></tt></p>
+</div>
+<div class="section" id="display-a-study-atom-entry">
+<h5>Display a study atom entry<a class="headerlink" href="#display-a-study-atom-entry" title="Permalink to this headline">¶</a></h5>
+<p>Contains the data citation (bibliographicCitation), alternate URI [persistent URI of the study], edit URI, edit media URI, and statement URI.</p>
+<p><tt class="docutils literal"><span class="pre">curl</span> <span class="pre">https://$USERNAME:$PASSWORD&#64;$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345</span></tt></p>
+</div>
+<div class="section" id="display-a-study-statement">
+<h5>Display a study statement<a class="headerlink" href="#display-a-study-statement" title="Permalink to this headline">¶</a></h5>
+<p>Contains a feed of file entries, latestVersionState, and a locked boolean.</p>
+<p><tt class="docutils literal"><span class="pre">curl</span> <span class="pre">https://$USERNAME:$PASSWORD&#64;$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/statement/study/hdl:TEST/12345</span></tt></p>
+</div>
+<div class="section" id="delete-a-file-by-database-id">
+<h5>Delete a file by database id<a class="headerlink" href="#delete-a-file-by-database-id" title="Permalink to this headline">¶</a></h5>
+<p><tt class="docutils literal"><span class="pre">curl</span> <span class="pre">-i</span> <span class="pre">-X</span> <span class="pre">DELETE</span> <span class="pre">https://$USERNAME:$PASSWORD&#64;$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/file/2325541</span></tt></p>
+</div>
+<div class="section" id="replacing-cataloging-information-title-author-etc-for-a-study">
+<h5>Replacing cataloging information (title, author, etc.) for a study<a class="headerlink" href="#replacing-cataloging-information-title-author-etc-for-a-study" title="Permalink to this headline">¶</a></h5>
+<p>Please note that all cataloging information will be replaced, including fields that cannot be expressed with &#8220;dcterms&#8221; fields.</p>
+<p><tt class="docutils literal"><span class="pre">curl</span> <span class="pre">--upload-file</span> <span class="pre">&quot;atom-entry-study2.xml&quot;</span> <span class="pre">-H</span> <span class="pre">&quot;Content-Type:</span> <span class="pre">application/atom+xml&quot;</span> <span class="pre">https://$USERNAME:$PASSWORD&#64;$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345</span></tt></p>
+<div class="highlight-guess"><div class="highlight"><pre><span class="cp">&lt;?xml version=&quot;1.0&quot;?&gt;</span>
+<span class="c">&lt;!--</span>
+<span class="c">for modifying a study created with atom-entry-study.xml</span>
+<span class="c">--&gt;</span>
+<span class="nt">&lt;entry</span> <span class="na">xmlns=</span><span class="s">&quot;http://www.w3.org/2005/Atom&quot;</span>
+       <span class="na">xmlns:dcterms=</span><span class="s">&quot;http://purl.org/dc/terms/&quot;</span><span class="nt">&gt;</span>
+   <span class="c">&lt;!-- some embedded metadata --&gt;</span>
+   <span class="nt">&lt;dcterms:title&gt;</span>The Levels of Caffeine in Cold Brew Coffee<span class="nt">&lt;/dcterms:title&gt;</span>
+   <span class="nt">&lt;dcterms:creator&gt;</span>Peets, John L.<span class="nt">&lt;/dcterms:creator&gt;</span>
+   <span class="nt">&lt;dcterms:creator&gt;</span>Stumptown Research Institute<span class="nt">&lt;/dcterms:creator&gt;</span>
+   <span class="nt">&lt;dcterms:isReferencedBy</span> <span class="na">holdingsURI=</span><span class="s">&quot;http://dx.doi.org/10.1038/dvn333&quot;</span> <span class="na">agency=</span><span class="s">&quot;DOI&quot;</span>
+       <span class="na">IDNo=</span><span class="s">&quot;10.1038/dvn333&quot;</span><span class="nt">&gt;</span>Peets, J., <span class="ni">&amp;amp;</span> Stumptown, J. (2013). Roasting at Home. New England Journal of Coffee, 3(1), 22-34.<span class="nt">&lt;/dcterms:isReferencedBy&gt;</span>
+   <span class="nt">&lt;dcterms:date&gt;</span>2013-08-11<span class="nt">&lt;/dcterms:date&gt;</span>
+   <span class="nt">&lt;dcterms:description&gt;</span>This study evaluates the caffeine levels of a cold brewed coffee.<span class="nt">&lt;/dcterms:description&gt;</span>
+   <span class="nt">&lt;dcterms:subject&gt;</span>coffee bean<span class="nt">&lt;/dcterms:subject&gt;</span>
+   <span class="nt">&lt;dcterms:subject&gt;</span>caffeine<span class="nt">&lt;/dcterms:subject&gt;</span>
+   <span class="nt">&lt;dcterms:subject&gt;</span>cold brew process<span class="nt">&lt;/dcterms:subject&gt;</span>
+   <span class="nt">&lt;dcterms:subject&gt;</span>Stumptown Coffee Company<span class="nt">&lt;/dcterms:subject&gt;</span>
+   <span class="nt">&lt;dcterms:rights&gt;</span>Creative Commons CC-BY 3.0 (unported) http://creativecommons.org/licenses/by/3.0/<span class="nt">&lt;/dcterms:rights&gt;</span>
+<span class="nt">&lt;/entry&gt;</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="list-studies-in-a-dataverse">
+<h5>List studies in a dataverse<a class="headerlink" href="#list-studies-in-a-dataverse" title="Permalink to this headline">¶</a></h5>
+<p><tt class="docutils literal"><span class="pre">curl</span> <span class="pre">https://$USERNAME:$PASSWORD&#64;$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS</span></tt></p>
+</div>
+<div class="section" id="delete-a-study-non-released-studies-only">
+<h5>Delete a study (non-released studies only)<a class="headerlink" href="#delete-a-study-non-released-studies-only" title="Permalink to this headline">¶</a></h5>
+<p><tt class="docutils literal"><span class="pre">curl</span> <span class="pre">-i</span> <span class="pre">-X</span> <span class="pre">DELETE</span> <span class="pre">https://$USERNAME:$PASSWORD&#64;$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345</span></tt></p>
+</div>
+<div class="section" id="deaccession-a-study-released-studies-only">
+<h5>Deaccession a study (released studies only)<a class="headerlink" href="#deaccession-a-study-released-studies-only" title="Permalink to this headline">¶</a></h5>
+<p><tt class="docutils literal"><span class="pre">curl</span> <span class="pre">-i</span> <span class="pre">-X</span> <span class="pre">DELETE</span> <span class="pre">https://$USERNAME:$PASSWORD&#64;$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345</span></tt></p>
+</div>
+<div class="section" id="release-a-study">
+<h5>Release a study<a class="headerlink" href="#release-a-study" title="Permalink to this headline">¶</a></h5>
+<p><tt class="docutils literal"><span class="pre">curl</span> <span class="pre">-X</span> <span class="pre">POST</span> <span class="pre">-H</span> <span class="pre">&quot;In-Progress:</span> <span class="pre">false&quot;</span> <span class="pre">--upload-file</span> <span class="pre">zero-length-file.txt</span> <span class="pre">https://$USERNAME:$PASSWORD&#64;$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345</span></tt></p>
+</div>
+<div class="section" id="determine-if-a-dataverse-has-been-released">
+<h5>Determine if a dataverse has been released<a class="headerlink" href="#determine-if-a-dataverse-has-been-released" title="Permalink to this headline">¶</a></h5>
+<p>Look for a <cite>dataverseHasBeenReleased</cite> boolean.</p>
+<p><tt class="docutils literal"><span class="pre">curl</span> <span class="pre">https://$USERNAME:$PASSWORD&#64;$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS</span></tt></p>
+</div>
+</div>
+<div class="section" id="curl-reference">
+<h4><cite>curl</cite> reference<a class="headerlink" href="#curl-reference" title="Permalink to this headline">¶</a></h4>
+<p>Per <a class="reference external" href="http://curl.haxx.se/docs/manpage.html">http://curl.haxx.se/docs/manpage.html</a></p>
+<ul class="simple">
+<li><cite>--upload-file</cite> is an HTTP <cite>PUT</cite></li>
+<li><cite>--data-binary</cite> is an HTTP <cite>POST</cite></li>
+</ul>
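+<p>In other words, the two idioms used throughout these examples are roughly equivalent to the following explicit forms (a sketch; <cite>$URL</cite> stands for the target edit or collection URI):</p>
+<div class="highlight-guess"><div class="highlight"><pre># --upload-file performs a PUT with the named file as the request body
+curl -X PUT --data-binary @atom-entry-study2.xml -H "Content-Type: application/atom+xml" $URL
+
+# --data-binary (without -X) performs a POST with the given body
+curl -X POST --data-binary @atom-entry-study.xml -H "Content-Type: application/atom+xml" $URL
+</pre></div></div>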
+</div>
+</div>
+<div class="section" id="dvn-data-deposit-api-v1-client-sample-code-python">
+<h3>DVN Data Deposit API v1 client sample code (Python)<a class="headerlink" href="#dvn-data-deposit-api-v1-client-sample-code-python" title="Permalink to this headline">¶</a></h3>
+<p><a class="reference external" href="https://github.com/dvn/swordpoc/tree/master/dvn_client">https://github.com/dvn/swordpoc/tree/master/dvn_client</a> contains sample Python code for writing a DVN Data Deposit API v1 client. It makes use of a Python client library which conforms to the SWORDv2 specification: <a class="reference external" href="https://github.com/swordapp/python-client-sword2">https://github.com/swordapp/python-client-sword2</a></p>
+</div>
+<div class="section" id="swordv2-client-libraries">
+<h3>SWORDv2 client libraries<a class="headerlink" href="#swordv2-client-libraries" title="Permalink to this headline">¶</a></h3>
+<ul class="simple">
+<li>Python: <a class="reference external" href="https://github.com/swordapp/python-client-sword2">https://github.com/swordapp/python-client-sword2</a></li>
+<li>Java: <a class="reference external" href="https://github.com/swordapp/JavaClient2.0">https://github.com/swordapp/JavaClient2.0</a></li>
+<li>Ruby: <a class="reference external" href="https://github.com/swordapp/sword2ruby">https://github.com/swordapp/sword2ruby</a></li>
+<li>PHP: <a class="reference external" href="https://github.com/swordapp/swordappv2-php-library">https://github.com/swordapp/swordappv2-php-library</a></li>
+</ul>
+</div>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+        </div>
+        <div class="sidebar">
+          <h3>Table Of Contents</h3>
+          <ul class="current">
+<li class="toctree-l1"><a class="reference internal" href="dataverse-user-main.html">User Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-installer-main.html">Installers Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-developer-main.html">DVN Developers Guide</a></li>
+<li class="toctree-l1 current"><a class="current reference internal" href="">APIs Guide</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="#data-sharing-api">Data Sharing API</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="#api-urls">API URLs</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#metadata-api">Metadata API</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#metadatasearchfields">metadataSearchFields</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#metadatasearch">metadataSearch</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#metadataformatsavailable">metadataFormatsAvailable</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#metadata">metadata</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#file-access-api">File Access API</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#downloadinfo">downloadInfo</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#download">download</a></li>
+</ul>
+</li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="#data-deposit-api">Data Deposit API</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="#overview-of-data-deposit-api">Overview of Data Deposit API</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#data-deposit-api-v1-curl-examples">Data Deposit API v1 <cite>curl</cite> examples</a><ul>
+<li class="toctree-l5"><a class="reference internal" href="#retrieve-sword-service-document">Retrieve SWORD service document</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#create-a-study-with-an-atom-entry-xml-file">Create a study with an Atom entry (XML file)</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#dublin-core-dc-qualified-mapping-ddi-dataverse-network-db-element-crosswalk">Dublin Core (DC) Qualified Mapping - DDI - Dataverse Network DB Element Crosswalk</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#add-files-to-a-study-with-a-zip-file">Add files to a study with a zip file</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#display-a-study-atom-entry">Display a study atom entry</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#display-a-study-statement">Display a study statement</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#delete-a-file-by-database-id">Delete a file by database id</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#replacing-cataloging-information-title-author-etc-for-a-study">Replacing cataloging information (title, author, etc.) for a study</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#list-studies-in-a-dataverse">List studies in a dataverse</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#delete-a-study-non-released-studies-only">Delete a study (non-released studies only)</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#deaccession-a-study-released-studies-only">Deaccession a study (released studies only)</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#release-a-study">Release a study</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#determine-if-a-dataverse-has-been-released">Determine if a dataverse has been released</a></li>
+</ul>
+</li>
+<li class="toctree-l4"><a class="reference internal" href="#curl-reference"><cite>curl</cite> reference</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#dvn-data-deposit-api-v1-client-sample-code-python">DVN Data Deposit API v1 client sample code (Python)</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#swordv2-client-libraries">SWORDv2 client libraries</a></li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+
+          <h3 style="margin-top: 1.5em;">Search</h3>
+          <form class="search" action="search.html" method="get">
+            <input type="text" name="q" />
+            <input type="submit" value="Go" />
+            <input type="hidden" name="check_keywords" value="yes" />
+            <input type="hidden" name="area" value="default" />
+          </form>
+          <p class="searchtip" style="font-size: 90%">
+            Enter search terms.
+          </p>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+    <div class="footer-wrapper">
+      <div class="footer">
+        <div class="left">
+          <a href="dataverse-developer-main.html" title="DVN Developers Guide"
+             >previous</a> |
+          <a href="genindex.html" title="General Index"
+             >index</a>
+            <br/>
+            <a href="_sources/dataverse-api-main.txt"
+               rel="nofollow">Show Source</a>
+        </div>
+
+        <div class="right">
+          
+    <div class="footer">
+        &copy; Copyright 1997-2013, President &amp; Fellows of Harvard University.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2b1.
+    </div>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+  </body>
+</html>
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/dataverse-developer-main.html	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,777 @@
+
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+    
+    <title>DVN Developers Guide &mdash; The Harvard Dataverse Network 3.6.1 documentation</title>
+    
+    <link rel="stylesheet" href="_static/agogo.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    
+    <script type="text/javascript">
+      var DOCUMENTATION_OPTIONS = {
+        URL_ROOT:    './',
+        VERSION:     '3.6.1',
+        COLLAPSE_INDEX: false,
+        FILE_SUFFIX: '.html',
+        HAS_SOURCE:  true
+      };
+    </script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
+    <link rel="top" title="The Harvard Dataverse Network 3.6.1 documentation" href="index.html" />
+    <link rel="next" title="APIs Guide" href="dataverse-api-main.html" />
+    <link rel="prev" title="Installers Guide" href="dataverse-installer-main.html" /> 
+  </head>
+  <body>
+    <div class="header-wrapper">
+      <div class="header">
+        <div class="headertitle"><a
+          href="index.html">The Harvard Dataverse Network 3.6.1 documentation</a></div>
+        <div class="rel">
+          <a href="dataverse-installer-main.html" title="Installers Guide"
+             accesskey="P">previous</a> |
+          <a href="dataverse-api-main.html" title="APIs Guide"
+             accesskey="N">next</a> |
+          <a href="genindex.html" title="General Index"
+             accesskey="I">index</a>
+        </div>
+       </div>
+    </div>
+
+    <div class="content-wrapper">
+      <div class="content">
+        <div class="document">
+            
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body">
+            
+  <div class="section" id="dvn-developers-guide">
+<h1>DVN Developers Guide<a class="headerlink" href="#dvn-developers-guide" title="Permalink to this headline">¶</a></h1>
+<p>Please note: This guide was updated in October 2013 to reflect the switch
+from Ant to Maven in DVN 3.6.1.</p>
+<div class="section" id="build-environment-configuring-netbeans">
+<h2>Build Environment (Configuring NetBeans)<a class="headerlink" href="#build-environment-configuring-netbeans" title="Permalink to this headline">¶</a></h2>
+<p>This chapter describes setting up the build environment that you will
+need to build the DVN application from source code.</p>
+<div class="section" id="install-netbeans-and-glassfish">
+<h3>Install NetBeans and GlassFish<a class="headerlink" href="#install-netbeans-and-glassfish" title="Permalink to this headline">¶</a></h3>
+<p>As of DVN version 3.6.1 and the switch to Maven, a DVN development
+environment should not have any dependency on a particular IDE, but use
+of NetBeans 7.2.1 is encouraged because it&#8217;s the version used by most of
+the current developers (on Mac OS X).</p>
+<p>The NetBeans project is currently offering an installer bundle that
+contains both NetBeans 7.2.1 and a supported version of GlassFish
+(3.1.2.2). If they choose to discontinue the bundle, you will have to
+download and install the two packages separately. Note that you can have
+multiple versions of both NetBeans and GlassFish on your system.</p>
+<p>Please note: While we intend to investigate NetBeans 7.4 and GlassFish
+4, these are not yet known to provide a suitable development
+environment.</p>
+<p>We strongly recommend that you run both installs <strong>as a regular user</strong>.&nbsp;There&#8217;s no reason to run your development environment as root.</p>
+<div class="section" id="install-netbeans-bundle">
+<h4>Install NetBeans bundle<a class="headerlink" href="#install-netbeans-bundle" title="Permalink to this headline">¶</a></h4>
+<p>Download NetBeans 7.2.1 Java EE + GlassFish Open Source Edition 3.1.2.2
+bundle from <a class="reference external" href="https://netbeans.org/downloads/7.2.1">https://netbeans.org/downloads/7.2.1</a></p>
+<p>For Mac OS X, you will download a .dmg disk image that will open
+automatically and start the installer for you. Choose the typical
+installation but be sure to install GlassFish and JUnit when prompted.</p>
+<p>Note that you don&#8217;t have to uninstall your existing NetBeans version.
+You can have as many versions installed as you need in parallel.</p>
+<p>When you start NetBeans 7.2.1 for the first time, you will be asked if
+you want to import the settings from the previous installations. If you
+have an existing, pre-DVN 3.* development environment on your system,
+<strong>answer &#8220;no&#8221; &#8211; we want to create the new configuration from scratch.</strong></p>
+</div>
+<div class="section" id="if-you-have-to-install-glassfish-3-1-2-2">
+<h4>[If you have to] Install GlassFish 3.1.2.2<a class="headerlink" href="#if-you-have-to-install-glassfish-3-1-2-2" title="Permalink to this headline">¶</a></h4>
+<p>We <strong>strongly</strong> recommend that you install GlassFish Server 3.1.2.2,
+Open Source Edition, <strong>Full Platform</strong>. If you have to install it
+separately from NetBeans, it can be obtained from
+<a class="reference external" href="http://glassfish.java.net/downloads/3.1.2.2-final.html">http://glassfish.java.net/downloads/3.1.2.2-final.html</a></p>
+<p>The page above contains a link to the installation instructions, but the
+process is very straightforward - just download and run the installer.</p>
+<p>It is strongly recommended that you use Sun/Oracle Java JDK version 1.6.
+Please make sure you have the newest (or at least, recent) build number
+available for your platform. (On Mac OS X 10.8, since the JDK can be
+installed as part of the OS distribution, the version currently provided by
+Apple should be sufficient.) In other words, we do not recommend
+building DVN under JDK 1.7 until the ticket regarding the move from Java
+6 to 7 has been closed: <a class="reference external" href="https://redmine.hmdc.harvard.edu/issues/3306">https://redmine.hmdc.harvard.edu/issues/3306</a></p>
+<p>Note that you don&#8217;t have to uninstall older versions of GlassFish you
+may still have around. It&#8217;s ok to have multiple versions installed. But
+make sure you have the 3.1.2.2 installation selected as the active
+server in NetBeans.</p>
+<p><strong>Important:</strong> During the installation, leave the admin password fields
+blank. This is not a security risk since out of the box, GlassFish
+3.1.2.2 will only be accepting admin connections on the localhost
+interface. Choosing a password at this stage, however, will complicate
+the installation process unnecessarily. Since this is a development
+system, you can probably keep this configuration unchanged (admin on
+localhost only). If you need to be able to connect to the admin console
+remotely, please see the note in the Appendix section of the main
+Installers Guide.</p>
+</div>
+<div class="section" id="install-junit-if-you-haven-t-already">
+<h4>Install JUnit (if you haven&#8217;t already)<a class="headerlink" href="#install-junit-if-you-haven-t-already" title="Permalink to this headline">¶</a></h4>
+<p>Depending on how you installed NetBeans, you might already have JUnit
+installed. JUnit can be installed from Tools -&gt; Plugins.</p>
+</div>
+</div>
+<div class="section" id="check-out-a-new-copy-of-the-dvn-source-tree">
+<h3>Check out a new copy of the DVN source tree<a class="headerlink" href="#check-out-a-new-copy-of-the-dvn-source-tree" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="create-a-github-account-if-you-don-t-have-one-already">
+<h4>Create a GitHub account [if you don&#8217;t have one already]<a class="headerlink" href="#create-a-github-account-if-you-don-t-have-one-already" title="Permalink to this headline">¶</a></h4>
+<p>Sign up at <a class="reference external" href="https://github.com">https://github.com</a></p>
+<p>Please note that the primary audience of this guide (for now) is people who
+have push access to <a class="reference external" href="https://github.com/IQSS/dvn">https://github.com/IQSS/dvn</a> . If you do not have
+push access and want to contribute (and we hope you do!) please fork the
+repo per <a class="reference external" href="https://help.github.com/articles/fork-a-repo">https://help.github.com/articles/fork-a-repo</a> and make
+adjustments below when cloning the repo.</p>
+</div>
+<div class="section" id="set-up-an-ssh-keypair-if-you-haven-t-already">
+<h4>Set up an ssh keypair (if you haven&#8217;t already)<a class="headerlink" href="#set-up-an-ssh-keypair-if-you-haven-t-already" title="Permalink to this headline">¶</a></h4>
+<p>You <em>can</em> use git with passwords over HTTPS but it&#8217;s much nicer to set
+up SSH keys.</p>
+<p><a class="reference external" href="https://github.com/settings/ssh">https://github.com/settings/ssh</a> is the place to manage the ssh keys
+GitHub knows about for you. That page also links to a nice howto:
+<a class="reference external" href="https://help.github.com/articles/generating-ssh-keys">https://help.github.com/articles/generating-ssh-keys</a></p>
+<p>From the terminal, <tt class="docutils literal"><span class="pre">ssh-keygen</span></tt> will create new ssh keys for you:</p>
+<ul class="simple">
+<li>private key: <tt class="docutils literal"><span class="pre">~/.ssh/id_rsa</span></tt><ul>
+<li>It is <strong>very important to protect your private key</strong>. If someone
+else acquires it, they can access private repositories on GitHub
+and make commits as you! Ideally, you&#8217;ll store your ssh keys on an
+encrypted volume and protect your private key with a password when
+prompted for one by <tt class="docutils literal"><span class="pre">ssh-keygen</span></tt>. See also &#8220;Why do passphrases
+matter&#8221; at <a class="reference external" href="https://help.github.com/articles/generating-ssh-keys">https://help.github.com/articles/generating-ssh-keys</a></li>
+</ul>
+</li>
+<li>public key: <tt class="docutils literal"><span class="pre">~/.ssh/id_rsa.pub</span></tt></li>
+</ul>
+<p>After you&#8217;ve created your ssh keys, add the public key to your GitHub
+account.</p>
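+<p>A typical session looks like this (a sketch; on Mac OS X, <tt class="docutils literal"><span class="pre">pbcopy</span></tt> is a convenient way to copy the public key for pasting into GitHub):</p>
+<div class="highlight-guess"><div class="highlight"><pre># generate a new RSA keypair; choose a passphrase when prompted
+ssh-keygen -t rsa
+
+# copy the public key, then paste it at https://github.com/settings/ssh
+pbcopy &lt; ~/.ssh/id_rsa.pub
+</pre></div></div>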
+</div>
+<div class="section" id="clone-the-repo">
+<h4>Clone the repo<a class="headerlink" href="#clone-the-repo" title="Permalink to this headline">¶</a></h4>
+<p>Please see <a class="reference external" href="#branches">branches</a> for detail, but in short, the
+&#8220;develop&#8221; branch is where new commits go. Below we will assume you want
+to make commits to &#8220;develop&#8221;.</p>
+<p>In NetBeans, click Team, then Git, then Clone.</p>
+<div class="section" id="remote-repository">
+<h5>Remote Repository<a class="headerlink" href="#remote-repository" title="Permalink to this headline">¶</a></h5>
+<ul class="simple">
+<li>Repository URL: <tt class="docutils literal"><span class="pre">github.com:IQSS/dvn.git</span></tt></li>
+<li>Username: <tt class="docutils literal"><span class="pre">git</span></tt></li>
+<li>Private/Public Key<ul>
+<li>Private Key File: <tt class="docutils literal"><span class="pre">/Users/[YOUR_USERNAME]/.ssh/id_rsa</span></tt></li>
+</ul>
+</li>
+<li>Passphrase: (the passphrase you chose while running <tt class="docutils literal"><span class="pre">ssh-keygen</span></tt>)</li>
+</ul>
+<p>Click Next.</p>
+<p>If you are prompted about the authenticity of github.com&#8217;s RSA key fingerprint, answer &#8220;Yes&#8221; to continue connecting. GitHub&#8217;s RSA key fingerprint is listed at <a class="reference external" href="https://help.github.com/articles/generating-ssh-keys">https://help.github.com/articles/generating-ssh-keys</a></p>
+</div>
+<div class="section" id="remote-branches">
+<h5>Remote Branches<a class="headerlink" href="#remote-branches" title="Permalink to this headline">¶</a></h5>
+<p>Under Select Remote Branches check the &#8220;develop&#8221; branch.</p>
+<p>Please note: You may see other branches listed, such as &#8220;master&#8221;, but
+there is no need to check them out at this time.</p>
+<p>Click Next.</p>
+</div>
+<div class="section" id="destination-directory">
+<h5>Destination Directory<a class="headerlink" href="#destination-directory" title="Permalink to this headline">¶</a></h5>
+<p>The defaults should be fine:</p>
+<ul class="simple">
+<li>Parent Directory: <tt class="docutils literal"><span class="pre">/Users/[YOUR_USERNAME]/NetBeansProjects</span></tt></li>
+<li>Clone Name: <tt class="docutils literal"><span class="pre">dvn</span></tt></li>
+<li>Checkout Branch: <tt class="docutils literal"><span class="pre">develop*</span></tt></li>
+<li>Remote Name: <tt class="docutils literal"><span class="pre">origin</span></tt></li>
+</ul>
+<p>Click Finish.</p>
+<p>You should see a message that 3 projects were cloned. Click &#8220;Open
+Project&#8221;.</p>
+</div>
+</div>
+</div>
+<div class="section" id="open-projects">
+<h3>Open Projects<a class="headerlink" href="#open-projects" title="Permalink to this headline">¶</a></h3>
+<p>In the &#8220;Open Projects&#8221; dialog you should see three projects, DVN-lockss,
+DVN-root, and DVN-web (a child of DVN-root).</p>
+<p>Highlight DVN-root and check &#8220;Open Required&#8221; (to include DVN-web) and click &#8220;Open&#8221;.</p>
+<p>At this point, you should have two (and only two) projects open in
+NetBeans: DVN-root and DVN-web. If you hover over the projects, it&#8217;s
+normal at this point to see warnings such as &#8220;Some dependency artifacts
+are not in the local repository&#8221; or &#8220;Cannot find application server:
+GlassFish Server 3+&#8221;. We&#8217;ll correct these next.</p>
+</div>
+<div class="section" id="build-for-the-first-time">
+<h3>Build for the first time<a class="headerlink" href="#build-for-the-first-time" title="Permalink to this headline">¶</a></h3>
+<p>In NetBeans, right-click DVN-root and click &#8220;Build&#8221;. This will download
+many dependencies via Maven and may take several minutes.</p>
+<p>When this process has completed, right-click DVN-web and click &#8220;Build&#8221;.
+You should expect to see &#8220;BUILD SUCCESS&#8221;. This means you have
+successfully built the .war application package, but do not attempt to
+deploy the application just yet! We need to configure the server
+environment first, which consists of GlassFish and PostgreSQL.</p>
+</div>
+</div>
+<div class="section" id="application-environment-configuring-glassfish-and-postgresql">
+<h2>Application Environment (Configuring GlassFish and PostgreSQL)<a class="headerlink" href="#application-environment-configuring-glassfish-and-postgresql" title="Permalink to this headline">¶</a></h2>
+<p>In this chapter, we describe the process of setting up your own local
+application environment into which you will deploy the DVN application.</p>
+<div class="section" id="install-postgresql-database-server">
+<h3>Install PostgreSQL database server<a class="headerlink" href="#install-postgresql-database-server" title="Permalink to this headline">¶</a></h3>
+<p>For Mac OS X (our default development OS), you can get the installer
+from <a class="reference external" href="http://www.postgresql.org/download/macosx">http://www.postgresql.org/download/macosx</a></p>
+<p>The installation is very straightforward; just make sure you answer
+&#8220;yes&#8221; when asked if Postgres should be accepting network connections.
+(The application will be accessing the database at the &#8220;localhost&#8221;
+address).</p>
+<p>Once installed, we recommend that you also allow connections
+over local Unix sockets. This way the installer won&#8217;t have to ask you
+for the Postgres password every time it needs to talk to the database.
+To do so, modify the &#8220;local all all&#8221; line in the data/pg_hba.conf file
+to look like this:</p>
+<div class="line-block">
+<div class="line">local all all trust</div>
+</div>
+<p><strong>Note</strong> that this only opens Postgres to the local socket connections,
+and should not be considered a security risk. But if you are extra
+cautious, you may use instead:</p>
+<div class="line-block">
+<div class="line">local all all ident sameuser</div>
+</div>
+<p>Restart Postgres for the changes to take effect!</p>
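+<p>A sketch of the edit-and-restart sequence (the data directory path varies by installation; adjust accordingly):</p>
+<div class="highlight-guess"><div class="highlight"><pre># edit the "local all all" line as described above
+sudo vi /Library/PostgreSQL/9.x/data/pg_hba.conf
+
+# restart the server so the change takes effect
+sudo -u postgres pg_ctl -D /Library/PostgreSQL/9.x/data restart
+</pre></div></div>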
+<p>Please note: if you have any problems with the PostgreSQL setup, please
+ensure the right <tt class="docutils literal"><span class="pre">psql</span></tt> is in your <tt class="docutils literal"><span class="pre">$PATH</span></tt>.</p>
+<p>You can check the instructions in the main Installers Guide for more info:
+<a class="reference internal" href="dataverse-installer-main.html#postgresql"><em>PostgreSQL section</em></a>;
+but the above should be sufficient to get your environment set up.</p>
+</div>
+<div class="section" id="run-the-install-dev-script">
+<h3>Run the install-dev script<a class="headerlink" href="#run-the-install-dev-script" title="Permalink to this headline">¶</a></h3>
+<p>The installer is supplied with the DVN source in the tools directory.
+You must run it as root (for direct access to Postgres).</p>
+<div class="line-block">
+<div class="line">To run the script:</div>
+<div class="line"><tt class="docutils literal"><span class="pre">sudo</span> <span class="pre">su</span> <span class="pre">-</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">cd</span> <span class="pre">/Users/[YOUR_USERNAME]/NetBeansProjects/dvn/tools/installer/dvninstall</span></tt></div>
+</div>
+<div class="line-block">
+<div class="line">then execute</div>
+<div class="line"><tt class="docutils literal"><span class="pre">./install-dev</span></tt></div>
+</div>
+<p>When prompted for various settings, you will likely be able to accept
+all the default values (in a development environment, they are for the
+most part the same for everybody).</p>
+</div>
+<div class="section" id="testing-login">
+<h3>Testing login<a class="headerlink" href="#testing-login" title="Permalink to this headline">¶</a></h3>
+<p>Once the <tt class="docutils literal"><span class="pre">install-dev</span></tt> script has completed successfully, you will
+have a fully functional Dataverse Network server. After making sure
+GlassFish has been started per the output of the script, you should be
+able to log in to the DVN with these credentials:</p>
+<ul class="simple">
+<li><a class="reference external" href="http://localhost:8080/dvn/">http://localhost:8080/dvn/</a></li>
+<li>username: networkAdmin</li>
+<li>password: networkAdmin</li>
+</ul>
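+<p>A quick smoke test from the terminal (a sketch) before trying the browser:</p>
+<div class="highlight-guess"><div class="highlight"><pre># expect an HTTP success (or redirect) status once the application is deployed
+curl -I http://localhost:8080/dvn/
+</pre></div></div>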
+<p>Please note that when deploying from NetBeans for the first time, you
+will be prompted to select a deployment server. From the drop down,
+select &#8220;GlassFish Server 3.1.2&#8221;, click &#8220;Remember in Current IDE Session&#8221;
+and click &#8220;OK&#8221;.</p>
+</div>
+</div>
+<div class="section" id="developing-with-git">
+<h2>Developing with Git<a class="headerlink" href="#developing-with-git" title="Permalink to this headline">¶</a></h2>
+<div class="section" id="commit">
+<span id="id1"></span><h3>Commit<a class="headerlink" href="#commit" title="Permalink to this headline">¶</a></h3>
+<p><strong>Committing Changes</strong></p>
+<p>By following the instructions in the <em class="xref std std-ref">build</em> step, you
+should be in the &#8220;develop&#8221; branch, which is where we want to make
+commits as we work toward the next release.</p>
+<p>You can verify which branch you are on by clicking Team then &#8220;Repository
+Browser&#8221;.</p>
+<p>You should see <tt class="docutils literal"><span class="pre">dvn</span> <span class="pre">[develop]</span></tt> at the root of the tree and <strong>develop</strong>
+in bold under Branches -&gt; Local</p>
+<p>Click Team, then &#8220;Show Changes&#8221;. Select the desired files and
+right-click to commit.</p>
+<p>To publish your changes on GitHub, you&#8217;ll need to follow the next step:
+<a class="reference internal" href="#push"><em>push</em></a>.</p>
+</div>
+<div class="section" id="push">
+<span id="id2"></span><h3>Push<a class="headerlink" href="#push" title="Permalink to this headline">¶</a></h3>
+<p><strong>Pushing your commits to GitHub</strong></p>
+<p>After making your <a class="reference internal" href="#commit"><em>commit</em></a>, push it to GitHub by clicking
+Team -&gt; Remote -&gt; Push, then Next (to use your configured remote
+repository), then checking <strong>develop</strong> and Finish.</p>
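+<p>The command-line equivalent of the push described above is simply:</p>
+<div class="highlight-python"><pre>git push origin develop</pre>
+</div>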
+<p>Your commit should now appear on GitHub in the develop branch:
+<a class="reference external" href="https://github.com/IQSS/dvn/commits/develop">https://github.com/IQSS/dvn/commits/develop</a></p>
+<p>Your commit should <strong>not</strong> appear in the master branch on GitHub:
+<a class="reference external" href="https://github.com/IQSS/dvn/commits/master">https://github.com/IQSS/dvn/commits/master</a>. Not yet, anyway. We only
+merge commits into master when we are ready to release. Please see the
+<a class="reference external" href="#branches">branches</a> section for more detail.</p>
+</div>
+<div class="section" id="release">
+<h3>Release<a class="headerlink" href="#release" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="merge-develop-into-master">
+<h4>Merge develop into master<a class="headerlink" href="#merge-develop-into-master" title="Permalink to this headline">¶</a></h4>
+<div class="section" id="tag-the-release">
+<h5>Tag the release<a class="headerlink" href="#tag-the-release" title="Permalink to this headline">¶</a></h5>
+<p>Here is an example of how the 3.4 tag (
+<a class="reference external" href="https://github.com/IQSS/dvn/tree/3.4">https://github.com/IQSS/dvn/tree/3.4</a>) was created and pushed to GitHub:</p>
+<div class="highlight-guess"><div class="highlight"><pre><span class="nl">murphy:</span><span class="n">dvn</span> <span class="n">pdurbin</span><span class="err">$</span> <span class="n">git</span> <span class="n">branch</span>
+<span class="o">*</span> <span class="n">develop</span>
+  <span class="n">master</span>
+<span class="nl">murphy:</span><span class="n">dvn</span> <span class="n">pdurbin</span><span class="err">$</span> <span class="n">git</span> <span class="n">pull</span>
+<span class="n">Already</span> <span class="n">up</span><span class="o">-</span><span class="n">to</span><span class="o">-</span><span class="n">date</span><span class="p">.</span>
+<span class="nl">murphy:</span><span class="n">dvn</span> <span class="n">pdurbin</span><span class="err">$</span> <span class="n">git</span> <span class="n">checkout</span> <span class="n">master</span>
+<span class="n">Switched</span> <span class="n">to</span> <span class="n">branch</span> <span class="err">&#39;</span><span class="n">master</span><span class="err">&#39;</span>
+<span class="nl">murphy:</span><span class="n">dvn</span> <span class="n">pdurbin</span><span class="err">$</span> <span class="n">git</span> <span class="n">merge</span> <span class="n">develop</span>
+<span class="n">Updating</span> <span class="n">fdbfe57</span><span class="p">.</span><span class="mf">.6</span><span class="n">ceb24f</span>
+<span class="p">(</span><span class="n">snip</span><span class="p">)</span>
+ <span class="n">create</span> <span class="n">mode</span> <span class="mi">100644</span> <span class="n">tools</span><span class="o">/</span><span class="n">installer</span><span class="o">/</span><span class="n">dvninstall</span><span class="o">/</span><span class="n">readme</span><span class="p">.</span><span class="n">md</span>
+<span class="nl">murphy:</span><span class="n">dvn</span> <span class="n">pdurbin</span><span class="err">$</span> <span class="n">git</span> <span class="n">tag</span>
+<span class="mf">3.3</span>
+<span class="nl">murphy:</span><span class="n">dvn</span> <span class="n">pdurbin</span><span class="err">$</span> <span class="n">git</span> <span class="n">tag</span> <span class="o">-</span><span class="n">a</span> <span class="mf">3.4</span> <span class="o">-</span><span class="n">m</span> <span class="err">&#39;</span><span class="n">merged</span> <span class="n">develop</span><span class="p">,</span> <span class="n">tagging</span> <span class="n">master</span> <span class="n">as</span> <span class="mf">3.4</span><span class="err">&#39;</span>
+<span class="nl">murphy:</span><span class="n">dvn</span> <span class="n">pdurbin</span><span class="err">$</span> <span class="n">git</span> <span class="n">tag</span>
+<span class="mf">3.3</span>
+<span class="mf">3.4</span>
+<span class="nl">murphy:</span><span class="n">dvn</span> <span class="n">pdurbin</span><span class="err">$</span> <span class="n">git</span> <span class="n">push</span> <span class="n">origin</span> <span class="mf">3.4</span>
+<span class="n">Counting</span> <span class="n">objects</span><span class="o">:</span> <span class="mi">1</span><span class="p">,</span> <span class="n">done</span><span class="p">.</span>
+<span class="n">Writing</span> <span class="n">objects</span><span class="o">:</span> <span class="mi">100</span><span class="o">%</span> <span class="p">(</span><span class="mi">1</span><span class="o">/</span><span class="mi">1</span><span class="p">),</span> <span class="mi">182</span> <span class="n">bytes</span><span class="p">,</span> <span class="n">done</span><span class="p">.</span>
+<span class="n">Total</span> <span class="mi">1</span> <span class="p">(</span><span class="n">delta</span> <span class="mi">0</span><span class="p">),</span> <span class="n">reused</span> <span class="mi">0</span> <span class="p">(</span><span class="n">delta</span> <span class="mi">0</span><span class="p">)</span>
+<span class="n">To</span> <span class="n">git</span><span class="err">@</span><span class="n">github</span><span class="p">.</span><span class="n">com</span><span class="o">:</span><span class="n">IQSS</span><span class="o">/</span><span class="n">dvn</span><span class="p">.</span><span class="n">git</span>
+ <span class="o">*</span> <span class="p">[</span><span class="n">new</span> <span class="n">tag</span><span class="p">]</span>         <span class="mf">3.4</span> <span class="o">-&gt;</span> <span class="mf">3.4</span>
+<span class="nl">murphy:</span><span class="n">dvn</span> <span class="n">pdurbin</span><span class="err">$</span>
+<span class="nl">murphy:</span><span class="n">dvn</span> <span class="n">pdurbin</span><span class="err">$</span> <span class="n">git</span> <span class="n">push</span> <span class="n">origin</span> <span class="n">master</span>
+<span class="n">Total</span> <span class="mi">0</span> <span class="p">(</span><span class="n">delta</span> <span class="mi">0</span><span class="p">),</span> <span class="n">reused</span> <span class="mi">0</span> <span class="p">(</span><span class="n">delta</span> <span class="mi">0</span><span class="p">)</span>
+<span class="n">To</span> <span class="n">git</span><span class="err">@</span><span class="n">github</span><span class="p">.</span><span class="n">com</span><span class="o">:</span><span class="n">IQSS</span><span class="o">/</span><span class="n">dvn</span><span class="p">.</span><span class="n">git</span>
+   <span class="n">fdbfe57</span><span class="p">.</span><span class="mf">.6</span><span class="n">ceb24f</span>  <span class="n">master</span> <span class="o">-&gt;</span> <span class="n">master</span>
+<span class="nl">murphy:</span><span class="n">dvn</span> <span class="n">pdurbin</span><span class="err">$</span>
+</pre></div>
+</div>
+</div>
+<div class="section" id="make-release-available-for-download">
+<h5>Make release available for download<a class="headerlink" href="#make-release-available-for-download" title="Permalink to this headline">¶</a></h5>
+<p>On dvn-build:</p>
+<div class="highlight-guess"><div class="highlight"><pre><span class="n">cd</span> <span class="n">tools</span><span class="o">/</span><span class="n">installer</span>
+<span class="n">make</span> <span class="n">installer</span>
+</pre></div>
+</div>
+<p>Rename the resulting &#8220;dvninstall.zip&#8221; to include the release number
+(e.g., &#8220;dvninstall_v3_4.zip&#8221;) and upload it, the separate war file, a
+readme, and a buildupdate script (all these files should include the
+release number) to SourceForge (e.g.,
+<a class="reference external" href="http://sourceforge.net/projects/dvn/files/dvn/3.4/">http://sourceforge.net/projects/dvn/files/dvn/3.4/</a>).</p>
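+<p>The renaming step, for example, is just (the upload itself can be
+done through the SourceForge web interface):</p>
+<div class="highlight-python"><pre>mv dvninstall.zip dvninstall_v3_4.zip</pre>
+</div>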
+</div>
+<div class="section" id="increment-the-version-number">
+<h5>Increment the version number<a class="headerlink" href="#increment-the-version-number" title="Permalink to this headline">¶</a></h5>
+<p>The file to edit is:</p>
+<div class="line-block">
+<div class="line"><a class="reference external" href="https://github.com/IQSS/dvn/blob/develop/src/DVN-web/sr/VersionNumber.properties">https://github.com/IQSS/dvn/blob/develop/src/DVN-web/src/VersionNumber.properties</a></div>
+</div>
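+<p>A minimal sketch of the edit involved (the exact property key should
+be taken from the file itself; the name below is illustrative only):</p>
+<div class="highlight-python"><pre># src/DVN-web/src/VersionNumber.properties
+# bump the version for the next development cycle, e.g.:
+version.number=3.5</pre>
+</div>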
+</div>
+</div>
+</div>
+<div class="section" id="branches">
+<h3>Branches<a class="headerlink" href="#branches" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="current-list-of-branches">
+<h4>Current list of branches<a class="headerlink" href="#current-list-of-branches" title="Permalink to this headline">¶</a></h4>
+<p><a class="reference external" href="https://github.com/IQSS/dvn/branches">https://github.com/IQSS/dvn/branches</a></p>
+</div>
+<div class="section" id="new-branching-model-develop-vs-master">
+<h4>New branching model: develop vs. master<a class="headerlink" href="#new-branching-model-develop-vs-master" title="Permalink to this headline">¶</a></h4>
+<p>Please note that with the move to git, we are adopting the branching
+model described at
+<a class="reference external" href="http://nvie.com/posts/a-successful-git-branching-model/">http://nvie.com/posts/a-successful-git-branching-model/</a></p>
+<p>In this branching model there are two persistent branches:</p>
+<ul class="simple">
+<li>develop: where all new commits go</li>
+<li>master: where code gets merged and tagged as a release</li>
+</ul>
+<p>That is to say, <strong>please make your commits on the develop branch, not
+the master branch</strong>.</p>
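+<p>On the command line, you can confirm which branch you are on, and
+switch to develop if necessary, like this:</p>
+<div class="highlight-python"><pre>git branch          # the current branch is marked with an asterisk
+git checkout develop</pre>
+</div>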
+</div>
+<div class="section" id="feature-branches">
+<h4>Feature branches<a class="headerlink" href="#feature-branches" title="Permalink to this headline">¶</a></h4>
+<blockquote>
+<div>&#8220;The essence of a feature branch is that it exists as long as the
+feature is in development, but will eventually be merged back into
+develop (to definitely add the new feature to the upcoming release)
+or discarded (in case of a disappointing experiment).&#8221; &#8211;
+<a class="reference external" href="http://nvie.com/posts/a-successful-git-branching-model/">http://nvie.com/posts/a-successful-git-branching-model/</a></div></blockquote>
+</div>
+<div class="section" id="example-feature-branch-2656-lucene">
+<h4>Example feature branch: 2656-lucene<a class="headerlink" href="#example-feature-branch-2656-lucene" title="Permalink to this headline">¶</a></h4>
+<p>First, we create the branch and check it out:</p>
+<div class="highlight-python"><pre>murphy:dvn pdurbin$ git branch
+  2656-solr
+* develop
+murphy:dvn pdurbin$ git branch 2656-lucene
+murphy:dvn pdurbin$
+murphy:dvn pdurbin$ git branch
+  2656-lucene
+  2656-solr
+* develop
+murphy:dvn pdurbin$ git checkout 2656-lucene
+Switched to branch '2656-lucene'
+murphy:dvn pdurbin$
+murphy:dvn pdurbin$ git status
+# On branch 2656-lucene
+nothing to commit (working directory clean)
+murphy:dvn pdurbin$</pre>
+</div>
+<div class="line-block">
+<div class="line">Then, we make a change and a commit, and push it to:</div>
+</div>
+<div class="line-block">
+<div class="line"><a class="reference external" href="https://github.com/iqss/dvn/tree/2656-lucene">https://github.com/iqss/dvn/tree/2656-lucene</a> (creating a new remote branch):</div>
+</div>
+<div class="highlight-python"><pre>murphy:dvn pdurbin$ vim src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java
+murphy:dvn pdurbin$
+murphy:dvn pdurbin$ git commit -m 'start lucene faceting branch' src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java
+[2656-lucene 3b82f88] start lucene faceting branch
+ 1 file changed, 73 insertions(+), 2 deletions(-)
+murphy:dvn pdurbin$
+murphy:dvn pdurbin$ git push origin 2656-lucene
+Counting objects: 25, done.
+Delta compression using up to 8 threads.
+Compressing objects: 100% (10/10), done.
+Writing objects: 100% (13/13), 2.23 KiB, done.
+Total 13 (delta 6), reused 0 (delta 0)
+To git@github.com:IQSS/dvn.git
+ * [new branch]      2656-lucene -&gt; 2656-lucene
+murphy:dvn pdurbin$</pre>
+</div>
+<div class="line-block">
+<div class="line"><br /></div>
+</div>
+<p>As we work on the feature branch, we merge the latest changes from
+&#8220;develop&#8221;. We want to resolve conflicts in the feature branch itself so
+that the feature branch will merge cleanly into &#8220;develop&#8221; when we&#8217;re
+ready. In the example below, we use <tt class="docutils literal"><span class="pre">git</span> <span class="pre">mergetool</span></tt> and <tt class="docutils literal"><span class="pre">opendiff</span></tt>
+to resolve conflicts and save the merge. Then we push the newly-merged
+2656-lucene feature branch to GitHub:</p>
+<div class="line-block">
+<div class="line"><br /></div>
+</div>
+<div class="highlight-python"><pre>murphy:dvn pdurbin$ git branch
+* 2656-lucene
+  2656-solr
+  develop
+murphy:dvn pdurbin$ git checkout develop
+murphy:dvn pdurbin$ git branch
+  2656-lucene
+  2656-solr
+* develop
+murphy:dvn pdurbin$ git pull
+remote: Counting objects: 206, done.
+remote: Compressing objects: 100% (43/43), done.
+remote: Total 120 (delta 70), reused 96 (delta 46)
+Receiving objects: 100% (120/120), 17.65 KiB, done.
+Resolving deltas: 100% (70/70), completed with 40 local objects.
+From github.com:IQSS/dvn
+   8fd223d..9967413  develop    -&gt; origin/develop
+Updating 8fd223d..9967413
+Fast-forward
+ .../admin/EditNetworkPrivilegesServiceBean.java  |    5 +-
+(snip)
+ src/DVN-web/web/study/StudyFilesFragment.xhtml   |    2 +-
+ 12 files changed, 203 insertions(+), 118 deletions(-)
+murphy:dvn pdurbin$
+murphy:dvn pdurbin$ git checkout 2656-lucene
+Switched to branch '2656-lucene'
+murphy:dvn pdurbin$
+murphy:dvn pdurbin$
+murphy:dvn pdurbin$ git merge develop
+Auto-merging src/DVN-web/web/BasicSearchFragment.xhtml
+CONFLICT (content): Merge conflict in src/DVN-web/web/BasicSearchFragment.xhtml
+Auto-merging src/DVN-web/src/edu/harvard/iq/dvn/core/web/BasicSearchFragment.java
+Auto-merging src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java
+Automatic merge failed; fix conflicts and then commit the result.
+murphy:dvn pdurbin$
+murphy:dvn pdurbin$ git status
+# On branch 2656-lucene
+# Changes to be committed:
+#
+#       modified:   src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/admin/EditNetworkPrivilegesServiceBean.java
+(snip)
+#       new file:   src/DVN-web/web/admin/ChooseDataverseForCreateStudy.xhtml
+#       modified:   src/DVN-web/web/study/StudyFilesFragment.xhtml
+#
+# Unmerged paths:
+#   (use "git add/rm &lt;file&gt;..." as appropriate to mark resolution)
+#
+#       both modified:      src/DVN-web/web/BasicSearchFragment.xhtml
+#
+murphy:dvn pdurbin$ git mergetool
+merge tool candidates: opendiff kdiff3 tkdiff xxdiff meld tortoisemerge gvimdiff diffuse ecmerge p4merge araxis bc3 emerge vimdiff
+Merging:
+src/DVN-web/web/BasicSearchFragment.xhtml
+
+Normal merge conflict for 'src/DVN-web/web/BasicSearchFragment.xhtml':
+  {local}: modified file
+  {remote}: modified file
+Hit return to start merge resolution tool (opendiff):
+murphy:dvn pdurbin$
+murphy:dvn pdurbin$ git add .
+murphy:dvn pdurbin$
+murphy:dvn pdurbin$ git commit -m "Merge branch 'develop' into 2656-lucene"
+[2656-lucene 519cd8c] Merge branch 'develop' into 2656-lucene
+murphy:dvn pdurbin$
+murphy:dvn pdurbin$ git push origin 2656-lucene
+(snip)
+murphy:dvn pdurbin$</pre>
+</div>
+<div class="line-block">
+<div class="line">When we are ready to merge the feature branch back into the develop branch, we can do so.</div>
+</div>
+<div class="line-block">
+<div class="line">Here&#8217;s an example of merging the 2656-lucene branch back into develop:</div>
+</div>
+<div class="highlight-python"><pre>murphy:dvn pdurbin$ git checkout 2656-lucene
+Switched to branch '2656-lucene'
+murphy:dvn pdurbin$ git pull
+Already up-to-date.
+murphy:dvn pdurbin$ git checkout develop
+Switched to branch 'develop'
+murphy:dvn pdurbin$ git pull
+Already up-to-date.
+murphy:dvn pdurbin$ git merge 2656-lucene
+Removing lib/dvn-lib-EJB/lucene-core-3.0.0.jar
+Merge made by the 'recursive' strategy.
+ lib/dvn-lib-EJB/lucene-core-3.0.0.jar                                     |  Bin 1021623 -&gt; 0 bytes
+ lib/dvn-lib-EJB/lucene-core-3.5.0.jar                                     |  Bin 0 -&gt; 1466301 bytes
+ lib/dvn-lib-EJB/lucene-facet-3.5.0.jar                                    |  Bin 0 -&gt; 293582 bytes
+ src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/DvnQuery.java          |  160 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/IndexServiceBean.java  |   56 ++++++++++++++++++++
+ src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/IndexServiceLocal.java |   16 +++++-
+ src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java           |  432 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++--
+ src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/ResultsWithFacets.java |   71 +++++++++++++++++++++++++
+ src/DVN-web/src/SearchFieldBundle.properties                              |    4 +-
+ src/DVN-web/src/edu/harvard/iq/dvn/core/web/AdvSearchPage.java            |   86 +++++++++++++++++++++++++++++++
+ src/DVN-web/src/edu/harvard/iq/dvn/core/web/BasicSearchFragment.java      |  102 +++++++++++++++++++++++++++++++++++-
+ src/DVN-web/src/edu/harvard/iq/dvn/core/web/StudyListing.java             |   11 ++++
+ src/DVN-web/src/edu/harvard/iq/dvn/core/web/StudyListingPage.java         |  428 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++-
+ src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetResultUI.java      |   42 +++++++++++++++
+ src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetUI.java            |   62 ++++++++++++++++++++++
+ src/DVN-web/web/AdvSearchPage.xhtml                                       |    3 +-
+ src/DVN-web/web/BasicSearchFragment.xhtml                                 |    9 ++--
+ src/DVN-web/web/StudyListingPage.xhtml                                    |   43 +++++++++++-----
+ 18 files changed, 1500 insertions(+), 25 deletions(-)
+ delete mode 100644 lib/dvn-lib-EJB/lucene-core-3.0.0.jar
+ create mode 100644 lib/dvn-lib-EJB/lucene-core-3.5.0.jar
+ create mode 100644 lib/dvn-lib-EJB/lucene-facet-3.5.0.jar
+ create mode 100644 src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/DvnQuery.java
+ create mode 100644 src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/ResultsWithFacets.java
+ create mode 100644 src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetResultUI.java
+ create mode 100644 src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetUI.java
+murphy:dvn pdurbin$
+murphy:dvn pdurbin$ git status
+# On branch develop
+# Your branch is ahead of 'origin/develop' by 68 commits.
+#
+nothing to commit (working directory clean)
+murphy:dvn pdurbin$
+murphy:dvn pdurbin$ git push
+Counting objects: 51, done.
+Delta compression using up to 8 threads.
+Compressing objects: 100% (12/12), done.
+Writing objects: 100% (19/19), 1.41 KiB, done.
+Total 19 (delta 7), reused 0 (delta 0)
+To git@github.com:IQSS/dvn.git
+   b7fae01..2b88b68  develop -&gt; develop
+murphy:dvn pdurbin$</pre>
+</div>
+</div>
+<div class="section" id="switching-to-the-master-branch-to-merge-commits-from-the-develop-branch">
+<h4>Switching to the master branch to merge commits from the develop branch<a class="headerlink" href="#switching-to-the-master-branch-to-merge-commits-from-the-develop-branch" title="Permalink to this headline">¶</a></h4>
+<p>We should really only need to switch from the develop branch to the
+master branch as we prepare for a release.</p>
+<p>First, we check out the master branch by clicking Team -&gt; Git -&gt; Branch
+-&gt; Switch to Branch.</p>
+<p>Change Branch to &#8220;origin/master&#8221; and check the box for &#8220;Checkout as New
+Branch&#8221; and fill in &#8220;master&#8221; as the &#8220;Branch Name&#8221; to match the name of
+the branch we&#8217;re switching to. Then click &#8220;Switch&#8221;.</p>
+<p>Now, in the Git Repository Browser (from Team -&gt; Repository Browser) the
+root of the tree should say <tt class="docutils literal"><span class="pre">dvn</span> <span class="pre">[master]</span></tt> and you should see two
+branches under Branches -&gt; Local. <strong>master</strong> should be in bold and
+develop should not.</p>
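+<p>The command-line equivalent of the NetBeans steps above (a sketch,
+assuming &#8220;origin&#8221; is your configured remote) is:</p>
+<div class="highlight-python"><pre># create a local master branch tracking origin/master and switch to it
+git checkout -b master origin/master
+# on subsequent switches the local branch already exists:
+git checkout master</pre>
+</div>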
+</div>
+</div>
+<div class="section" id="tips">
+<h3>Tips<a class="headerlink" href="#tips" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="previewing-changes-before-a-pull">
+<h4>Previewing changes before a pull<a class="headerlink" href="#previewing-changes-before-a-pull" title="Permalink to this headline">¶</a></h4>
+<p>If the build fails overnight you may want to hold off on doing a pull
+until the problem is resolved. To preview what has changed since your
+last pull, you can do a <tt class="docutils literal"><span class="pre">git</span> <span class="pre">fetch</span></tt> (the first part of a pull) then
+<tt class="docutils literal"><span class="pre">git</span> <span class="pre">log</span> <span class="pre">HEAD..origin/develop</span></tt> to see the commit messages.
+<tt class="docutils literal"><span class="pre">git</span> <span class="pre">log</span> <span class="pre">-p</span></tt> or <tt class="docutils literal"><span class="pre">git</span> <span class="pre">diff</span></tt> will allow you to see the contents of the
+changes:</p>
+<div class="highlight-python"><pre>git checkout develop
+git fetch
+git log HEAD..origin/develop
+git log -p HEAD..origin/develop
+git diff HEAD..origin/develop</pre>
+</div>
+<p>After the build is working again, you can simply do a pull as normal.</p>
+</div>
+</div>
+<div class="section" id="errors">
+<h3>Errors<a class="headerlink" href="#errors" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="duplicate-class">
+<h4>Duplicate class<a class="headerlink" href="#duplicate-class" title="Permalink to this headline">¶</a></h4>
+<p>The error &#8220;duplicate class&#8221; can result whenever you resolve a merge
+conflict in git.</p>
+<p>The fix is to close NetBeans and delete (or move aside) the cache like
+this:</p>
+<div class="highlight-python"><pre>cd ~/Library/Caches/NetBeans
+mv 7.2.1 7.2.1.moved</pre>
+</div>
+<p>According to <a class="reference external" href="https://netbeans.org/bugzilla/show_bug.cgi?id=197983">https://netbeans.org/bugzilla/show_bug.cgi?id=197983</a> this might be fixed in NetBeans 7.3.</p>
+</div>
+</div>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+        </div>
+        <div class="sidebar">
+          <h3>Table Of Contents</h3>
+          <ul class="current">
+<li class="toctree-l1"><a class="reference internal" href="dataverse-user-main.html">User Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-installer-main.html">Installers Guide</a></li>
+<li class="toctree-l1 current"><a class="current reference internal" href="">DVN Developers Guide</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="#build-environment-configuring-netbeans">Build Environment (Configuring NetBeans)</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="#install-netbeans-and-glassfish">Install NetBeans and GlassFish</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#install-netbeans-bundle">Install NetBeans bundle</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#if-you-have-to-install-glassfish-3-1-2-2">[If you have to] Install GlassFish 3.1.2.2</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#install-junit-if-you-haven-t-already">Install JUnit (if you haven&#8217;t already)</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#check-out-a-new-copy-of-the-dvn-source-tree">Check out a new copy of the DVN source tree</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#create-a-github-account-if-you-don-t-have-one-already">Create a GitHub account [if you don&#8217;t have one already]</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#set-up-an-ssh-keypair-if-you-haven-t-already">Set up an ssh keypair (if you haven&#8217;t already)</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#clone-the-repo">Clone the repo</a><ul>
+<li class="toctree-l5"><a class="reference internal" href="#remote-repository">Remote Repository</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#remote-branches">Remote Branches</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#destination-directory">Destination Directory</a></li>
+</ul>
+</li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#open-projects">Open Projects</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#build-for-the-first-time">Build for the first time</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="#application-environment-configuring-glassfish-and-postgresql">Application Environment (Configuring GlassFish and PostgreSQL)</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="#install-postgresql-database-server">Install PostgreSQL database server</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#run-the-install-dev-script">Run the install-dev script</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#testing-login">Testing login</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="#developing-with-git">Developing with Git</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="#commit">Commit</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#push">Push</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#release">Release</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#merge-develop-into-master">Merge develop into master</a><ul>
+<li class="toctree-l5"><a class="reference internal" href="#tag-the-release">Tag the release</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#make-release-available-for-download">Make release available for download</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#increment-the-version-number">Increment the version number</a></li>
+</ul>
+</li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#branches">Branches</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#current-list-of-branches">Current list of branches</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#new-branching-model-develop-vs-master">New branching model: develop vs. master</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#feature-branches">Feature branches</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#example-feature-branch-2656-lucene">Example feature branch: 2656-lucene</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#switching-to-the-master-branch-to-merge-commits-from-the-develop-branch">Switching to the master branch to merge commits from the develop branch</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#tips">Tips</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#previewing-changes-before-a-pull">Previewing changes before a pull</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#errors">Errors</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#duplicate-class">Duplicate class</a></li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-api-main.html">APIs Guide</a></li>
+</ul>
+
+          <h3 style="margin-top: 1.5em;">Search</h3>
+          <form class="search" action="search.html" method="get">
+            <input type="text" name="q" />
+            <input type="submit" value="Go" />
+            <input type="hidden" name="check_keywords" value="yes" />
+            <input type="hidden" name="area" value="default" />
+          </form>
+          <p class="searchtip" style="font-size: 90%">
+            Enter search terms.
+          </p>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+    <div class="footer-wrapper">
+      <div class="footer">
+        <div class="left">
+          <a href="dataverse-installer-main.html" title="Installers Guide"
+             >previous</a> |
+          <a href="dataverse-api-main.html" title="APIs Guide"
+             >next</a> |
+          <a href="genindex.html" title="General Index"
+             >index</a>
+            <br/>
+            <a href="_sources/dataverse-developer-main.txt"
+               rel="nofollow">Show Source</a>
+        </div>
+
+        <div class="right">
+          
+    <div class="footer">
+        &copy; Copyright 1997-2013, President &amp; Fellows Harvard University.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2b1.
+    </div>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+  </body>
+</html>
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/dataverse-installer-main.html	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,1209 @@
+
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+    
+    <title>Installers Guide &mdash; The Harvard Dataverse Network 3.6.1 documentation</title>
+    
+    <link rel="stylesheet" href="_static/agogo.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    
+    <script type="text/javascript">
+      var DOCUMENTATION_OPTIONS = {
+        URL_ROOT:    './',
+        VERSION:     '3.6.1',
+        COLLAPSE_INDEX: false,
+        FILE_SUFFIX: '.html',
+        HAS_SOURCE:  true
+      };
+    </script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
+    <link rel="top" title="The Harvard Dataverse Network 3.6.1 documentation" href="index.html" />
+    <link rel="next" title="DVN Developers Guide" href="dataverse-developer-main.html" />
+    <link rel="prev" title="User Guide" href="dataverse-user-main.html" /> 
+  </head>
+  <body>
+    <div class="header-wrapper">
+      <div class="header">
+        <div class="headertitle"><a
+          href="index.html">The Harvard Dataverse Network 3.6.1 documentation</a></div>
+        <div class="rel">
+          <a href="dataverse-user-main.html" title="User Guide"
+             accesskey="P">previous</a> |
+          <a href="dataverse-developer-main.html" title="DVN Developers Guide"
+             accesskey="N">next</a> |
+          <a href="genindex.html" title="General Index"
+             accesskey="I">index</a>
+        </div>
+       </div>
+    </div>
+
+    <div class="content-wrapper">
+      <div class="content">
+        <div class="document">
+            
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body">
+            
+  <div class="section" id="installers-guide">
+<h1>Installers Guide<a class="headerlink" href="#installers-guide" title="Permalink to this headline">¶</a></h1>
+<p id="introduction"><strong>Introduction</strong></p>
+<p>This is our &#8220;new and improved&#8221; installation guide. It was first
+released with version 2.2.4 of the Dataverse Network application, when we
+introduced the new, automated, and much simplified installation process.
+As of February 2012, it has been updated to reflect the changes made in
+the newly released version 3.0 of the software. (Our existing users will
+notice, however, that the changes in the installation process have been
+fairly minimal.)</p>
+<p>The guide is intended for anyone who needs to install the DVN app,
+developers and Dataverse Network administrators alike.</p>
+<p>The top-down organization of the chapters and sections is that of
+increasing complexity. First a very basic, simple installation scenario
+is presented. The instructions are straightforward and only the required
+components are discussed. This use case will in fact be sufficient for
+most DVN developers and many Dataverse Network administrators. Chances
+are you are one of those users, so if you are brave by nature, you may stop
+reading this section and go straight to the <a class="reference internal" href="#quick-install"><em>“Quick Install”</em></a> chapter.</p>
+<p>The “basic” installation process described in the first chapter is
+fully automated: everything is performed by a single interactive script.
+This process has its limitations. It will likely work only on the
+supported platforms, and optional components need to be configured outside
+of the Installer (these are described in the &#8220;Optional Components&#8221;
+section).</p>
+<p>For the advanced user, we provide detailed explanations of all the
+steps performed by the Installer. This way he or she can experiment with
+individual configuration options, with maximum flexibility and control
+over the process. Yet we have tried to organize the advanced information in
+such a way that those who only need the most basic instructions will
+not have to read through it unnecessarily. Instead we provide them with
+an easy way to get a bare-bones configuration of the DVN up and running.</p>
+<p>If you are interested in practicing a DVN installation in a Vagrant
+environment you can later throw away, please follow the instructions at
+<a class="reference external" href="https://github.com/dvn/dvn-install-demo">https://github.com/dvn/dvn-install-demo</a> to spin up a Linux virtual
+machine on your laptop with <tt class="docutils literal"><span class="pre">vagrant</span> <span class="pre">up</span></tt>. When you are finished with
+this temporary DVN installation, you can delete the virtual machine with
+<tt class="docutils literal"><span class="pre">vagrant</span> <span class="pre">destroy</span></tt>.</p>
+<p>If you encounter any problems during installation, please contact the
+development team
+at <a class="reference external" href="mailto:support&#37;&#52;&#48;thedata&#46;org">support<span>&#64;</span>thedata<span>&#46;</span>org</a>
+or our <a class="reference external" href="https://groups.google.com/forum/?fromgroups#!forum/dataverse-community">Dataverse Users
+Community</a>.</p>
+<div class="section" id="quick-install">
+<span id="id1"></span><h2>Quick Install<a class="headerlink" href="#quick-install" title="Permalink to this headline">¶</a></h2>
+<p>For an experienced and/or rather bold user, this is a one-paragraph
+version of the installation instructions:</p>
+<p>This should work on RedHat and its derivatives, and MacOS X. If this
+does not describe your case, you will very likely have to install and
+configure at least some of the components manually. Meaning, you may
+consider reading through the chapters that follow! Still here? Great.
+Prerequisites: Sun/Oracle Java JDK 1.6_31+ and a “virgin” installation
+of Glassfish v3.1.2; PostgreSQL v8.3+, configured to listen to network
+connections and support password authentication on the localhost
+interface; you may need R as well. See the corresponding sections under
+“2. Prerequisites”, if necessary. Download the installer package from
+SourceForge:</p>
+<p><a class="reference external" href="http://sourceforge.net/projects/dvn/files/dvn">http://sourceforge.net/projects/dvn/files/dvn</a></p>
+<p>Choose the latest version and download the dvninstall zip file.</p>
+<p>Unzip the package in a temp location of your choice (this will create
+the directory <tt class="docutils literal"><span class="pre">dvninstall</span></tt>). Run the installer, as root:</p>
+<blockquote>
+<div><div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">cd</span> <span class="pre">dvninstall</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">./install</span></tt></div>
+</div>
+</div></blockquote>
+<p>Follow the installation prompts. If it all works as it should, you
+will have a working DVN instance running in about a minute from now.</p>
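+<p>One quick way to verify that the application has deployed (assuming
+GlassFish is listening on its default port, 8080) is:</p>
+<div class="highlight-python"><pre>curl -I http://localhost:8080/dvn/
+# an HTTP 200 response indicates the application is up</pre>
+</div>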
+<p>Has it worked? Awesome! Now you may read the rest of the guide
+chapters at a leisurely pace, to see if you need any of the
+optional components described there, and/or if you want to understand
+what exactly has just been done to your system.</p>
+</div>
+<div class="section" id="system-requirements">
+<h2>SYSTEM REQUIREMENTS<a class="headerlink" href="#system-requirements" title="Permalink to this headline">¶</a></h2>
+<p>Or rather, recommendations. The closer your configuration is to what’s
+outlined below, the easier it will be for the DVN team to provide
+support and answer your questions.</p>
+<ul class="simple">
+<li>Operating system - The production version of the Dataverse Network at
+IQSS (dvn.iq.harvard.edu) runs on RedHat Linux 5. Most of the DVN
+development is currently done on MacOS X. Because of our experience
+with RedHat and MacOS X, these are the recommended platforms. You
+should be able to deploy the application .ear file on any other
+platform that supports Java. However, the automated installer we
+provide will likely work on RedHat and MacOS only. Some information
+provided in this guide is specific to these 2 operating systems. (Any
+OS-specific instructions/examples will be clearly marked, for
+example: <tt class="docutils literal"><span class="pre">[MacOS-specific:]</span></tt>)</li>
+<li>CPU - The production IQSS Dataverse Network runs on generic,
+multi-core 64-bit processors.</li>
+<li>Memory - The application servers currently in production at the IQSS
+have 64 GB of memory each. Development and testing systems require a
+minimum of 2 gigabytes of memory.</li>
+<li>Disk space - How much disk space is required depends on the amount of
+data that you expect to serve. The IQSS Dataverse Network file system
+is a standalone NetApp with 2 TB volume dedicated to the DVN data.</li>
+<li>Multiple servers – All the DVN components can run on the same server.
+On a busy, hard-working production network the load can be split
+across multiple servers. The 3 main components, the application
+server (Glassfish), the database (Postgres) and R can each run on its
+own host. Furthermore, multiple application servers sharing the same
+database and R server(s)  can be set up behind a load balancer.
+Developers would normally run Glassfish and Postgres on their
+workstations locally and use a shared R server.</li>
+<li>If it actually becomes a practical necessity to bring up more servers
+to handle your production load, there are no universal instructions
+on how to best spread it across extra CPUs. It will depend on the
+specifics of your site, the nature of the data you serve and the
+needs of your users, whether you’ll benefit most from dedicating
+another server to run the database, or to serve R requests. Please
+see the discussion in the corresponding sections of the Prerequisites
+chapter.</li>
+</ul>
+</div>
+<div class="section" id="prerequisites">
+<span id="id2"></span><h2>PREREQUISITES<a class="headerlink" href="#prerequisites" title="Permalink to this headline">¶</a></h2>
+<p>In this chapter, an emphasis is made on clearly identifying the
+components that are absolutely required for every installation and on
+marking any advanced, optional instructions as such.</p>
+<div class="section" id="glassfish">
+<h3>Glassfish<a class="headerlink" href="#glassfish" title="Permalink to this headline">¶</a></h3>
+<p>Version 3.1.2 is required.</p>
+<p>Make sure you have <strong>Sun/Oracle Java JDK version 1.6, build 31</strong>
+or newer. It is available from
+<a class="reference external" href="http://www.oracle.com/technetwork/java/javase/downloads/index.html">http://www.oracle.com/technetwork/java/javase/downloads/index.html</a>.</p>
+<p><strong>[note for developers:]</strong></p>
+<p>If you are doing this installation as part of your DVN software
+development setup: The version of NetBeans currently in use by the DVN
+team is 7.0.1, and it is recommended that you use this same version if
+you want to participate in the development. As of writing of this
+manual, NetBeans 7.0.1 installer bundle comes with an older version of
+Glassfish. So you will have to install Glassfish version 3.1.2
+separately, and then select it as the default server for your NetBeans
+project.</p>
+<p><strong>[/note for developers]</strong></p>
+<p>We <strong>strongly</strong> recommend that you install GlassFish Server 3.1.2,
+Open Source Edition, <strong>Full Platform</strong>. You are very likely to run into
+installation issues if you attempt to run the installer and get the
+application to work with a different version! Simply transitioning from
+3.1.1 to 3.1.2 turned out to be a surprisingly complex undertaking,
+hence this recommendation to all other installers and developers to stay
+with the same version.</p>
+<p>It can be obtained from</p>
+<p><a class="reference external" href="http://glassfish.java.net/downloads/3.1.2-final.html">http://glassfish.java.net/downloads/3.1.2-final.html</a></p>
+<p>The page contains a link to the installation instructions. However,
+the process is completely straightforward. You are given 2 options for
+the format of the installer package. We recommend that you choose to
+download it as a shell archive; you will need to change its executable
+permission, with <tt class="docutils literal"><span class="pre">chmod</span> <span class="pre">+x</span></tt>, and then run it, as root:</p>
+<p><tt class="docutils literal"><span class="pre">./installer-filename.sh</span></tt></p>
+<p><strong>[Important:]</strong></p>
+<p>Leave the admin password fields blank. This is not a security risk,
+since out of the box, Glassfish will only be accepting admin connections
+on the localhost interface. Choosing a password at this stage, however, will
+complicate the installation process unnecessarily. If this is a
+developer&#8217;s installation, you can probably keep this configuration
+unchanged (admin on localhost only). If you need to be able to connect
+to the admin console remotely, please see the note in the Appendix
+section of the manual.</p>
+<p><strong>[/Important]</strong></p>
+<div class="line-block">
+<div class="line"><strong>[Advanced:]</strong></div>
+<div class="line"><strong>[Unix-specific:`]</strong></div>
+</div>
+<p>The installer shell script will normally attempt to run in graphical
+mode. If you are installing this on a remote Unix server, this will
+require X Windows support on your local workstation. If for whatever
+reason it&#8217;s not available, you have the option of running it in a <em>silent
+mode</em> - check the download page, above, for more information.</p>
+<div class="line-block">
+<div class="line"><strong>[/Unix-specific]</strong></div>
+<div class="line"><strong>[/Advanced]</strong></div>
+</div>
+</div>
+<div class="section" id="postgresql">
+<span id="id3"></span><h3>PostgreSQL<a class="headerlink" href="#postgresql" title="Permalink to this headline">¶</a></h3>
+<div class="line-block">
+<div class="line"><strong>Version 8.3 or higher is required.</strong></div>
+<div class="line">Installation instructions specific to RedHat Linux and MacOS X are</div>
+<div class="line">provided below.</div>
+<div class="line">Once the database server is installed, you&#8217;ll need to configure access</div>
+<div class="line">control to suit your installation.</div>
+<div class="line">Note that any modifications to the configuration files above require you to restart Postgres:</div>
+<div class="line"><tt class="docutils literal"><span class="pre">service</span> <span class="pre">postgresql</span> <span class="pre">restart</span></tt> (RedHat)</div>
+</div>
+<div class="line-block">
+<div class="line">or</div>
+<div class="line">&#8220;Restart Server&#8221; under Applications -&gt; PostgreSQL (MacOS X)</div>
+</div>
+<p>By default, most Postgres distributions are configured to listen to network connections on the localhost interface only, and to support only ident for authentication. (The MacOS installer may ask you if network connections should be allowed - answer &#8220;yes&#8221;). At a minimum, if GlassFish is running on the same host, it will also need to allow password authentication on localhost. So you will need to modify the &#8220;<tt class="docutils literal"><span class="pre">host</span> <span class="pre">all</span> <span class="pre">all</span> <span class="pre">127.0.0.1</span></tt>&#8221; line in your <tt class="docutils literal"><span class="pre">/var/lib/pgsql/data/pg_hba.conf</span></tt> so that it looks like this:</p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">host</span> <span class="pre">all</span> <span class="pre">all</span> <span class="pre">127.0.0.1/32</span> <span class="pre">password</span></tt></div>
+</div>
+<p>Also, the installer script needs to have direct access to the local PostgreSQL server via Unix domain sockets, so this needs to be set to either &#8220;trust&#8221; or &#8220;ident&#8221;. I.e., your <strong>pg_hba.conf</strong> must contain one of the two lines below:</p>
+<div class="line-block">
+<div class="line"><strong>local   all  all   ident    sameuser</strong></div>
+<div class="line">or</div>
+<div class="line"><strong>local   all  all  trust</strong></div>
+</div>
+<p>(&#8220;ident&#8221; is the default setting; but if it has been changed to
+&#8220;password&#8221; or &#8220;md5&#8221;, etc. on your system, Postgres will keep prompting
+you for the master password throughout the installation)</p>
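+<p>Putting the two requirements together, a minimal sketch of the
+relevant <tt class="docutils literal"><span class="pre">pg_hba.conf</span></tt> lines
+for a single-host installation would be:</p>
+<div class="highlight-python"><pre># Unix-socket connections, used directly by the installer script:
+local   all   all                        ident    sameuser
+# password-authenticated connections from GlassFish on localhost:
+host    all   all   127.0.0.1/32         password</pre>
+</div>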
+<p><strong>[optional:]</strong></p>
+<p>If GlassFish will be accessing the database remotely, add or modify the following line in your <tt class="docutils literal"><span class="pre">&lt;POSTGRES</span> <span class="pre">DIR&gt;/data/postgresql.conf</span></tt>:</p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">listen_addresses='*'</span></tt></div>
+</div>
+<p>to enable network connections on all interfaces; and add the following
+line to <tt class="docutils literal"><span class="pre">pg_hba.conf</span></tt>:</p>
+<div class="line-block">
+<div class="line">host       all      all      <tt class="docutils literal"><span class="pre">[ADDRESS]</span>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; <span class="pre">255.255.255.255</span> <span class="pre">password</span></tt></div>
+</div>
+<div class="line-block">
+<div class="line">where <tt class="docutils literal"><span class="pre">[ADDRESS]</span></tt> is the numeric IP address of the GlassFish server.</div>
+<div class="line">Using the subnet notation above you can enable authorization for multiple hosts on | your network. For example,</div>
+</div>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">host</span> <span class="pre">all</span> <span class="pre">all</span> <span class="pre">140.247.115.0</span> <span class="pre">255.255.255.0</span> <span class="pre">password</span></tt></div>
+</div>
+<div class="line-block">
+<div class="line">will permit password-authenticated connections from all hosts on the <tt class="docutils literal"><span class="pre">140.247.115.*</span></tt> subnet.</div>
+<div class="line"><strong>[/optional:]</strong></div>
+</div>
+<div class="line-block">
+<div class="line"><br /></div>
+<div class="line"><strong>[RedHat-specific:]</strong></div>
+<div class="line"><strong>[Advanced:]</strong></div>
+</div>
+<p>Please note that the instructions below are meant for users who have some experience with basic RedHat admin tasks. You should be safe to proceed if an instruction such as “uninstall the postgres rpms” makes sense to you immediately. I.e., if you already know how to install or uninstall an rpm package. Otherwise we recommend that you contact your systems administrator.</p>
+<p>For RedHat (and relatives), version 8.4 is now part of the distribution. As of RedHat 5, the default <tt class="docutils literal"><span class="pre">postgresql</span></tt> rpm is still version 8.1. So you may have to uninstall the <tt class="docutils literal"><span class="pre">postgresql</span></tt> rpms, then get the ones for version 8.4:</p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">yum</span> <span class="pre">install</span> <span class="pre">postgresql84</span> <span class="pre">postgresql84-server</span></tt></div>
+</div>
+<p>Before you start the server for the first time with</p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">service</span> <span class="pre">postgresql</span> <span class="pre">start</span></tt></div>
+</div>
+<p>You will need to populate the initial database with</p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">service</span> <span class="pre">postgresql</span> <span class="pre">initdb</span></tt></div>
+</div>
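+<p>Taken together, the RedHat sequence looks like this (run as root):</p>
+<div class="highlight-python"><pre>yum install postgresql84 postgresql84-server
+service postgresql initdb
+service postgresql start</pre>
+</div>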
+<div class="line-block">
+<div class="line"><strong>[/advanced]</strong></div>
+<div class="line"><strong>[/RedHat-specific]</strong></div>
+</div>
+<p><strong>[MacOS-specific:]</strong></p>
+<p>Postgres Project provides a one click installer for Mac OS X 10.4 and
+above at
+<a class="reference external" href="http://www.postgresql.org/download/macosx">http://www.postgresql.org/download/macosx</a>.
+Fink and MacPorts packages are also available.</p>
+<p><strong>[/MacOS-specific]</strong></p>
+<div class="line-block">
+<div class="line"><strong>[advanced:]</strong></div>
+<div class="line"><strong>[optional:]</strong></div>
+</div>
+<p>See the section <a class="reference internal" href="#postgresql-setup"><em>PostgreSQL setup</em></a> in the Appendix for a description of the steps that the automated installer takes to set up PostgreSQL for use with the DVN.</p>
+<div class="line-block">
+<div class="line"><strong>[/optional]</strong></div>
+<div class="line"><strong>[/advanced]</strong></div>
+</div>
+</div>
+<div class="section" id="r-and-rserve">
+<span id="id4"></span><h3>R and RServe<a class="headerlink" href="#r-and-rserve" title="Permalink to this headline">¶</a></h3>
+<p>Strictly speaking, R is an optional component. You can bring up a
+running DVN instance without it. The automated installer will allow such
+an installation, with a warning. Users of this Dataverse Network will be
+able to upload and share some data. Only the advanced modes of serving
+quantitative data to the users require R. Please consult
+the <a class="reference internal" href="#do-you-need-r"><em>&#8220;Do you need R?&#8221;</em></a> section in the Appendix for an extended discussion of this.</p>
+<div class="line-block">
+<div class="line"><strong>Installation instructions:</strong></div>
+</div>
+<p>Install the latest version of R from your favorite CRAN mirror (refer to <a class="reference external" href="http://cran.r-project.org/">http://cran.r-project.org/</a> for more information). Depending on your OS distribution, this may be as simple as typing</p>
+<div class="line-block">
+<div class="line"><strong>[RedHat/Linux-specific:]</strong></div>
+</div>
+<p><tt class="docutils literal"><span class="pre">yum</span> <span class="pre">install</span> <span class="pre">R</span> <span class="pre">R-devel</span></tt></p>
+<p>(for example, the above line will work in CentOS out of the box; in RedHat, you will have to add support for EPEL repository &#8211; see
+<a class="reference external" href="http://fedoraproject.org/wiki/EPEL">http://fedoraproject.org/wiki/EPEL</a>
+&#8211; then run the <tt class="docutils literal"><span class="pre">yum</span> <span class="pre">install</span></tt> command)</p>
+<div class="line-block">
+<div class="line"><strong>[/RedHat/Linux-specific]</strong></div>
+</div>
+<p>Please make sure to install the &#8220;devel&#8221; package too! You will need it
+to build the extra R modules.</p>
+<p>Once you have R installed, download the package <tt class="docutils literal"><span class="pre">dvnextra.tar</span></tt> from this location:</p>
+<p><a class="reference external" href="http://dvn.iq.harvard.edu/dist/R/dvnextra.tar">http://dvn.iq.harvard.edu/dist/R/dvnextra.tar</a></p>
+<p>Unpack the archive:</p>
+<p><tt class="docutils literal"><span class="pre">tar</span> <span class="pre">xvf</span> <span class="pre">dvnextra.tar</span></tt></p>
+<p>then run the supplied installation shell script as root:</p>
+<div class="line-block">
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">cd</span> <span class="pre">dvnextra</span></tt></div>
+</div>
+<div class="line"><tt class="docutils literal"><span class="pre">./installModules.sh</span></tt></div>
+</div>
+<p>This will install a number of R modules needed by the DVN to run statistics and analysis, some from CRAN and some supplied in the bundle; it will also configure Rserve to run locally on your system and install some startup files that the DVN will need.</p>
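+<p>A simple way to confirm that Rserve came up after the script
+finishes (it listens on TCP port 6311 by default) is sketched below:</p>
+<div class="highlight-python"><pre># is the Rserve process running?
+ps ax | grep -i rserve
+# is anything listening on the default Rserve port?
+netstat -an | grep 6311</pre>
+</div>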
+<p><strong>Please note that the DVN application requires specific versions of the 3rd-party R packages. For example, if you obtain and install the version of Zelig package currently available from CRAN, it will not work with the application. This is why we distribute the sources of the correct versions in this tar package.</strong></p>
+<div class="line-block">
+<div class="line"><strong>[advanced:]</strong></div>
+<div class="line">We haven’t had much experience with R on any platforms other than RedHat-and-the-like. Our developers use MacOS X, but point their DVN instances to a shared server running Rserve under RedHat.</div>
+</div>
+<p>The R project ports their distribution to a wide range of platforms. However, the installer shell script above will only run on Unix; and is not really guaranteed to work on anything other than RedHat. If you have some experience with either R or system administration, you should be able to use the script as a guide to re-create the configuration steps on any other platform quite easily. You will, however, be entirely on your own while embarking on that adventure.
+<strong>[/advanced]</strong></p>
+</div>
+<div class="section" id="system-configuration">
+<h3>System Configuration<a class="headerlink" href="#system-configuration" title="Permalink to this headline">¶</a></h3>
+<p><strong>[Advanced/optional:]</strong></p>
+<p>Many modern OS distributions come pre-configured so that all the
+network ports are firewalled off by default.</p>
+<p>Depending on the configuration of your server, you may need to open some
+of the following ports.</p>
+<p>On a developer&#8217;s personal workstation, the user would normally access his
+or her DVN instance on the localhost interface. So no open ports are
+required unless you want to give access to your DVN to another
+user/developer.</p>
+<p>When running a DVN that is meant to be accessible by network users: at a
+minimum, if all the components are running on the same server, the HTTP
+port 80 needs to be open. You may also want to open TCP 443, to be able
+to access the Glassfish admin console remotely.</p>
+<p>If the DVN is running its own HANDLE.NET server (see Chapter 4,
+&#8220;Optional Components&#8221;), TCP port 8000 and TCP/UDP port 2641 are
+also needed.</p>
+<p>If the DVN application needs to talk to PostgreSQL and/or Rserve running
+on remote hosts, the TCP ports 5432 and 6311, respectively, need to be
+open there.</p>
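+<p>For example, on a RedHat-like system using iptables, the ports mentioned above could be opened roughly as follows (a sketch only; adapt it to your firewall setup and open only the ports you actually need):</p>
+<div class="highlight-guess"><div class="highlight"><pre># HTTP (and HTTPS, if used):
+iptables -I INPUT -p tcp --dport 80 -j ACCEPT
+iptables -I INPUT -p tcp --dport 443 -j ACCEPT
+# HANDLE.NET server, if running one locally:
+iptables -I INPUT -p tcp --dport 8000 -j ACCEPT
+iptables -I INPUT -p tcp --dport 2641 -j ACCEPT
+iptables -I INPUT -p udp --dport 2641 -j ACCEPT
+# on remote PostgreSQL / Rserve hosts only:
+iptables -I INPUT -p tcp --dport 5432 -j ACCEPT
+iptables -I INPUT -p tcp --dport 6311 -j ACCEPT
+service iptables save
+</pre></div></div>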
+<p><strong>[/Advanced/optional]</strong></p>
+</div>
+</div>
+<div class="section" id="running-the-installer">
+<h2>RUNNING THE INSTALLER<a class="headerlink" href="#running-the-installer" title="Permalink to this headline">¶</a></h2>
+<p>Once the <a class="reference internal" href="#prerequisites"><em>Prerequisites</em></a> have been taken care of, the DVN application can be installed.</p>
+<p>The installer package can be downloaded from our repository on SourceForge at</p>
+<p><a class="reference external" href="http://sourceforge.net/projects/dvn/files/dvn/3.0/dvninstall_v3_0.zip">http://sourceforge.net/projects/dvn/files/dvn/3.0/dvninstall_v3_0.zip</a></p>
+<div class="line-block">
+<div class="line">Unzip the package in a temp location of your choice (this will create the directory <tt class="docutils literal"><span class="pre">dvninstall</span></tt>). Run the installer, as root:</div>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">cd</span> <span class="pre">dvninstall</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">./install</span></tt></div>
+</div>
+</div>
+<p>Follow the installation prompts. The installer will first verify the contents of the package and check if the required components
+(in <a class="reference internal" href="#prerequisites"><em>Prerequisites</em></a>) are present on the system. Then it will lead you through the application setup.</p>
+<div class="line-block">
+<div class="line"><strong>[Advanced:]</strong></div>
+</div>
+<p>The limitations of the installer package:</p>
+<p>Some extra configuration steps will be required if the PostgreSQL database is being set up on a remote server.</p>
+<p>It will most likely only work on the supported platforms, RedHat and Mac OS X.</p>
+<p>It is only guaranteed to work on a fresh Glassfish installation. If you already have more than one Glassfish domain created and/or have applications other than the DVN running under Glassfish, please consult the <a class="reference internal" href="#what-does-the-intstaller-do"><em>&#8220;What does the Installer do?&#8221;</em></a> section.</p>
+<p>It does not install any of the optional components (<a class="reference internal" href="#optional-components"><em>see Chapter 4</em></a>.)</p>
+<p>For the detailed explanation of the tasks performed by the Installer, see the <a class="reference internal" href="#what-does-the-intstaller-do"><em>&#8220;What does the Installer do?&#8221;</em></a> section.</p>
+<div class="line-block">
+<div class="line"><strong>[/Advanced]</strong></div>
+</div>
+</div>
+<div class="section" id="optional-components">
+<span id="id5"></span><h2>Optional Components<a class="headerlink" href="#optional-components" title="Permalink to this headline">¶</a></h2>
+<div class="section" id="recaptcha-bot-blocker">
+<h3>reCAPTCHA bot blocker<a class="headerlink" href="#recaptcha-bot-blocker" title="Permalink to this headline">¶</a></h3>
+<p>We found that our “email us” feature can be abused to send spam
+messages. You can choose to use the reCAPTCHA filter to help prevent
+this. Configure the filter as follows:</p>
+<ol class="arabic">
+<li><div class="first line-block">
+<div class="line">Go to reCAPTCHA web site at</div>
+<div class="line"><a class="reference external" href="http://recaptcha.net/">http://recaptcha.net/</a></div>
+<div class="line">and sign up for an account.</div>
+<div class="line">Register your website domain to acquire a public/private CAPTCHA key pair.</div>
+<div class="line">Record this information in a secure location.</div>
+</div>
+</li>
+<li><p class="first">Insert the public/private key pair and domain for your reCAPTCHA
+account into the <tt class="docutils literal"><span class="pre">captcha</span></tt> table of the DVN PostgreSQL database.
+Use <tt class="docutils literal"><span class="pre">psql</span></tt>, <tt class="docutils literal"><span class="pre">pgadmin</span></tt> or any other database utility; the SQL
+query will look like this (see also the example after this list):
+<tt class="docutils literal"><span class="pre">INSERT</span> <span class="pre">INTO</span> <span class="pre">captcha</span> <span class="pre">(publickey,</span> <span class="pre">domainname,</span> <span class="pre">privatekey)</span> <span class="pre">VALUES</span> <span class="pre">('sample',</span> <span class="pre">'sample.edu',</span> <span class="pre">'sample')</span></tt></p>
+</li>
+<li><p class="first">Verify that the Report Issue page is now showing the reCAPTCHA
+challenge.</p>
+</li>
+</ol>
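+<p>For example, the insert from step 2 can be run directly with <tt class="docutils literal"><span class="pre">psql</span></tt> (a sketch; the database name <tt class="docutils literal"><span class="pre">dvnDb</span></tt> and the key values are placeholders for your own):</p>
+<div class="highlight-guess"><div class="highlight"><pre># run as a user with access to the DVN database
+psql -d dvnDb -c &quot;INSERT INTO captcha (publickey, domainname, privatekey) VALUES ('sample', 'sample.edu', 'sample');&quot;
+</pre></div></div>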
+</div>
+<div class="section" id="google-analytics">
+<h3>Google Analytics<a class="headerlink" href="#google-analytics" title="Permalink to this headline">¶</a></h3>
+<p>Network Admins can use the Google Analytics tools to view Dataverse Network website usage statistics.</p>
+<p>Note: It takes about 24 hours for Google Analytics to start monitoring
+your website after the registration.</p>
+<div class="line-block">
+<div class="line"><br /></div>
+<div class="line">To enable the use of Google Analytics:</div>
+</div>
+<ol class="arabic">
+<li><p class="first">Go to the Google Analytics homepage at
+<a class="reference external" href="http://www.google.com/analytics/indexu.html">http://www.google.com/analytics/indexu.html</a>.</p>
+</li>
+<li><p class="first">Set up a Google Analytics account and obtain a tracking code for your Dataverse Network installation.</p>
+</li>
+<li><p class="first">Use the Google Analytics Help Center to find out how to add the tracking code to the content you serve.</p>
+</li>
+<li><dl class="first docutils">
+<dt>Configure the DVN to use the tracking key (obtained in Step 2,</dt>
+<dd><p class="first">above), by setting the <tt class="docutils literal"><span class="pre">dvn.googleanalytics.key</span></tt> JVM option in
+Glassfish.</p>
+<p>This can be done by adding the following directly to the
+<tt class="docutils literal"><span class="pre">domain.xml</span></tt> config file (for example: <tt class="docutils literal"><span class="pre">/usr/local/glassfish/domains/domain1/config/domain.xml</span></tt>):
+<tt class="docutils literal"><span class="pre">&lt;jvm-options&gt;-Ddvn.googleanalytics.key=XX-YYY&lt;/jvm-options&gt;</span></tt> (this will require a Glassfish restart; see also the command-line example after this list)</p>
+<p class="last">Or by using the Glassfish Admin Console configuration GUI. Consult the “Glassfish Configuration” section in the Appendix.</p>
+</dd>
+</dl>
+</li>
+</ol>
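+<p>As an alternative to editing <tt class="docutils literal"><span class="pre">domain.xml</span></tt> by hand, the JVM option can be added with the asadmin utility (a sketch; XX-YYY stands for your actual tracking code):</p>
+<div class="highlight-guess"><div class="highlight"><pre>asadmin create-jvm-options &quot;-Ddvn.googleanalytics.key=XX-YYY&quot;
+# a restart is required for the option to take effect:
+asadmin stop-domain domain1
+asadmin start-domain domain1
+</pre></div></div>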
+<p>Once installed and activated, the usage statistics can be accessed from
+the Network Options of the DVN.</p>
+</div>
+<div class="section" id="imagemagick">
+<h3>ImageMagick<a class="headerlink" href="#imagemagick" title="Permalink to this headline">¶</a></h3>
+<p>When image files are ingested into a DVN, the application
+automatically creates small &#8220;thumbnail&#8221; versions to display on the
+Files View page. These thumbnails are generated once, then cached for
+future use.</p>
+<p>Normally, the standard Java image manipulation libraries are used to
+do the scaling. If you have studies with large numbers of large
+images, generating the thumbnails may become a time-consuming task. If
+you notice that the Files view takes a long time to load for the first
+time because of the images, it is possible to improve the
+performance by installing the <tt class="docutils literal"><span class="pre">ImageMagick</span></tt> package. If it is
+installed, the application will automatically use its
+<tt class="docutils literal"><span class="pre">/usr/bin/convert</span></tt> utility to do the resizing, which appears to be
+significantly faster than the Java code.</p>
+<p><tt class="docutils literal"><span class="pre">ImageMagick</span></tt> is available for, or even comes pre-installed with, most popular OS distributions.</p>
+<div class="line-block">
+<div class="line"><strong>&lt;RedHat-Specific:&gt;</strong></div>
+</div>
+<p>It is part of the full RedHat Linux distribution, although it is not
+included in the default &#8220;server&#8221; configuration. It can be installed on a
+RedHat server with the <tt class="docutils literal"><span class="pre">yum</span> <span class="pre">install</span> <span class="pre">ImageMagick</span></tt> command.</p>
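+<p>A quick sanity check that the utility the DVN will look for is actually in place:</p>
+<div class="highlight-guess"><div class="highlight"><pre>yum install ImageMagick
+# the application will look for the convert utility at this location:
+/usr/bin/convert -version
+</pre></div></div>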
+<p><strong>&lt;/RedHat-Specific&gt;</strong></p>
+</div>
+<div class="section" id="handle-system">
+<h3>Handle System<a class="headerlink" href="#handle-system" title="Permalink to this headline">¶</a></h3>
+<p>DVN administrators may choose to set up a <a class="reference external" href="http://www.handle.net/">HANDLE.NET</a> server to issue and register persistent, global identifiers for their studies. The DVN app can be modified to support other naming services, but as of now it comes
+pre-configured to use Handles.</p>
+<p>To install and set up a local HANDLE.NET server:</p>
+<ol class="arabic simple">
+<li>Download HANDLE.NET.
+Refer to the HANDLE.NET software download page at
+<a class="reference external" href="http://handle.net/download.html">http://handle.net/download.html</a>.</li>
+<li>Install the server on the same host as GlassFish.
+Complete the installation and setup process as described in the
+HANDLE.NET Technical Manual:
+<a class="reference external" href="http://www.handle.net/tech_manual/Handle_Technical_Manual.pdf">http://www.handle.net/tech_manual/Handle_Technical_Manual.pdf</a>.</li>
+<li>Accept the default settings during installation, <strong>with one
+exception:</strong> do not encrypt private keys (this will make it easier to
+manage the service). <strong>Note</strong> that this means answering &#8216;n&#8217; when
+prompted &#8220;Would you like to encrypt your private key?(y/n). [y]:&#8221;. If
+you accept the default &#8216;y&#8217; and then hit return when prompted for a
+passphrase, this <strong>will</strong> encrypt the key, with a blank passphrase!</li>
+<li>During the installation you will be issued an &#8220;authority prefix&#8221;.
+This is the equivalent of a domain name. For example, the prefix
+registered to the IQSS DVN is &#8220;1902.1&#8221;. The IDs issued to IQSS
+studies are of the form &#8220;1902.1/XXXX&#8221;, where &#8220;XXXX&#8221; is some unique
+identifier.</li>
+<li>Use <tt class="docutils literal"><span class="pre">psql</span></tt> or <tt class="docutils literal"><span class="pre">pgAdmin</span></tt> to execute the following SQL command:
+<tt class="docutils literal"><span class="pre">insert</span> <span class="pre">into</span> <span class="pre">handleprefix</span> <span class="pre">(prefix)</span> <span class="pre">values(</span> <span class="pre">'&lt;your</span> <span class="pre">HANDLE.NET</span> <span class="pre">prefix&gt;')</span></tt>;</li>
+<li><tt class="docutils literal"><span class="pre">(Optional/advanced)</span></tt> If you are going to be assigning HANDLE.NET
+ids under more than one authority prefix (to register studies harvested
+from remote sources): once you obtain the additional HANDLE.NET
+prefixes, add each to the <tt class="docutils literal"><span class="pre">handleprefix</span></tt> table, using the SQL
+command from step 5.</li>
+<li>Use <tt class="docutils literal"><span class="pre">psql</span></tt> or <tt class="docutils literal"><span class="pre">pgAdmin</span></tt> to execute the following SQL
+command: <tt class="docutils literal"><span class="pre">update</span> <span class="pre">vdcnetwork</span> <span class="pre">set</span> <span class="pre">handleregistration=true,</span> <span class="pre">authority='&lt;your</span> <span class="pre">HANDLE.NET</span> <span class="pre">prefix&gt;';</span></tt></li>
+</ol>
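+<p>For example, the SQL from steps 5 and 7 can be run with <tt class="docutils literal"><span class="pre">psql</span></tt> as follows (a sketch; it assumes the database is named <tt class="docutils literal"><span class="pre">dvnDb</span></tt> and uses the IQSS prefix 1902.1 as a stand-in for your own):</p>
+<div class="highlight-guess"><div class="highlight"><pre>psql -d dvnDb -c &quot;insert into handleprefix (prefix) values ('1902.1');&quot;
+psql -d dvnDb -c &quot;update vdcnetwork set handleregistration=true, authority='1902.1';&quot;
+</pre></div></div>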
+<p>Note: The DVN app comes bundled with the HANDLE.NET client libraries.
+You do not need to install these separately.</p>
+</div>
+<div class="section" id="twitter-setup">
+<h3>Twitter setup<a class="headerlink" href="#twitter-setup" title="Permalink to this headline">¶</a></h3>
+<p>To set up the ability for users to enable Automatic Tweets in your
+Dataverse Network:</p>
+<ol class="arabic">
+<li><p class="first">You will first need to tell Twitter about your Dataverse Network application. Go to <a class="reference external" href="https://dev.twitter.com/apps">https://dev.twitter.com/apps</a> and log in (or create a new Twitter account).</p>
+</li>
+<li><p class="first">Click &#8220;Create a new application&#8221;.</p>
+</li>
+<li><p class="first">Fill out all the fields. For callback URL, use your Dataverse Network Home Page URL.</p>
+</li>
+<li><p class="first">Once created, go to the Settings tab and set the Application Type to &#8220;Read and Write&#8221;. You can optionally also upload an Application
+Icon and fill out the Organization details (the end user will see these).</p>
+</li>
+<li><dl class="first docutils">
+<dt>Click Details again. You will need to add both the Consumer key and the Consumer secret as JVM options via the Glassfish console (see the example after this list):</dt>
+<dd><p class="first">-Dtwitter4j.oauth.consumerKey=***</p>
+<p class="last">-Dtwitter4j.oauth.consumerSecret=***</p>
+</dd>
+</dl>
+</li>
+<li><p class="first">Restart Glassfish.</p>
+</li>
+<li><p class="first">To verify that Automatic Tweets are now properly set up, you can go to the Dataverse Network Options page or any Dataverse Options page and see that there is a new option, &#8220;Enable Twitter&#8221;.</p>
+</li>
+</ol>
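+<p>The two JVM options from step 5 can also be added from the command line (a sketch; replace the placeholder values with the key and secret issued by Twitter):</p>
+<div class="highlight-guess"><div class="highlight"><pre>asadmin create-jvm-options &quot;-Dtwitter4j.oauth.consumerKey=YOUR_CONSUMER_KEY&quot;
+asadmin create-jvm-options &quot;-Dtwitter4j.oauth.consumerSecret=YOUR_CONSUMER_SECRET&quot;
+# restart for the options to take effect:
+asadmin stop-domain domain1
+asadmin start-domain domain1
+</pre></div></div>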
+</div>
+<div class="section" id="digital-object-identifiers">
+<h3>Digital Object Identifiers<a class="headerlink" href="#digital-object-identifiers" title="Permalink to this headline">¶</a></h3>
+<p>Beginning with version 3.6, the DVN supports the use of Digital Object Identifiers (DOIs). Like the Handle System described above, DOIs provide a permanent link to studies in a DVN network.</p>
+<p>DVN uses the EZID API (<a class="reference external" href="http://www.n2t.net/ezid">www.n2t.net/ezid</a>) to facilitate the creation and maintenance of DOIs. Network administrators will have to arrange to get their own account with EZID in order to enable the creation of DOIs. Once an account has been set up, the following settings must be made in your DVN setup:</p>
+<p>First, update your database: use <tt class="docutils literal"><span class="pre">psql</span></tt> or <tt class="docutils literal"><span class="pre">pgAdmin</span></tt> to execute the following SQL command:
+<tt class="docutils literal"><span class="pre">update</span> <span class="pre">vdcnetwork</span> <span class="pre">set</span> <span class="pre">handleregistration=true,</span> <span class="pre">protocol</span> <span class="pre">=</span> <span class="pre">'doi',</span> <span class="pre">authority='&lt;the</span> <span class="pre">namespace</span> <span class="pre">associated</span> <span class="pre">with</span> <span class="pre">your</span> <span class="pre">EZID</span> <span class="pre">account&gt;'</span> <span class="pre">where</span> <span class="pre">id</span> <span class="pre">=</span> <span class="pre">0;</span></tt></p>
+<p>Add the following JVM options:</p>
+<p><tt class="docutils literal"><span class="pre">-Ddoi.username=&lt;username</span> <span class="pre">of</span> <span class="pre">your</span> <span class="pre">EZID</span> <span class="pre">account&gt;</span></tt></p>
+<p><tt class="docutils literal"><span class="pre">-Ddoi.password=&lt;password</span> <span class="pre">of</span> <span class="pre">your</span> <span class="pre">EZID</span> <span class="pre">account&gt;</span></tt></p>
+<p><tt class="docutils literal"><span class="pre">-Ddoi.baseurlstring=https://ezid.cdlib.org</span></tt></p>
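+<p>The same options can be set via asadmin (a sketch; the account name and password are placeholders, and note that literal colons in JVM option values must be escaped with a backslash for this command):</p>
+<div class="highlight-guess"><div class="highlight"><pre>asadmin create-jvm-options &quot;-Ddoi.username=myezidaccount&quot;
+asadmin create-jvm-options &quot;-Ddoi.password=myezidpassword&quot;
+# the colon in the URL must be escaped:
+asadmin create-jvm-options &quot;-Ddoi.baseurlstring=https\://ezid.cdlib.org&quot;
+</pre></div></div>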
+<p>Note: The DVN app comes bundled with the EZID API client libraries. You do not need to install these separately.</p>
+</div>
+</div>
+<div class="section" id="appendix">
+<h2>Appendix<a class="headerlink" href="#appendix" title="Permalink to this headline">¶</a></h2>
+<div class="section" id="do-you-need-r">
+<span id="id6"></span><h3>Do you need R?<a class="headerlink" href="#do-you-need-r" title="Permalink to this headline">¶</a></h3>
+<p>This is a more detailed explanation of the statement made earlier in the &#8220;Prerequisites&#8221; section: &#8220;Only the advanced modes of serving quantitative data to the users require R.&#8221;</p>
+<p>In this context, by “quantitative data” we mean data sets for which
+machine-readable, variable-level metadata has been defined in the DVN
+database. “Subsettable data” is another frequently used term, in the
+DVN parlance. The currently supported sources of subsettable data are
+SPSS and STATA files, as well as tab-delimited or CSV files with
+extra control cards defining the data structure and variable
+metadata. (See the full documentation in the User Guide under <a class="reference internal" href="dataverse-user-main.html#finding-and-using-data"><em>Finding and Using Data</em></a>.)</p>
+<p>Once a “subsettable” data set is created, users can run online statistics and analysis on it. That’s where R is used. In our experience, most of the institutions that have installed the DVN did so primarily in order to share and process quantitative data. When this is the case, R must be considered a required component. But for a DVN built to serve a collection of strictly human-readable (text, image, etc.) data, R will not be necessary at all.</p>
+</div>
+<div class="section" id="what-does-the-installer-do">
+<span id="what-does-the-intstaller-do"></span><h3>What does the Installer do?<a class="headerlink" href="#what-does-the-installer-do" title="Permalink to this headline">¶</a></h3>
+<p>The Installer script (see the Quick Install and Running the Installer sections) automates the following tasks:</p>
+<ol class="arabic simple">
+<li>Checks the system for required components;</li>
+<li>Prompts the user for the following information:<ol class="loweralpha">
+<li>Location of the Glassfish directory;</li>
+<li>Access information (host, port, database name, username, password) for PostgreSQL;</li>
+<li>Access information (host, port, username, password) for Rserve;</li>
+</ol>
+</li>
+<li>Attempts to create the PostgreSQL user (role) and database, from the <a class="reference internal" href="#postgresql"><em>prerequisite PostgreSQL setup step</em></a> above; see the <a class="reference internal" href="#postgresql-setup"><em>&#8220;PostgreSQL configuration&#8221;</em></a> Appendix section for details.</li>
+<li>Using the <a class="reference internal" href="#glassfish-configuration-template"><em>Glassfish configuration template (see the Appendix)</em></a> and the information collected in step 2.b. above, creates the config file domain.xml and installs it in the Glassfish domain directory.</li>
+<li>Copies additional configuration files (supplied in the dvninstall/config directory of the Installer package) into the config directory of the Glassfish domain.</li>
+<li>Installs the Glassfish PostgreSQL driver (supplied in the dvninstall/pgdriver directory of the Installer package) into the lib directory of the Glassfish installation tree.</li>
+<li>Attempts to start Glassfish. The config file at this point contains the configuration settings that the DVN will need to run (see section <a class="reference internal" href="#glassfish-configuration-individual-settings"><em>Glassfish Configuration, individual settings section</em></a> of the Appendix), but otherwise it is a &#8220;virgin&#8221;, fresh config. Glassfish will perform some initialization tasks on this first startup and deploy some internal apps.</li>
+<li>If step 7 succeeds, the Installer attempts to deploy the DVN application (the Java archive DVN-EAR.ear supplied with the installer).</li>
+<li>Stops Glassfish, populates the DVN database with the initial content (section <a class="reference internal" href="#postgresql-setup"><em>&#8220;PostgreSQL configuration&#8221;</em></a> of the Appendix), starts Glassfish.</li>
+<li>Attempts to establish connection to Rserve, using the access information obtained during step 2.c. If this fails, prints a warning message and points the user to the Prerequisites section of this guide where R installation is discussed.</li>
+<li>Finally, prints a message informing the user that their new DVN should be up and running, provides them with the server URL and suggests that they visit it, to change the default passwords and perhaps start  setting up their Dataverse Network.</li>
+</ol>
+<p>Throughout the steps above, the Installer attempts to diagnose any
+potential issues and give the user clear error messages when things go
+wrong (&#8220;version of Postgres too old&#8221;, &#8220;you must run this as root&#8221;,
+etc.).</p>
+<p>Enough information is supplied in this manual to enable a user (a
+skilled and rather patient user, we may add) to perform all the steps
+above without the use of the script.</p>
+</div>
+<div class="section" id="glassfish-configuration-template">
+<span id="id7"></span><h3>Glassfish configuration template<a class="headerlink" href="#glassfish-configuration-template" title="Permalink to this headline">¶</a></h3>
+<p>The configuration template (<tt class="docutils literal"><span class="pre">domain.xml.TEMPLATE</span></tt>) is part of the
+installer zip package. The installer replaces the placeholder
+configuration tokens (for example, <tt class="docutils literal"><span class="pre">%POSTGRES_DATABASE%</span></tt>) with the
+real values provided by the user to create the Glassfish configuration
+file <tt class="docutils literal"><span class="pre">domain.xml</span></tt>.</p>
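+<p>A minimal sketch of the substitution the installer performs; the first token follows the <tt class="docutils literal"><span class="pre">%POSTGRES_DATABASE%</span></tt> pattern shown above, while the other token name and all values are hypothetical:</p>
+<div class="highlight-guess"><div class="highlight"><pre># replace the placeholder tokens with real values to produce domain.xml
+sed -e 's/%POSTGRES_DATABASE%/dvnDb/g' \
+    -e 's/%POSTGRES_USER%/dvnApp/g' \
+    domain.xml.TEMPLATE &gt; domain.xml
+</pre></div></div>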
+</div>
+<div class="section" id="glassfish-configuration-individual-settings">
+<span id="id8"></span><h3>Glassfish Configuration, individual settings<a class="headerlink" href="#glassfish-configuration-individual-settings" title="Permalink to this headline">¶</a></h3>
+<p>As explained earlier in the Appendix, the Installer configures Glassfish
+by cooking a complete domain configuration file (<tt class="docutils literal"><span class="pre">domain.xml</span></tt>) and
+installing it in the domain directory.</p>
+<p>All of the settings and options however can be configured individually
+by an operator, using the Glassfish Admin Console.</p>
+<p>The Console can be accessed at the network port 4848 when Glassfish is
+running, by pointing a browser at</p>
+<blockquote>
+<div><tt class="docutils literal"><span class="pre">http://[your</span> <span class="pre">host</span> <span class="pre">name]:4848/</span></tt></div></blockquote>
+<p>and logging in as <tt class="docutils literal"><span class="pre">admin</span></tt>. The initial password is <tt class="docutils literal"><span class="pre">adminadmin</span></tt>. It
+is of course strongly recommended to log in and change it first thing
+after you run the Installer.</p>
+<p>The sections below describe all the configuration settings that would
+need to be done through the GUI in order to replicate the configuration
+file produced by the Installer. This information is provided for the
+benefit of an advanced user who may want to experiment with individual
+options, or who may attempt to install the DVN on a platform not supported by our
+installer; although we sincerely wish that nobody is ever driven to such
+desperate measures.</p>
+<div class="section" id="jvm-options">
+<span id="id9"></span><h4>JVM options<a class="headerlink" href="#jvm-options" title="Permalink to this headline">¶</a></h4>
+<p>Under Application Server-&gt;JVM Settings-&gt;JVM Options:</p>
+<p>If you are installing Glassfish in a production environment, follow
+these steps:</p>
+<ol class="arabic">
+<li><div class="first line-block">
+<div class="line">Delete the following options: -Dsun.rmi.dgc.server.gcInterval=3600000</div>
+<div class="line">-Dsun.rmi.dgc.client.gcInterval=3600000</div>
+</div>
+</li>
+<li><div class="first line-block">
+<div class="line">Add the following options:</div>
+<div class="line">-XX:MaxPermSize=192m</div>
+<div class="line">-XX:+AggressiveHeap</div>
+<div class="line">-Xss128k</div>
+<div class="line">-XX:+DisableExplicitGC</div>
+<div class="line">-Dcom.sun.enterprise.ss.ASQuickStartup=false</div>
+</div>
+</li>
+<li><div class="first line-block">
+<div class="line">To install on a multi-processor machine, add the following:</div>
+<div class="line"><tt class="docutils literal"><span class="pre">-XX:+UseParallelOldGC</span></tt></div>
+</div>
+</li>
+<li><div class="first line-block">
+<div class="line">To enable the optional HANDLE.NET installation and provide access to</div>
+<div class="line">study ID registration, add the following (see the &#8220;Handle System&#8221;</div>
+<div class="line">section in &#8220;Optional Components&#8221; for</div>
+<div class="line">details):</div>
+<div class="line"><tt class="docutils literal"><span class="pre">-Ddvn.handle.baseUrl=&lt;Dataverse</span> <span class="pre">Network</span> <span class="pre">host</span> <span class="pre">URL&gt;/dvn/study?globalId=hdl:</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">-Ddvn.handle.auth=&lt;authority&gt;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">-Ddvn.handle.admcredfile=/hs/svr_1/admpriv.bin</span></tt></div>
+</div>
+</li>
+<li><div class="first line-block">
+<div class="line">To enable the optional Google Analytics option on the Network Options</div>
+<div class="line">page and provide access to site usage reports, add the following (see</div>
+<div class="line">the &#8220;Google Analytics&#8221; section in the &#8220;Optional Components&#8221; for</div>
+<div class="line">details):</div>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">-Ddvn.googleanalytics.key=&lt;googleAnalyticsTrackingCode&gt;</span></tt></div>
+</div>
+</div>
+</li>
+<li><div class="first line-block">
+<div class="line">Configure the following option only if you run multiple instances</div>
+<div class="line">of the GlassFish server for load balancing. This option controls</div>
+<div class="line">which GlassFish instance runs scheduled jobs, such as harvest or</div>
+<div class="line">export.</div>
+<div class="line">For the server instance that will run scheduled jobs, include the</div>
+<div class="line">following JVM option:</div>
+<div class="line"><tt class="docutils literal"><span class="pre">-Ddvn.timerServer=true</span></tt></div>
+<div class="line">For all other server instances, include this JVM option:</div>
+<div class="line"><tt class="docutils literal"><span class="pre">-Ddvn.timerServer=false</span></tt></div>
+<div class="line">If you are installing Glassfish in either a production or development</div>
+<div class="line">environment, follow these steps:</div>
+</div>
+<ul>
+<li><div class="first line-block">
+<div class="line">Change the following options’ settings:</div>
+<div class="line">Change <tt class="docutils literal"><span class="pre">-client</span></tt> to <tt class="docutils literal"><span class="pre">-server</span></tt>.</div>
+<div class="line">Change <tt class="docutils literal"><span class="pre">-Xmx512m</span></tt> to whatever size you can allot for the maximum</div>
+<div class="line">Java heap space.</div>
+<div class="line">Set <tt class="docutils literal"><span class="pre">-Xms512m</span></tt> to the same value to which you set <tt class="docutils literal"><span class="pre">-Xmx512m</span></tt>.</div>
+</div>
+</li>
+<li><div class="first line-block">
+<div class="line">To configure permanent file storage (data and documentation files</div>
+<div class="line">uploaded to studies) set the following:</div>
+<div class="line"><tt class="docutils literal"><span class="pre">-Dvdc.study.file.dir=${com.sun.aas.instanceRoot}/config/files/studies</span></tt></div>
+</div>
+</li>
+<li><div class="first line-block">
+<div class="line">To configure the temporary location used in file uploads add the</div>
+<div class="line">following:</div>
+<div class="line"><tt class="docutils literal"><span class="pre">-Dvdc.temp.file.dir=${com.sun.aas.instanceRoot}/config/files/temp</span></tt></div>
+</div>
+</li>
+<li><div class="first line-block">
+<div class="line">To configure export and import logs (harvesting and importing),</div>
+<div class="line">add the following:</div>
+<div class="line">-Dvdc.export.log.dir=${com.sun.aas.instanceRoot}/logs/export</div>
+<div class="line">-Dvdc.import.log.dir=${com.sun.aas.instanceRoot}/logs/import</div>
+</div>
+</li>
+<li><div class="first line-block">
+<div class="line">Add the following:</div>
+<div class="line">-Djhove.conf.dir=${com.sun.aas.instanceRoot}/config</div>
+<div class="line">-Ddvn.inetAddress=&lt;host or fully qualified domain name of the server on which the Dataverse Network runs&gt;</div>
+<div class="line">-Ddvn.networkData.libPath=${com.sun.aas.instanceRoot}/applications/j2ee-apps/DVN-EAR</div>
+</div>
+</li>
+<li><div class="first line-block">
+<div class="line">To manage calls to RServe and the R host (analysis and file upload), add</div>
+<div class="line">the following:</div>
+<div class="line"><tt class="docutils literal"><span class="pre">-Dvdc.dsb.host=&lt;RServe</span> <span class="pre">server</span> <span class="pre">hostname&gt;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">-Dvdc.dsb.rserve.user=&lt;account&gt;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">-Dvdc.dsb.rserve.pwrd=&lt;password&gt;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">-Dvdc.dsb.rserve.port=&lt;port</span> <span class="pre">number&gt;</span></tt></div>
+</div>
+<div class="line-block">
+<div class="line">For installing R, see <a class="reference internal" href="#r-and-rserve"><em>R and R-Serve</em></a> for information about</div>
+<div class="line">configuring these values in the <tt class="docutils literal"><span class="pre">Rserv.conf</span></tt> file.</div>
+<div class="line">These settings must be configured for subsetting and analysis to work.</div>
+</div>
+</li>
+<li><div class="first line-block">
+<div class="line">To configure search index files set the following:</div>
+<div class="line"><tt class="docutils literal"><span class="pre">-Ddvn.index.location=${com.sun.aas.instanceRoot}/config</span></tt></div>
+</div>
+</li>
+<li><div class="first line-block">
+<div class="line">To use the optional customized error logging and add more information</div>
+<div class="line">to your log files, set the following:</div>
+<div class="line"><tt class="docutils literal"><span class="pre">-Djava.util.logging.config.file=${com.sun.aas.instanceRoot}/config/logging.properties</span></tt></div>
+<div class="line"><strong>Note</strong>: To customize the logging, edit the <tt class="docutils literal"><span class="pre">logging.properties</span></tt> file.</div>
+</div>
+</li>
+<li><div class="first line-block">
+<div class="line">The default size limit for file downloads is 100MB.  To override this</div>
+<div class="line">default add the following JVM option:</div>
+<div class="line"><tt class="docutils literal"><span class="pre">-Ddvn.batchdownload.limit=&lt;max</span> <span class="pre">download</span> <span class="pre">bytes&gt;</span></tt></div>
+</div>
+</li>
+</ul>
+</li>
+</ol>
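+<p>All of the JVM options above can also be added or removed with the asadmin utility instead of the Admin Console (a sketch; note that literal colons, as in -XX: options, must be escaped with a backslash for this command):</p>
+<div class="highlight-guess"><div class="highlight"><pre># remove an option from step 1:
+asadmin delete-jvm-options &quot;-Dsun.rmi.dgc.server.gcInterval=3600000&quot;
+# add options; the colon in -XX:MaxPermSize must be escaped:
+asadmin create-jvm-options &quot;-XX\:MaxPermSize=192m&quot;
+# 100MB expressed in bytes, matching the stated default download limit:
+asadmin create-jvm-options &quot;-Ddvn.batchdownload.limit=104857600&quot;
+</pre></div></div>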
+</div>
+<div class="section" id="ejb-container">
+<h4>EJB Container<a class="headerlink" href="#ejb-container" title="Permalink to this headline">¶</a></h4>
+<p>Under Configuration-&gt;EJB Container-&gt;EJB Timer Service:</p>
+<ol class="arabic">
+<li><div class="first line-block">
+<div class="line">Set the Timer Datasource to the following:</div>
+<div class="line"><tt class="docutils literal"><span class="pre">jdbc/VDCNetDS</span></tt></div>
+</div>
+</li>
+<li><div class="first line-block">
+<div class="line">Save the configuration.</div>
+</div>
+</li>
+</ol>
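+<p>The same setting can in principle be made with <tt class="docutils literal"><span class="pre">asadmin</span> <span class="pre">set</span></tt>; treat this as a sketch, since the exact dotted attribute name may vary between Glassfish versions:</p>
+<div class="highlight-guess"><div class="highlight"><pre>asadmin set server.ejb-container.ejb-timer-service.timer-datasource=jdbc/VDCNetDS
+</pre></div></div>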
+</div>
+<div class="section" id="http-service">
+<h4>HTTP Service<a class="headerlink" href="#http-service" title="Permalink to this headline">¶</a></h4>
+<p>The HTTP Service configuration settings described in this section are suggested defaults. These settings are important, but there are no universally right values to define: the values depend on the specifics of your web traffic, how many requests you get, how long they take to process on average, and your hardware. For detailed information, refer to the Sun Microsystems documentation web site at the following URL:</p>
+<p><a class="reference external" href="http://docs.sun.com/">http://docs.sun.com/</a></p>
+<div class="line-block">
+<div class="line"><strong>Note</strong>: If your server becomes so busy that it drops connections,</div>
+<div class="line">adjust the Thread Counts to improve performance.</div>
+</div>
+<ol class="arabic">
+<li><p class="first">Under Configuration-&gt;HTTP Service-&gt;HTTP
+Listeners-&gt;<tt class="docutils literal"><span class="pre">http-listener-1</span></tt>:</p>
+<ul class="simple">
+<li>Listener Port: 80</li>
+<li>Acceptor Threads: The number of CPUs (cores) on your server</li>
+</ul>
+</li>
+<li><p class="first">Under Configuration-&gt;HTTP Service, in the RequestProcessing tab:</p>
+<ul class="simple">
+<li>Thread Count: Four times the number of CPUs (cores) on your server</li>
+<li>Initial Thread Count: The number of CPUs (cores)</li>
+</ul>
+</li>
+<li><p class="first">Under Configuration-&gt;HTTP Service-&gt;Virtual Servers-&gt;server: add a new property <tt class="docutils literal"><span class="pre">allowLinking</span></tt> with the value <tt class="docutils literal"><span class="pre">true</span></tt>.</p>
+</li>
+<li><p class="first">Under Configuration-&gt;HTTP Service, configure Access Logging:</p>
+<div class="line-block">
+<div class="line">format=%client.name% %auth-user-name% %datetime% %request% %status% %response.length%</div>
+<div class="line">rotation-enabled=true</div>
+<div class="line">rotation-interval-in-minutes=15</div>
+<div class="line">rotation-policy=time</div>
+<div class="line">rotation-suffix=yyyy-MM-dd</div>
+</div>
+</li>
+</ol>
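+<p>For example, on a server with 4 CPU cores the suggested starting values would be: Acceptor Threads = 4, Thread Count = 4 &#215; 4 = 16, and Initial Thread Count = 4; tune these from there based on observed load.</p>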
+</div>
+<div class="section" id="javamail-session">
+<h4>JavaMail Session<a class="headerlink" href="#javamail-session" title="Permalink to this headline">¶</a></h4>
+<p>Under Resources-&gt;JavaMail Sessions<tt class="docutils literal"><span class="pre">-&gt;mail/notifyMailSession:</span></tt></p>
+<ul>
+<li><div class="first line-block">
+<div class="line">Mail Host: <tt class="docutils literal"><span class="pre">&lt;your</span> <span class="pre">mail</span> <span class="pre">server&gt;</span></tt></div>
+<div class="line"><strong>Note</strong>: The Project recommends that you install a mail server on the same machine as GlassFish and use <tt class="docutils literal"><span class="pre">localhost</span></tt> for this entry. Since email notification is used for workflow events such as creating a dataverse or study, these functions may not work properly if a valid mail server is not configured.</div>
+</div>
+</li>
+<li><dl class="first docutils">
+<dt>Default User: <tt class="docutils literal"><span class="pre">dataversenotify</span></tt></dt>
+<dd><p class="first last">This does not need to be a real mail account.</p>
+</dd>
+</dl>
+</li>
+<li><p class="first">Default Return Address: <tt class="docutils literal"><span class="pre">do-not-reply&#64;&lt;your</span> <span class="pre">mail</span> <span class="pre">server&gt;</span></tt></p>
+</li>
+</ul>
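+<p>These values can also be set from the command line (a sketch, assuming a mail server on localhost):</p>
+<div class="highlight-guess"><div class="highlight"><pre>asadmin create-javamail-resource --mailhost localhost \
+  --mailuser dataversenotify --fromaddress do-not-reply@localhost \
+  mail/notifyMailSession
+</pre></div></div>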
+</div>
+<div class="section" id="jdbc-resources">
+<h4>JDBC Resources<a class="headerlink" href="#jdbc-resources" title="Permalink to this headline">¶</a></h4>
+<p><strong>Under Resources-&gt;JDBC-&gt;Connection Pools:</strong></p>
+<div class="line-block">
+<div class="line">Add a new Connection Pool entry:</div>
+</div>
+<ul class="simple">
+<li>entryName: <tt class="docutils literal"><span class="pre">dvnDbPool</span></tt></li>
+<li>Resource Type: <tt class="docutils literal"><span class="pre">javax.sql.DataSource</span></tt></li>
+<li>Database Vendor: <tt class="docutils literal"><span class="pre">PostgreSQL</span></tt></li>
+<li>DataSource ClassName: <tt class="docutils literal"><span class="pre">org.postgresql.ds.PGPoolingDataSource</span></tt></li>
+<li>Additional Properties:<ul>
+<li>ConnectionAttributes: <tt class="docutils literal"><span class="pre">;create=true</span></tt></li>
+<li>User: <tt class="docutils literal"><span class="pre">dvnApp</span></tt></li>
+<li>PortNumber: <tt class="docutils literal"><span class="pre">5432</span></tt> (Port 5432 is the PostgreSQL default port.)</li>
+<li>Password: <tt class="docutils literal"><span class="pre">&lt;Dataverse</span> <span class="pre">Network</span> <span class="pre">application</span> <span class="pre">database</span> <span class="pre">password&gt;</span></tt></li>
+<li>DatabaseName: <tt class="docutils literal"><span class="pre">&lt;your</span> <span class="pre">database</span> <span class="pre">name&gt;</span></tt></li>
+<li>ServerName: <tt class="docutils literal"><span class="pre">&lt;your</span> <span class="pre">database</span> <span class="pre">host&gt;</span></tt></li>
+<li>JDBC30DataSource: <tt class="docutils literal"><span class="pre">true</span></tt></li>
+</ul>
+</li>
+</ul>
+<div class="line-block">
+<div class="line"><br /></div>
+</div>
+<p><strong>Under Resources-&gt;JDBC-&gt;JDBC Resources:</strong></p>
+<div class="line-block">
+<div class="line">Add a new JDBC Resources entry:</div>
+</div>
+<ul class="simple">
+<li>JNDI Name: <tt class="docutils literal"><span class="pre">jdbc/VDCNetDS</span></tt></li>
+<li>Pool Name: <tt class="docutils literal"><span class="pre">dvnDbPool</span></tt></li>
+</ul>
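+<p>The pool and resource can likewise be created with asadmin (a sketch; the password and host values are placeholders, and the individual properties are separated by colons in the --property string):</p>
+<div class="highlight-guess"><div class="highlight"><pre>asadmin create-jdbc-connection-pool \
+  --datasourceclassname org.postgresql.ds.PGPoolingDataSource \
+  --restype javax.sql.DataSource \
+  --property &quot;User=dvnApp:Password=secret:PortNumber=5432:DatabaseName=dvnDb:ServerName=localhost:JDBC30DataSource=true&quot; \
+  dvnDbPool
+asadmin create-jdbc-resource --connectionpoolid dvnDbPool jdbc/VDCNetDS
+</pre></div></div>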
+</div>
+<div class="section" id="jms-resources">
+<h4>JMS Resources<a class="headerlink" href="#jms-resources" title="Permalink to this headline">¶</a></h4>
+<p>Under Resources-&gt;JMS Resources:</p>
+<ol class="arabic simple">
+<li>Add a new Connection Factory for the DSB Queue:<ul>
+<li>JNDI Name: <tt class="docutils literal"><span class="pre">jms/DSBQueueConnectionFactory</span></tt></li>
+<li>Resource Type: <tt class="docutils literal"><span class="pre">javax.jms.QueueConnectionFactory</span></tt></li>
+</ul>
+</li>
+<li>Add a new Connection Factory for the Index Message:<ul>
+<li>JNDI Name: <tt class="docutils literal"><span class="pre">jms/IndexMessageFactory</span></tt></li>
+<li>Resource Type: <tt class="docutils literal"><span class="pre">javax.jms.QueueConnectionFactory</span></tt></li>
+</ul>
+</li>
+<li>Add a new Destination Resource for the DSB Queue:<ul>
+<li>JNDI Name: <tt class="docutils literal"><span class="pre">jms/DSBIngest</span></tt></li>
+<li>Physical Destination Name: <tt class="docutils literal"><span class="pre">DSBIngest</span></tt></li>
+<li>Resource Type: <tt class="docutils literal"><span class="pre">javax.jms.Queue</span></tt></li>
+</ul>
+</li>
+<li>Add a new Destination Resource for the Index Message:<ul>
+<li>JNDI Name: <tt class="docutils literal"><span class="pre">jms/IndexMessage</span></tt></li>
+<li>Physical Destination Name: <tt class="docutils literal"><span class="pre">IndexMessage</span></tt></li>
+<li>Resource Type: <tt class="docutils literal"><span class="pre">javax.jms.Queue</span></tt></li>
+</ul>
+</li>
+</ol>
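+<p>The equivalent asadmin commands (a sketch; the <tt class="docutils literal"><span class="pre">--property</span> <span class="pre">Name=...</span></tt> form for the physical destination name may vary by Glassfish version):</p>
+<div class="highlight-guess"><div class="highlight"><pre>asadmin create-jms-resource --restype javax.jms.QueueConnectionFactory jms/DSBQueueConnectionFactory
+asadmin create-jms-resource --restype javax.jms.QueueConnectionFactory jms/IndexMessageFactory
+asadmin create-jms-resource --restype javax.jms.Queue --property Name=DSBIngest jms/DSBIngest
+asadmin create-jms-resource --restype javax.jms.Queue --property Name=IndexMessage jms/IndexMessage
+</pre></div></div>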
+</div>
+</div>
+<div class="section" id="postgresql-setup">
+<span id="id10"></span><h3>PostgreSQL setup<a class="headerlink" href="#postgresql-setup" title="Permalink to this headline">¶</a></h3>
+<p>The following actions are normally performed by the automated installer
+script. These steps are explained here for reference, and/or in case
+you need to perform them manually:</p>
+<ol class="arabic">
+<li><p class="first">Start as root, then change to user postgres:</p>
+<p><tt class="docutils literal"><span class="pre">su</span> <span class="pre">postgres</span></tt></p>
+</li>
+</ol>
+<blockquote>
+<div><p>Create the DVN database user (role):</p>
+<p><tt class="docutils literal"><span class="pre">createuser</span> <span class="pre">-SrdPE</span> <span class="pre">[DB_USERNAME]</span></tt></p>
+<p>(you will be prompted to choose a user password).</p>
+<p>Create DVN database:</p>
+<p><tt class="docutils literal"><span class="pre">createdb</span> <span class="pre">[DB_NAME]</span> <span class="pre">--owner=[DB_USERNAME]</span></tt></p>
+<p><tt class="docutils literal"><span class="pre">[DB_NAME]</span></tt> and <tt class="docutils literal"><span class="pre">[DB_USERNAME]</span></tt> are the names you choose for your DVN database and database user. These, together with the password you have assigned, will be used in the Glassfish configuration so that the application can talk to the database.</p>
+</div></blockquote>
+<ol class="arabic simple" start="2">
+<li>Before Glassfish can be configured for the DVN app, the PostgreSQL driver needs to be installed in the &lt;GLASSFISH ROOT&gt;/lib directory. We supply a version of the driver known to work with the DVN in the dvninstall/pgdriver directory of the Installer bundle. (See the <a class="reference internal" href="#what-does-the-intstaller-do"><em>&#8220;What does the Installer do?&#8221;</em></a> section of this appendix.) An example of the installed location of the driver:</li>
+</ol>
+<blockquote>
+<div><tt class="docutils literal"><span class="pre">/usr/local/glassfish/lib/postgresql-8.3-603.jdbc4.jar</span></tt></div></blockquote>
+<ol class="arabic simple" start="3">
+<li>Finally, after the DVN application is deployed under Glassfish for the first time, the database needs to be populated with the initial content:</li>
+</ol>
+<blockquote>
+<div><p><tt class="docutils literal"><span class="pre">su</span> <span class="pre">postgres</span></tt>
+<tt class="docutils literal"><span class="pre">psql</span> <span class="pre">-d</span> <span class="pre">[DB_NAME]</span> <span class="pre">-f</span> <span class="pre">referenceData.sql</span></tt></p>
+<p>The file referenceData.sql is provided as part of the installer zip package.</p>
+</div></blockquote>
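+<p>Put together, the whole manual sequence might look like this (a sketch, using <tt class="docutils literal"><span class="pre">dvnApp</span></tt> and <tt class="docutils literal"><span class="pre">dvnDb</span></tt> as placeholder names):</p>
+<div class="highlight-guess"><div class="highlight"><pre>su postgres
+createuser -SrdPE dvnApp    # you will be prompted for the new user's password
+createdb dvnDb --owner=dvnApp
+# ... then, after DVN-EAR.ear has been deployed under Glassfish:
+psql -d dvnDb -f referenceData.sql
+</pre></div></div>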
+</div>
+<div class="section" id="redhat-startup-file-for-glassfish-example">
+<h3>RedHat startup file for glassfish, example<a class="headerlink" href="#redhat-startup-file-for-glassfish-example" title="Permalink to this headline">¶</a></h3>
+<p>Below is an example of a Glassfish startup file that you may want to
+install on your RedHat (or similar) system to have Glassfish start
+automatically on boot.</p>
+<div class="line-block">
+<div class="line">Install the file as <tt class="docutils literal"><span class="pre">/etc/init.d/glassfish</span></tt>, then run <tt class="docutils literal"><span class="pre">chkconfig</span> <span class="pre">glassfish</span> <span class="pre">on</span></tt></div>
+</div>
+<p>Note the extra configuration steps before the domain start line,
+which increase the file limit and allow &#8220;memory overcommit&#8221;. These
+are useful settings to have on a production server.</p>
+<div class="line-block">
+<div class="line">You may of course add extra custom configuration specific to your
+setup.</div>
+</div>
+<div class="highlight-guess"><div class="highlight"><pre><span class="c">#! /bin/sh</span>
+<span class="c"># chkconfig: 2345 99 01</span>
+<span class="c"># description: GlassFish App Server</span>
+<span class="nb">set</span> -e
+<span class="nv">ASADMIN</span><span class="o">=</span>/usr/local/glassfish/bin/asadmin
+<span class="k">case</span> <span class="s2">&quot;$1&quot;</span> in
+  start<span class="o">)</span>
+        <span class="nb">echo</span> -n <span class="s2">&quot;Starting GlassFish server: glassfish&quot;</span>
+        <span class="c"># Increase file descriptor limit:</span>
+        <span class="nb">ulimit</span> -n 32768
+        <span class="c"># Allow &quot;memory overcommit&quot;:</span>
+        <span class="c"># (basically, this allows to run exec() calls from inside the</span>
+        <span class="c"># app, without the Unix fork() call physically hogging 2X</span>
+        <span class="c"># the amount of memory glassfish is already using)</span>
+        <span class="nb">echo </span>1 &gt; /proc/sys/vm/overcommit_memory
+        <span class="nv">$ASADMIN</span> start-domain domain1
+        <span class="nb">echo</span> <span class="s2">&quot;.&quot;</span>
+        ;;
+  stop<span class="o">)</span>
+        <span class="nb">echo</span> -n <span class="s2">&quot;Stopping GlassFish server: glassfish&quot;</span>
+        <span class="nv">$ASADMIN</span> stop-domain domain1
+        <span class="nb">echo</span> <span class="s2">&quot;.&quot;</span>
+         ;;
+  *<span class="o">)</span>
+        <span class="nb">echo</span> <span class="s2">&quot;Usage: /etc/init.d/glassfish {start|stop}&quot;</span>
+
+        <span class="nb">exit </span>1
+<span class="k">esac</span>
+<span class="nb">exit </span>0
+</pre></div>
+</div>
+</div>
+<div class="section" id="enabling-secure-remote-access-to-asadmin">
+<h3>Enabling secure remote access to Asadmin<a class="headerlink" href="#enabling-secure-remote-access-to-asadmin" title="Permalink to this headline">¶</a></h3>
+<p>As was mentioned in the Glassfish section of the manual, in version
+3.1.2 the admin interface (asadmin) is configured to be accessible on the
+localhost interface only. If you need to be able to access the admin
+console remotely, you will have to enable secure access to it. (It will
+be accessible over https only, at <tt class="docutils literal"><span class="pre">https://&lt;YOUR</span> <span class="pre">HOST&gt;:4848</span></tt>; connections
+to <tt class="docutils literal"><span class="pre">http://&lt;YOUR</span> <span class="pre">HOST&gt;:4848</span></tt> will be automatically redirected to the https
+interface.)</p>
+<p>The following must be done as root:</p>
+<ol class="arabic">
+<li><p class="first">First you need to configure the admin password:</p>
+<p><tt class="docutils literal"><span class="pre">&lt;GF</span> <span class="pre">LOCATION&gt;/glassfish3/bin/asadmin</span> <span class="pre">change-admin-password</span></tt></p>
+<p>(since you didn&#8217;t create one when you were installing Glassfish, leave the &#8220;current password&#8221; blank, i.e., hit ENTER)</p>
+</li>
+<li><p class="first">Enable the secure access:</p>
+</li>
+</ol>
+<blockquote>
+<div><p><tt class="docutils literal"><span class="pre">&lt;GF</span> <span class="pre">LOCATION&gt;/glassfish3/bin/asadmin</span> <span class="pre">enable-secure-admin</span></tt></p>
+<p>(Note that you will need to restart Glassfish after step 2 above.)</p>
+</div></blockquote>
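+<p>The complete sequence, including the required restart (a sketch; adjust the Glassfish location to your installation):</p>
+<div class="highlight-guess"><div class="highlight"><pre>/usr/local/glassfish3/bin/asadmin change-admin-password
+/usr/local/glassfish3/bin/asadmin enable-secure-admin
+/usr/local/glassfish3/bin/asadmin restart-domain domain1
+</pre></div></div>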
+</div>
+<div class="section" id="using-lockss-with-dvn">
+<span id="id11"></span><h3>Using LOCKSS with DVN<a class="headerlink" href="#using-lockss-with-dvn" title="Permalink to this headline">¶</a></h3>
+<p>DVN holdings can be crawled by LOCKSS servers (<a class="reference external" href="http://www.lockss.org">www.lockss.org</a>). This is made possible by a special plugin, developed and maintained by the DVN project, which a LOCKSS daemon uses to crawl and access materials served by a Dataverse Network.</p>
+<p>The current stable version of the plugin is available at the following location:</p>
+<p><a class="reference external" href="http://lockss.hmdc.harvard.edu/lockss/plugin/DVNOAIPlugin.jar">http://lockss.hmdc.harvard.edu/lockss/plugin/DVNOAIPlugin.jar</a></p>
+<p>As of January 2013 and DVN version 3.3, the plugin is compatible with the LOCKSS daemon version 1.55. The plugin sources can be found in the main DVN source tree in <a class="reference external" href="https://dvn.svn.sourceforge.net/svnroot/dvn/dvn-app/trunk/src/DVN-lockss">https://dvn.svn.sourceforge.net/svnroot/dvn/dvn-app/trunk/src/DVN-lockss</a> (please note that the DVN project is currently <strong>in the process of moving to GitHub!</strong> The preserved copy of the 3.3 source will be left at the URL above, together with the information on the current location of the source repository).</p>
+<p>In order to crawl a DVN, the following steps need to be performed:</p>
+<ol class="arabic">
+<li><p class="first">Point your LOCKSS daemon to the plugin repository above. (Refer to the LOCKSS documentation for details);</p>
+</li>
+<li><p class="first">Create a LOCKSS Archival Unit for your target DVN:</p>
+<p>In the LOCKSS Admin Console, go to <strong>Journal Configuration</strong> -&gt; <strong>Manual Add/Edit</strong> and click on <strong>Add Archival Unit</strong>.</p>
+<p>On the next form, select <strong>DVNOAI</strong> in the pull down menu under <strong>Choose a publisher plugin</strong> and click <strong>Continue</strong>.</p>
+<p>Next, configure the parameters that define your DVN Archival Unit. The LOCKSS daemon can be configured to crawl either the entire holdings of a DVN (no OAI set specified) or a selected Dataverse.</p>
+</li>
+</ol>
+<p>Note that LOCKSS crawling must be authorized on the DVN side. Refer to
+the <a class="reference internal" href="dataverse-user-main.html#edit-lockss-harvest-settings"><em>&#8220;Edit LOCKSS Settings&#8221;</em></a>
+section of the DVN Network Administrator Guide for the instructions on
+enabling LOCKSS crawling on the network level, and/or to the
+<a class="reference internal" href="dataverse-user-main.html#enabling-lockss-access-to-the-dataverse"><em>Enabling LOCKSS access to the Dataverse</em></a>
+section of the Dataverse Administration Guide. Once you allow LOCKSS crawling of
+your Dataverse(s), you will need to enter the URL of the &#8220;LOCKSS
+Manifest&#8221; page provided by the DVN in the configuration above. For the
+network-wide archival unit this URL will be
+<tt class="docutils literal"><span class="pre">http://&lt;YOUR</span> <span class="pre">SERVER&gt;/dvn/faces/ManifestPage.xhtml</span></tt>; for an
+individual dataverse it is
+<tt class="docutils literal"><span class="pre">http://&lt;YOUR</span> <span class="pre">SERVER&gt;/dvn/dv/&lt;DV</span> <span class="pre">ALIAS&gt;/faces/ManifestPage.xhtml</span></tt>.</p>
+<div class="line-block">
+<div class="line">The URL of the DVN OAI server is <tt class="docutils literal"><span class="pre">http://&lt;YOUR</span> <span class="pre">DVN</span> <span class="pre">HOST&gt;/dvn/OAIHandler</span></tt>.</div>
+</div>
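+<p>A quick way to check that the manifest page and the OAI server respond before configuring the LOCKSS daemon (a sketch; dvn.example.edu stands for your own host):</p>
+<div class="highlight-guess"><div class="highlight"><pre>curl -sI http://dvn.example.edu/dvn/faces/ManifestPage.xhtml | head -1
+curl -s &quot;http://dvn.example.edu/dvn/OAIHandler?verb=Identify&quot;
+</pre></div></div>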
+</div>
+<div class="section" id="read-only-mode">
+<h3>Read Only Mode<a class="headerlink" href="#read-only-mode" title="Permalink to this headline">¶</a></h3>
+<p>A Read Only Mode has been established in DVN to allow the application to remain available while deploying new versions or patches.  Users will be able to view data and metadata, but will not be able to add or edit anything.  Currently there is no way to switch to Read Only Mode through the application.
+In order to change the application mode you must apply the following queries through <tt class="docutils literal"><span class="pre">psql</span></tt> or <tt class="docutils literal"><span class="pre">pgAdmin</span></tt>:</p>
+<p>To set to Read Only Mode:</p>
+<blockquote>
+<div><div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">BEGIN;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">SET</span> <span class="pre">TRANSACTION</span> <span class="pre">READ</span> <span class="pre">WRITE;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">--</span> <span class="pre">Note:</span> <span class="pre">the</span> <span class="pre">database</span> <span class="pre">and</span> <span class="pre">user</span> <span class="pre">names</span> <span class="pre">may</span> <span class="pre">have</span> <span class="pre">to</span> <span class="pre">be</span> <span class="pre">modified</span> <span class="pre">for</span> <span class="pre">your</span> <span class="pre">particular</span> <span class="pre">installation;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">--</span> <span class="pre">you</span> <span class="pre">may</span> <span class="pre">also</span> <span class="pre">customize</span> <span class="pre">the</span> <span class="pre">status</span> <span class="pre">notice,</span> <span class="pre">which</span> <span class="pre">will</span> <span class="pre">appear</span> <span class="pre">on</span> <span class="pre">all</span> <span class="pre">pages</span> <span class="pre">of</span> <span class="pre">the</span> <span class="pre">application.</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">update</span> <span class="pre">vdcnetwork</span> <span class="pre">set</span> <span class="pre">statusnotice</span> <span class="pre">=</span> <span class="pre">'This</span> <span class="pre">network</span> <span class="pre">is</span> <span class="pre">currently</span> <span class="pre">in</span> <span class="pre">Read</span> <span class="pre">Only</span> <span class="pre">state.</span> <span class="pre">No</span> <span class="pre">saving</span> <span class="pre">of</span> <span class="pre">data</span> <span class="pre">will</span> <span class="pre">be</span> <span class="pre">allowed.';</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">ALTER</span> <span class="pre">DATABASE</span> <span class="pre">&quot;dvnDb&quot;</span> <span class="pre">set</span> <span class="pre">default_transaction_read_only=on;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">ALTER</span> <span class="pre">USER</span> <span class="pre">&quot;dvnApp&quot;</span> <span class="pre">set</span> <span class="pre">default_transaction_read_only=on;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">END;</span></tt></div>
+</div>
+</div></blockquote>
+<p>To return to regular service:</p>
+<blockquote>
+<div><pre class="literal-block">
+BEGIN;
+SET TRANSACTION READ WRITE;
+-- Note: the database and user names may have to be modified for your particular installation.
+ALTER DATABASE "dvnDb" SET default_transaction_read_only = off;
+ALTER USER "dvnApp" SET default_transaction_read_only = off;
+UPDATE vdcnetwork SET statusnotice = '';
+END;
+</pre>
+</div></blockquote>
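+<p>Either block can be saved to a file and applied in one step with
+<tt class="docutils literal"><span class="pre">psql</span></tt>. A minimal sketch, assuming a superuser
+named <tt class="docutils literal"><span class="pre">postgres</span></tt> and a script saved as
+<tt class="docutils literal"><span class="pre">readonly.sql</span></tt> (both placeholders):</p>
+<pre class="literal-block">
+# Apply the transaction block to the DVN database:
+psql -h localhost -U postgres -d dvnDb -f readonly.sql
+</pre>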
+</div>
+<div class="section" id="backup-and-restore">
+<h3>Backup and Restore<a class="headerlink" href="#backup-and-restore" title="Permalink to this headline">¶</a></h3>
+<p><strong>Backup</strong></p>
+<div class="line-block">
+<div class="line">The PostgreSQL database and study files (contained within the Glassfish directory by default but this is <a class="reference internal" href="#jvm-options"><em>configurable via JVM options</em></a>) are the most critical components to back up. The use of standard PostgreSQL tools (i.e. pg_dump) is recommended.</div>
+</div>
+<p>Glassfish configuration files (e.g., domain.xml, robots.txt) and local
+customizations (e.g., images in the docroot) should be backed up as well.
+In practice, it is best to simply back up the entire Glassfish directory,
+since other files, such as logs, may also be of interest.</p>
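+<p>A minimal backup sketch, assuming the database is named
+<tt class="docutils literal"><span class="pre">dvnDb</span></tt>, the database user is
+<tt class="docutils literal"><span class="pre">dvnapp</span></tt>, and Glassfish lives under
+<tt class="docutils literal"><span class="pre">/usr/local/glassfish</span></tt> (all placeholders; adjust for your installation):</p>
+<pre class="literal-block">
+# Dump the PostgreSQL database to a plain-text SQL file:
+pg_dump -U dvnapp dvnDb &gt; dvn_backup.sql
+
+# Archive the entire Glassfish directory, which includes the study
+# files, domain.xml, robots.txt, docroot customizations, and logs:
+tar czf glassfish_backup.tar.gz /usr/local/glassfish
+</pre>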
+<div class="line-block">
+<div class="line"><strong>Restore</strong></div>
+</div>
+<p>Restoring DVN consists of restoring the PostgreSQL database and the
+Glassfish directory.</p>
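+<p>A corresponding restore sketch, under the same assumptions (an empty
+<tt class="docutils literal"><span class="pre">dvnDb</span></tt> database must exist before the dump is loaded):</p>
+<pre class="literal-block">
+# Unpack the Glassfish directory back into place:
+tar xzf glassfish_backup.tar.gz -C /
+
+# Reload the database dump:
+psql -U dvnapp -d dvnDb -f dvn_backup.sql
+</pre>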
+</div>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+        </div>
+        <div class="sidebar">
+          <h3>Table Of Contents</h3>
+          <ul class="current">
+<li class="toctree-l1"><a class="reference internal" href="dataverse-user-main.html">User Guide</a></li>
+<li class="toctree-l1 current"><a class="current reference internal" href="">Installers Guide</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="#quick-install">Quick Install</a></li>
+<li class="toctree-l2"><a class="reference internal" href="#system-requirements">SYSTEM REQUIREMENTS</a></li>
+<li class="toctree-l2"><a class="reference internal" href="#prerequisites">PREREQUISITES</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="#glassfish">Glassfish</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#postgresql">PostgreSQL</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#r-and-rserve">R and RServe</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#system-configuration">System Configuration</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="#running-the-installer">RUNNING THE INSTALLER</a></li>
+<li class="toctree-l2"><a class="reference internal" href="#optional-components">Optional Components</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="#recaptcha-bot-blocker">reCAPTCHA bot blocker</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#google-analytics">Google Analytics</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#imagemagick">ImageMagick</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#handle-system">Handle System</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#twitter-setup">Twitter setup</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#digital-object-identifiers">Digital Object Identifiers</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="#appendix">Appendix</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="#do-you-need-r">Do you need R?</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#what-does-the-installer-do">What does the Installer do?</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#glassfish-configuration-template">Glassfish configuration template</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#glassfish-configuration-individual-settings">Glassfish Configuration, individual settings</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#jvm-options">JVM options</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#ejb-container">EJB Container</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#http-service">HTTP Service</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#javamail-session">JavaMail Session</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#jdbc-resources">JDBC Resources</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#jms-resources">JMS Resources</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#postgresql-setup">PostgreSQL setup</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#redhat-startup-file-for-glassfish-example">RedHat startup file for glassfish, example</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#enabling-secure-remote-access-to-asadmin">Enabling secure remote access to Asadmin</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#using-lockss-with-dvn">Using LOCKSS with DVN</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#read-only-mode">Read Only Mode</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#backup-and-restore">Backup and Restore</a></li>
+</ul>
+</li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-developer-main.html">DVN Developers Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-api-main.html">APIs Guide</a></li>
+</ul>
+
+          <h3 style="margin-top: 1.5em;">Search</h3>
+          <form class="search" action="search.html" method="get">
+            <input type="text" name="q" />
+            <input type="submit" value="Go" />
+            <input type="hidden" name="check_keywords" value="yes" />
+            <input type="hidden" name="area" value="default" />
+          </form>
+          <p class="searchtip" style="font-size: 90%">
+            Enter search terms.
+          </p>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+    <div class="footer-wrapper">
+      <div class="footer">
+        <div class="left">
+          <a href="dataverse-user-main.html" title="User Guide"
+             >previous</a> |
+          <a href="dataverse-developer-main.html" title="DVN Developers Guide"
+             >next</a> |
+          <a href="genindex.html" title="General Index"
+             >index</a>
+            <br/>
+            <a href="_sources/dataverse-installer-main.txt"
+               rel="nofollow">Show Source</a>
+        </div>
+
+        <div class="right">
+          
+    <div class="footer">
+        &copy; Copyright 1997-2013, President &amp; Fellows Harvard University.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2b1.
+    </div>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+  </body>
+</html>
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/dataverse-user-main.html	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,4392 @@
+
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+    
+    <title>User Guide &mdash; The Harvard Dataverse Network 3.6.1 documentation</title>
+    
+    <link rel="stylesheet" href="_static/agogo.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    
+    <script type="text/javascript">
+      var DOCUMENTATION_OPTIONS = {
+        URL_ROOT:    './',
+        VERSION:     '3.6.1',
+        COLLAPSE_INDEX: false,
+        FILE_SUFFIX: '.html',
+        HAS_SOURCE:  true
+      };
+    </script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
+    <link rel="top" title="The Harvard Dataverse Network 3.6.1 documentation" href="index.html" />
+    <link rel="next" title="Installers Guide" href="dataverse-installer-main.html" />
+    <link rel="prev" title="Dataverse Network Guides" href="index.html" /> 
+  </head>
+  <body>
+    <div class="header-wrapper">
+      <div class="header">
+        <div class="headertitle"><a
+          href="index.html">The Harvard Dataverse Network 3.6.1 documentation</a></div>
+        <div class="rel">
+          <a href="index.html" title="Dataverse Network Guides"
+             accesskey="P">previous</a> |
+          <a href="dataverse-installer-main.html" title="Installers Guide"
+             accesskey="N">next</a> |
+          <a href="genindex.html" title="General Index"
+             accesskey="I">index</a>
+        </div>
+       </div>
+    </div>
+
+    <div class="content-wrapper">
+      <div class="content">
+        <div class="document">
+            
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body">
+            
+  <div class="section" id="user-guide">
+<h1>User Guide<a class="headerlink" href="#user-guide" title="Permalink to this headline">¶</a></h1>
+<div class="section" id="common-tasks">
+<h2>Common Tasks<a class="headerlink" href="#common-tasks" title="Permalink to this headline">¶</a></h2>
+<p>Here is a list of the most common ways people use the Dataverse Network.
+Activities can be grouped into finding and using data or publishing
+data. A brief description of each activity follows with more detailed
+information available in the Users Guide.</p>
+<div class="section" id="finding-data">
+<h3>Finding Data<a class="headerlink" href="#finding-data" title="Permalink to this headline">¶</a></h3>
+<p>Visitors to the site can browse dataverses looking for data of
+interest or they can search by keywords. There are Basic and Advanced
+Searches.</p>
+<p><strong>Browsing the Site</strong></p>
+<p>The Network Homepage presents a list of recently released dataverses on the left side of the page.
+A dataverse is a container for studies that can be managed as a group by the dataverse administrator.
+Most often a dataverse represents a single organization or scholar and so their studies are often related.
+On the right side of the page there are lists of both recently released studies and studies that have been
+downloaded most often.  At the bottom of these lists, the View More link brings the user to a complete list
+of released dataverses or studies as applicable.  The home page also includes a scrolling list of dataverse
+collections called subnetworks, if applicable.</p>
+<p>Clicking on the name of a dataverse, study or subnetwork displays its home page.</p>
+<p><strong>Browsing Dataverses</strong></p>
+<p>If you click the View More link under the recently released dataverse list on the Network Homepage you&#8217;ll be brought to
+the Browse Dataverses page.  Here you can sort the dataverses by Name, Affiliation, Release Date and Download Count.  You
+may also filter the dataverses by typing a filter term in the &#8220;filter&#8221; text box.  The filter will only display those
+dataverses whose name or affiliation matches the filter term.  Clicking on the name of a dataverse displays its home page.</p>
+<p><strong>Search</strong></p>
+<p>For many purposes, Basic Search is sufficient. On the center top of the network homepage enter keywords or
+complete sentences and click <strong>Search</strong>. A resulting list of studies is
+displayed. Further refinement can be made by clicking facets such as
+&#8220;Original Dataverse&#8221; or &#8220;Author&#8221; under &#8220;Refine Results&#8221; on the left side
+of the page. After a facet has been clicked, it will appear at the top
+of the page under &#8220;Search Results for&#8221; and clicking the selected facet
+will remove it, restoring the previous results. In addition to the
+network homepage, Basic Search can be found on the upper right of the
+dataverse home pages as well as on the search results and Advanced
+Search pages.  Be aware that searching from a dataverse limits the scope
+of search to studies within that dataverse while searching from the
+network home page searches all released studies.</p>
+<p>When a more specific search is needed, use Advanced Search. Advanced
+Search allows searching on keywords found in specific cataloging
+information fields, in particular collections in a dataverse where
+available, or by variable name. The link to Advanced Search is next to
+the Basic Search feature on the network and dataverse home pages and the
+search results page.</p>
+</div>
+<div class="section" id="using-data">
+<h3>Using Data<a class="headerlink" href="#using-data" title="Permalink to this headline">¶</a></h3>
+<p>Data in the Dataverse Network is stored in files. Files of any
+type are allowed but some types of tabular and network data files are
+supported by additional functionality, including downloading in
+different formats, downloading subsets of variables, and analytical
+tools.</p>
+<p><strong>Download Files</strong></p>
+<p>To download files, click on a study of interest, then select the
+data tab. Individual files can be downloaded or groups of files by
+checking files of interest or entire file categories and clicking
+Download All Selected Files. Groups of files are packaged into a single
+<tt class="docutils literal"><span class="pre">.zip</span></tt> file. Group downloads have a download size limit and any selected
+files not downloaded will be indicated in the <tt class="docutils literal"><span class="pre">.zip</span></tt> file.</p>
+<p>Downloading individual files in an alternate format where available is
+straightforward. Choose the format from the Download As select box next
+to the file and the file will download.</p>
+<p><strong>Subset or Analyze Files</strong></p>
+<p>Tabular and Network data files of recognized formats (Stata, SPSS, RData,
+GraphML) can be further manipulated through downloading subsets of
+variables and by performing various statistical analyses. Where
+available these options appear as an additional link, Access
+Subset/Analysis, below the Download As format select box next to each
+file. The functionality is quite different for tabular versus network
+data files so refer to the Users Guide for additional information.</p>
+</div>
+<div class="section" id="publishing-data">
+<h3>Publishing Data<a class="headerlink" href="#publishing-data" title="Permalink to this headline">¶</a></h3>
+<p>Publishing data through the Dataverse Network is straightforward:
+create an account and a place to store your data, organize your data,
+upload files, and release your data for public access.</p>
+<p><strong>Create a Dataverse and Account</strong></p>
+<p>The first step to publishing your data is to create a place to
+store it that can be managed by you. To do this you need an account.
+Create a dataverse and account by clicking on the Create a Dataverse
+link on the upper right side of the network homepage. This leads you
+through a series of steps at the end of which you will have a dataverse
+and user account to manage it.</p>
+<p>Newly created dataverses are unreleased and not available for
+browsing. Make note of the link to your dataverse at the end of the
+process so you can return to it until it becomes released. Another way
+to access your unreleased dataverse is to log in, click on your user
+name in the upper right of the page, dataverses tab, then the name of
+your dataverse.</p>
+<p><strong>Create Studies</strong></p>
+<p>Once you have a user account and a place to store your data, you
+need to take the first step toward organizing your data into studies.
+In many cases your data has been or will be used to publish a study, so
+this step may be clear. If not, a study should represent a particular thesis or
+inquiry with accompanying data. First, log in with your new user account
+and navigate to your dataverse home page. Next, click Options in the
+upper right of the page. From there click Create a Study and complete
+the form. Most of the fields on the study form are optional; only the
+title is required. If you are unsure of what these values should be,
+enter a title and these fields can be completed later before releasing
+the study.</p>
+<p>Be aware that a newly created study is unreleased and not available
+for browsing. To access an unreleased study for further editing, click
+on Options-&gt;Manage Studies and click on your study&#8217;s name. You can also
+click on your username, studies tab, then the study name.</p>
+<p><strong>Upload Files</strong></p>
+<p>Now that you have a place to store and manage your data and a
+study to associate it with, you can upload your data and documentation
+files. Files are uploaded to a study. Navigate to the study you want to
+upload particular files to and click on Add Files on the upper right
+side of the page. The add files page requires you to first select a file
+type, then browse for the file on your local system. Some file types
+undergo additional processing to support extended functionality but if
+you are unsure which type to choose, select Other. At this time you can
+enter a descriptive Category which can be used to group related files
+and a file description. If you are unsure of these values they can be
+added later.</p>
+<p>Though files are selected individually, several files can be added
+to this page at one time. It is recommended to upload only a few files
+at a time since this can take some time to complete, depending on file
+type.</p>
+<p>An alternative to selecting files individually is to first create an
+archive of files in <tt class="docutils literal"><span class="pre">.zip</span></tt> or <tt class="docutils literal"><span class="pre">.tar</span></tt> format and then select the
+appropriate &#8220;multiple files&#8221; Data Type when uploading your archive. The
+zip file or tarball will be unpacked so that the individual files will
+be added to the page.</p>
+<p>If you upload an SPSS (<tt class="docutils literal"><span class="pre">.por</span></tt>, <tt class="docutils literal"><span class="pre">.sav</span></tt>), Stata (<tt class="docutils literal"><span class="pre">.dta</span></tt>) or R
+(<tt class="docutils literal"><span class="pre">.RData</span></tt>) file, your study will be temporarily unavailable for
+editing until the additional processing on the file is completed. This
+can be brief or take some time depending on the size and complexity of
+the file. A message at the top of the file indicates it is unavailable
+for editing and an email will be sent when finished to the address you
+indicate on the add files page.</p>
+<p><strong>Release Studies</strong></p>
+<p>Once your study is in a state where it&#8217;s ready to be published or
+shared with others, it should be released. This is done either by
+clicking Release on the upper right of the study page or by navigating
+to your dataverse, clicking Options, Manage Studies, then clicking
+release next to the study you want released. Note that releasing a study
+fixes the version number. Additional changes to the study will create a
+new draft version. The draft can be repeatedly edited without changing
+the version number until it is released. At this point your study is
+visible within your dataverse. If your dataverse is also released it
+will be searchable and viewable by others. If your dataverse is not yet
+released, it will only be visible to people with access to your
+dataverse.</p>
+<p><strong>Release Dataverse</strong></p>
+<p>Releasing a dataverse makes it appear in the list of dataverses on
+the network home page and makes it viewable by others. This may require
+adding a study or other details to your dataverse depending on site
+policy. By default, releasing a dataverse requires nothing but changing
+the Dataverse Release Settings to Released on the Manage Permissions
+page. To release your dataverse, navigate to the dataverse home page,
+choose Options from the upper right of the page, click on Dataverse
+Settings, then Manage Permissions. At the top of the page, change
+Dataverse Release Settings to Released and click Save Changes.</p>
+<p>Any studies that are released are now visible to others. Those
+that are unreleased do not appear in the list of studies on the
+dataverse home page.</p>
+<p>At this point you have published one or more studies and their data and
+made them available for browsing or searching.</p>
+</div>
+<div class="section" id="things-to-consider-next-steps">
+<h3>Things to Consider, Next Steps<a class="headerlink" href="#things-to-consider-next-steps" title="Permalink to this headline">¶</a></h3>
+<p>The above tasks are fundamental activities and may be all that is
+needed for most users. Some situations are more complex and require
+additional consideration. These include publishing and organizing data
+for large organizations, shared research between scholars, and enabling
+contributions by a geographically diverse team while keeping data
+private until ready for publication.</p>
+<p>For <strong>large organizations</strong>, a single dataverse may suffice. Collections
+within a dataverse can further organize studies by sub unit or topic.
+The dataverse itself can be <strong>customized</strong> with the organization&#8217;s own
+website header and footer. In some cases, sub units or organizations
+want to maintain their own distinct branding. In such cases each can
+create and maintain their own dataverse and the parent dataverse can
+link to their studies through a link collection.</p>
+<p>For <strong>shared research</strong>, the model is similar: a single dataverse based
+on the research project can be created to which both researchers have
+administration rights. Additionally, researchers can maintain their own
+dataverses for other work and link back to the studies in the shared
+project dataverse.</p>
+<p><strong>Allowing a diverse team to contribute</strong> to an unreleased dataverse is
+simply a matter of granting the appropriate level of <strong>permissions</strong> to
+each team member. At minimum, each team member would need to be added as
+a contributor to the dataverse. By default, they can only contribute to
+studies they themselves have created. However, this can be expanded from
+the dataverse Manage Permissions page to allow contributors to edit all
+studies in the dataverse. Changes made by contributors need to be
+approved by a curator or admin before a study can be released.</p>
+</div>
+<div class="section" id="how-the-guides-are-organized">
+<h3>How the Guides Are Organized<a class="headerlink" href="#how-the-guides-are-organized" title="Permalink to this headline">¶</a></h3>
+<p>The guides are reference documents that explain how to use
+the Dataverse Network functionality: Installers Guide, Developers Guide, APIs Guide, and Users
+Guide. The Users Guide is further divided into primary activities: using
+data, creating studies, administering dataverses or the network. Details
+on all of the above tasks can be found in the Users Guide. The
+Installers Guide is for people or organizations who want to host their
+own Dataverse Network. The Developers Guide contains instructions for
+people who want to contribute to the Open Source Dataverse Network
+project or who want to modify the code to suit their own needs. Finally, the
+APIs Guide is for people who would like to use our APIs in order to build apps that
+can work with the Dataverse Network web application. This <a class="reference external" href="http://thedata.org/book/apps">page</a> lists some current apps
+which have been developed with our APIs.</p>
+</div>
+<div class="section" id="other-resources">
+<h3>Other Resources<a class="headerlink" href="#other-resources" title="Permalink to this headline">¶</a></h3>
+<p><strong>Dataverse Network Project Site</strong></p>
+<p>Additional information about the Dataverse Network project itself
+including presentations, information about upcoming releases, data
+management and citation, and announcements can be found at
+<a class="reference external" href="http://thedata.org/">http://thedata.org</a></p>
+<p><strong>User Group</strong></p>
+<p>As the user community grows, we encourage people to share ideas, ask
+questions, or offer suggestions for improvement. Go to
+<a class="reference external" href="https://groups.google.com/group/dataverse-community">https://groups.google.com/group/dataverse-community</a> to register to our dataverse community group.</p>
+<p><strong>Follow Us on Twitter</strong></p>
+<p>For up to date news, information and developments, follow our twitter account: <a class="reference external" href="https://twitter.com/thedataorg">https://twitter.com/thedataorg</a></p>
+<p><strong>Support</strong></p>
+<p>We maintain an email-based support service that&#8217;s free of charge. We
+attempt to respond to all questions within one business day, and if an
+issue cannot be resolved immediately, we&#8217;ll let you know what to expect.</p>
+</div>
+<div class="section" id="contact-us">
+<h3>Contact Us<a class="headerlink" href="#contact-us" title="Permalink to this headline">¶</a></h3>
+<p>The support email address is
+<a class="reference external" href="mailto:support&#37;&#52;&#48;thedata&#46;org">support<span>&#64;</span>thedata<span>&#46;</span>org</a>.</p>
+<p>This is the same address as the Report Issue link. We try to respond
+within one business day.</p>
+</div>
+</div>
+<div class="section" id="finding-and-using-data">
+<span id="id1"></span><h2>Finding and Using Data<a class="headerlink" href="#finding-and-using-data" title="Permalink to this headline">¶</a></h2>
+<p>End users, without needing to log in to the Dataverse Network, can browse
+dataverses, search studies, view study description and data files for
+public studies, and subset, analyze and visualize data for public data
+files. If entire studies or individual data files are restricted, end
+users need to be given permission from the dataverse administrator to
+access the data.</p>
+<div class="section" id="search">
+<h3>Search<a class="headerlink" href="#search" title="Permalink to this headline">¶</a></h3>
+<p>To find a study or data set, you can search or browse studies offered
+in any released dataverse on the Network homepage. Each dataverse offers
+a hierarchical organization comprising one or more collections of data
+sets with a particular theme. Most dataverses allow you to search for
+data within their files, or you can start browsing through the dataverse
+classifications that are closest to your substantive interests.</p>
+<p><strong>Browse Collections</strong></p>
+<p>You can browse all public dataverses from the Network homepage. Click
+the title of a dataverse to browse that dataverse&#8217;s collections and
+studies. Click the title of a collection to view a list of studies and
+subcollections for that selection. Click the title of a study to view
+the Cataloging Information and study files for that selection.</p>
+<p>When you select a dataverse to view its contents, the homepage opens to
+the&nbsp;<em>root collection</em>, and the dataverse&#8217;s studies are displayed
+directly under the root collection name. If the root collection contains
+other collections, then those collections are listed and not the studies
+within them. You must select a collection title to view the studies
+contained within it.</p>
+<p>Note: If a dataverse includes links to collections from another
+dataverse and the root collection does not contain other collections,
+the homepage opens to a list of the root and linked collections.</p>
+<p><strong>Search - Basic</strong></p>
+<p>You can search for studies across the entire Dataverse Network from the
+Network homepage, or search within a dataverse from the dataverse
+homepage. When you search across the Network, studies from restricted
+dataverses are not included in the search. Restricted studies themselves,
+however, are included in search results, and a lock icon appears beside
+them in the results list. After your search is complete, you can further
+narrow your list of data by searching again in the results. See Search
+Tips for search examples and guidelines.</p>
+<p>When you enter more than one term in the search text field, the results
+list contains studies that have these terms near each other within the
+study fields searched. For example, if you enter <tt class="docutils literal"><span class="pre">United</span> <span class="pre">Nations</span></tt>,
+the results include studies where the words <em>United</em> and <em>Nations</em> are
+separated by no more than four words in the same study field, such as
+abstract or title.</p>
+<p>Basic search matches terms in any field of the studies&#8217; Cataloging
+Information, which includes citation information, abstract and other
+scope-related information, methodology, and Terms of Use. File
+descriptions are searched as well.</p>
+<p><strong>Search - Advanced</strong></p>
+<p>In an advanced search, you can refine your criteria by choosing which
+Cataloging Information fields to search. You also can apply logic to the
+field search. For text fields, you can specify that the field searched
+either <em>contains</em> or <em>does not containthe text that you enter. For
+date fields, you can specify that the field searched is either *later
+than</em> nor <em>earlier than</em> the date that you enter. Refer to
+the <a class="reference external" href="http://lucene.apache.org/java/docs/">Documentation</a>  page for
+the latest version at the Lucene website and look for <em>Query Syntax</em> for full details.</p>
+<p>To perform an advanced search, click the Advanced Search link at the
+top-right of the Search panel. You can search the following study
+metadata fields by using the Search Scope drop-down list:</p>
+<ul class="simple">
+<li>Title - Title field of studies&#8217; Cataloging Information.</li>
+<li>Author - Author fields of studies&#8217; Cataloging Information.</li>
+<li>(Study) Global ID - ID assigned to studies.</li>
+<li>Other ID - A different ID previously given to the study by another
+archive.</li>
+<li>Abstract - Any words in the abstract of the study.</li>
+<li>Keyword - A term that defines the nature or scope of a study. For
+example, <tt class="docutils literal"><span class="pre">elections</span></tt>.</li>
+<li>Keyword Vocabulary - Reference to the standard used to define the
+keywords.</li>
+<li>Topic Classification - One or more words that help to categorize the
+study.</li>
+<li>Topic Classification Vocabulary - Reference used to define the Topic
+Classifications.</li>
+<li>Producer - Institution, group, or person who produced the study.</li>
+<li>Distributor - Institution that is responsible for distributing the
+study.</li>
+<li>Funding Agency - Agency that funded the study.</li>
+<li>Production Date - Date on which the study was created or completed.</li>
+<li>Distribution Date - Date on which the study was distributed to the
+public.</li>
+<li>Date of Deposit - Date on which the study was uploaded to the
+Network.</li>
+<li>Time Period Cover Start - The beginning of the period covered by the
+study.</li>
+<li>Time Period Cover End - The end of the period covered by the study.</li>
+<li>Country/Nation - The country or countries where the study took place.</li>
+<li>Geographic Coverage - The geographical area covered by the study. For
+example, <tt class="docutils literal"><span class="pre">North</span> <span class="pre">America</span></tt>.</li>
+<li>Geographic Unit - The smallest geographic unit in which the study
+took place, such as <tt class="docutils literal"><span class="pre">state</span></tt>.</li>
+<li>Universe - Universe of interest, population of interest, or target
+population.</li>
+<li>Kind of Data - The type of data included in the file, such
+as <tt class="docutils literal"><span class="pre">survey</span> <span class="pre">data</span></tt>, <tt class="docutils literal"><span class="pre">census/enumeration</span> <span class="pre">data</span></tt>,
+or <tt class="docutils literal"><span class="pre">aggregate</span> <span class="pre">data</span></tt>.</li>
+<li>Variable Information - The variable name and description in the
+studies&#8217; data files, given that the data file is subsettable and
+contains tabular data. It returns the studies that contain the file
+and the variable name where the search term was found.</li>
+</ul>
+<p><strong>Sort Results</strong></p>
+<p>When your search is complete, the results page lists studies that met
+the search criteria in order of relevance. For example, a study that
+includes your search term within the Cataloging Information in ten
+places appears before a study that includes your search term in the
+Cataloging Information in only one place.</p>
+<p>You can sort search results by title, study ID, last updated, or number
+of downloads (that is, the number of times users downloaded any file
+belonging to that study). Click the Sort By drop-down list to choose
+your sort order.</p>
+<p><strong>Search Tips</strong></p>
+<p>Use the following guidelines to search effectively within a Network or a
+dataverse:</p>
+<ul>
+<li><p class="first">The default search syntax uses <tt class="docutils literal"><span class="pre">AND</span></tt> logic within individual
+fields. That is, if you enter more than one term, the search engine
+looks for all terms within a single field, such as title or abstract.
+For example, if you enter <tt class="docutils literal"><span class="pre">United</span> <span class="pre">Nations</span> <span class="pre">report</span></tt>, the results
+list any studies that include the terms <em>United</em>, <em>Nations</em>,
+and <em>report</em> within a single metadata field.</p>
+</li>
+<li><p class="first">The search logic looks for multiple terms within a specific proximity
+to one another, and in the same field. The current proximity criteria
+is four words. That is, if you enter two search terms, both terms
+must be within four words of each other in the same field to be
+returned as a result.
+For example, you might enter <tt class="docutils literal"><span class="pre">10</span> <span class="pre">year</span></tt> in a basic search. If a
+study includes the string <em>10 million deaths per year</em> within a
+metadata field, such as abstract, that study is not included in the
+search results. A study that contains the string <em>10 per year</em> within the abstract field is included in the search results.</p>
+</li>
+<li><p class="first">During the index process that supports searches, periods are removed
+in strings and each term between periods is indexed individually. If
+you perform a basic search for a term that contains one or more
+periods, the search works because the analyzer applies
+the <em>AND</em> logic. If you search on a specific field, though, note
+that you should specify individually each component of the string
+between periods to return your results.</p>
+</li>
+<li><p class="first">You can enter one term in the search field, and then search within
+those results for another term to narrow the results further. This
+might be more effective than searching for both terms at one time, if
+those terms do not meet the proximity and field limits specified
+previously.
+You could search first for an author&#8217;s name, and then search those
+results for a specific term in the title. If you try searching for
+both terms in the author and title fields together, you might not
+find the study for which you are looking.
+For example, you can search the Harvard Dataverse Network for the
+following study:</p>
+<blockquote>
+<div><p><em>Gary King; Will Lowe, 2003, &#8220;10 Million International Dyadic
+Events&#8221;, hdl:1902.1/FYXLAWZRIA UNF:3:um06qkr/1tAwpS4roUqAiw==
+Murray Research Archive [Distributor]</em></p>
+</div></blockquote>
+<p>If you type <tt class="docutils literal"><span class="pre">King,</span> <span class="pre">10</span> <span class="pre">Million</span></tt> in the Search field and click
+Search, you see <tt class="docutils literal"><span class="pre">0</span> <span class="pre">matches</span> <span class="pre">were</span> <span class="pre">found</span></tt> in the Results field. If
+you type <tt class="docutils literal"><span class="pre">10</span></tt> in the Search field and click Search, you see
+something like <tt class="docutils literal"><span class="pre">1621</span> <span class="pre">matches</span> <span class="pre">were</span> <span class="pre">found</span></tt> in the Results field.
+But if you first type <tt class="docutils literal"><span class="pre">King</span></tt> in the Search field and click
+Search, then type <tt class="docutils literal"><span class="pre">10</span> <span class="pre">Million</span></tt> in the Search field and click
+Search again, you see something like <tt class="docutils literal"><span class="pre">4</span> <span class="pre">matches</span> <span class="pre">were</span> <span class="pre">found</span></tt> in the
+Results field.</p>
+</li>
+</ul>
+</div>
+<div class="section" id="view-studies-download-data">
+<h3>View Studies / Download Data<a class="headerlink" href="#view-studies-download-data" title="Permalink to this headline">¶</a></h3>
+<p><strong>Cataloging Information</strong></p>
+<p>When a study is created, a set of <em>metadata</em> is associated with that
+study. This metadata is called the <em>Cataloging Information</em> for the
+study. When you select a study to view it, you first see the Cataloging
+Information tab listing the metadata associated with that study. This is
+the default view of a study.</p>
+<p>Cataloging Information contains numerous fields that help to describe
+the study. The amount of information you find for each study varies,
+based on what was entered by the author (Contributor) or Curator of that
+study. For example, one study might display the distributor, related
+material, and geographic coverage. Another study might display only the
+authors and the abstract. Every study includes the <em>Citation Information</em> fields in the Cataloging Information.</p>
+<p>Note: A comprehensive list of all Cataloging Information fields is
+provided in the <a class="reference internal" href="#metadata-references"><em>List of Metadata References</em></a></p>
+<p>Cataloging Information is divided into four sections. These sections and
+their details are displayed only when the author (Contributor) or
+Curator provides the information when creating the study. Sections
+consist of the following:</p>
+<ul class="simple">
+<li>Citation Information - These fields comprise
+the <a class="reference external" href="http://thedata.org/citation">citation</a> for the study,
+consisting of a global identifier for all studies and a UNF, or
+Universal Numerical Fingerprint, for studies that contain subsettable
+data files. It also can include information about authors, producers
+and distributors, and references to related studies or papers.</li>
+<li>Abstract and Scope - This section describes the research study, lists
+the study&#8217;s data sets, and defines the study&#8217;s geographical scope.</li>
+<li>Data Collection/Methodology - This section includes the technical
+details of how the author obtained the data.</li>
+<li>Terms of Use - This information explains that the study requires
+users to accept a set of conditions or agreements before downloading
+or analyzing the data. If any <em>Terms of Use</em> text is displayed in
+the Cataloging Information section, you are prompted to accept the
+conditions when you click the download or analyze icons in the Files
+page.
+Note: A study might not contain Terms of Use, but in some cases the
+original parent dataverse might have set conditions for all studies
+owned by that dataverse. In that case, the conditions are inherited
+by the study and you must accept these conditions before downloading
+files or analyzing the data.</li>
+</ul>
+<p>Study metadata can be downloaded in XML format using a link at the bottom
+of the study Cataloging Information tab:  <a class="reference external" href="https://thedata.harvard.edu/dvn/api/metadata/91148?partialExclude=codeBook/dataDscr">DDI (without variables)</a>
+/ <a class="reference external" href="https://thedata.harvard.edu/dvn/api/metadata/91148">DDI (full)</a>.
+These links appear for released studies whose metadata has been exported.
+Studies are typically exported on a daily basis.</p>
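+<p>Because these links are plain HTTP GETs, the same metadata can be fetched
+programmatically. A sketch using the study ID from the example links above
+(the output file names are arbitrary):</p>
+<pre class="literal-block">
+# Full DDI, including variable-level metadata:
+curl -o study_91148_full.xml "https://thedata.harvard.edu/dvn/api/metadata/91148"
+
+# DDI without the variable-level data description:
+curl -o study_91148.xml "https://thedata.harvard.edu/dvn/api/metadata/91148?partialExclude=codeBook/dataDscr"
+</pre>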
+<p><strong>List of Study Files</strong></p>
+<p>When you view a study, click the Documentation, Data and Analysis tab to
+view a list of all electronic files associated with the study that were
+provided by the author or Curator.</p>
+<p>A study might contain documentation, data, or other files. When the
+study contributor uploads data files of the type <tt class="docutils literal"><span class="pre">.dta</span></tt>, <tt class="docutils literal"><span class="pre">.sav</span></tt>, or <tt class="docutils literal"><span class="pre">.por</span></tt> to the Network, those files are converted
+to <tt class="docutils literal"><span class="pre">.tab</span></tt> tab-delimited files. These <tt class="docutils literal"><span class="pre">.tab</span></tt> files
+are subsettable, and can be subsetted and analyzed online by using the Dataverse Network
+application.</p>
+<p>Data files of the type <tt class="docutils literal"><span class="pre">.xml</span></tt> also are considered to be subsettable,
+and can be subsetted and analyzed to a minimal degree online.
+An <tt class="docutils literal"><span class="pre">.xml</span></tt> type file indicates social network data that complies with
+the <a class="reference external" href="http://graphml.graphdrawing.org/">GraphML</a> file format.</p>
+<p>You can identify a subsettable data file by the <em>Subsetting</em> label and
+the number of cases and variables listed next to the file name. Other
+files that also contain data might be associated with a study, but the
+Dataverse Network application does not recognize them as data (or
+subsettable) files.</p>
+<p><strong>Download Study Files</strong></p>
+<p>You can download any of the following within a study:</p>
+<ul class="simple">
+<li>All or selected data files within a <em>study</em> or a <em>category</em> (type
+of files)</li>
+<li>Individual <em>data files</em></li>
+<li>Individual subsets within a data file (see <a class="reference internal" href="#tabular-data"><em>Subset and Analyze
+Tabular Data Sets</em></a>
+or <a class="reference internal" href="#network-data"><em>Subset and Analyze Network Data Sets</em></a> for details)</li>
+</ul>
+<p>The default format for subsettable tabular data file downloads
+is <em>tab-delimited</em>. When you download one or more subsettable files in
+tab-delimited format, the file contains a header row. When you download
+one subsettable file, you can select from the following formats in
+addition to tab-delimited:</p>
+<ul class="simple">
+<li>Original file</li>
+<li>Splus</li>
+<li>Stata</li>
+<li>R</li>
+</ul>
+<p>The default format for subsettable network data file downloads
+is <em>Original file</em>. In addition, you can choose to download network
+data files in <em>GraphML</em> format.</p>
+<p>If you select any other format for a tabular data file, the file is
+downloaded in a zipped archive. You must unzip the archive to view or
+use the individual data file.</p>
+<p>If you download all or a selection of data files within a study, the
+files are downloaded in a zipped archive, and the individual files are
+in tab-delimited or network format. You must unzip the archive to view
+or use the individual data files.</p>
+<p>Note: Studies and data files often have user restrictions applied. If
+prompted to accept Terms of Use for a study or file, check the <em>I Accept</em> box and then click the Continue button to view or download the
+file.</p>
+<p><strong>User Comments</strong></p>
+<p>If the User Comment feature is enabled within a dataverse, users are
+able to add comments about a study within that dataverse.</p>
+<p>When you view a study, click the User Comments tab to view all comments
+associated with the study. Comments can be monitored and abuse reported
+to the Network admin, who has permission to remove any comments deemed
+inappropriate. Note that the dataverse admin does not have permission to
+remove comments, to prevent bias.</p>
+<p>If you choose, you also can add your own comments to a study from the
+User Comments tab. See <a class="reference internal" href="#edit-study-comments-settings"><em>Comment on Studies or Data</em></a> for
+detailed information.</p>
+<p>Note: To add a comment to a study, you must register and create an
+account in the dataverse that owns the study about which you choose to
+comment. This helps to prevent abuse and SPAM issues.</p>
+<p><strong>Versions</strong></p>
+<p>Upon creating a study, a version is created. This is a way to archive
+the&nbsp;<em>metadata</em> and&nbsp;<em>data files</em>&nbsp;associated with the study citation
+or UNF.</p>
+<p><strong>View Citations</strong></p>
+<p>You can view a formatted citation for any of the following entities
+within the Dataverse Network application:</p>
+<ul class="simple">
+<li>Studies - For every study, you can view a citation for that study.
+Go to the Cataloging Information tab for a study and view the&nbsp;<em>How
+to Cite</em> field.</li>
+<li>Data sets - For any data set, you can view a citation for that set.
+Go to the Documentation, Data and Analysis tab for a study to see the
+list of study files. To view the citation for any data set click
+the&nbsp;<em>View Data Citation</em> link associated with that subsettable
+file.</li>
+<li>Data subsets - If you subset and analyze a data set, you can view a
+citation for each subset.
+See <a class="reference internal" href="#apply-descriptive-statistics"><em>Apply Descriptive Statistics</em></a> or <a class="reference internal" href="#perform-advanced-analysis"><em>Perform Advanced Analysis</em></a> for
+detailed information.
+Also, when you download a workspace file, a copy
+of the citation information for that subset is provided in the
+download.</li>
+</ul>
+<p>Note: For individual variables within a subsettable data subset, you can
+view the <a class="reference external" href="http://thedata.org/citation/tech">UNF</a> for that variable.
+This is not a full citation for the variable, but it is one component of
+that citation. Note also that this does not apply to <tt class="docutils literal"><span class="pre">.xml</span></tt> data.</p>
+</div>
+<div class="section" id="subset-and-analysis">
+<h3>Subset and Analysis<a class="headerlink" href="#subset-and-analysis" title="Permalink to this headline">¶</a></h3>
+<p>Subsetting and analysis can be performed on tabular and network data
+files. Refer to the appropriate section for more details.</p>
+<div class="section" id="tabular-data">
+<span id="id2"></span><h4>Tabular Data<a class="headerlink" href="#tabular-data" title="Permalink to this headline">¶</a></h4>
+<p>Tabular data files (subsettable files) can be subsetted and analyzed
+online by using the Dataverse Network application. For analysis, the
+Dataverse Network offers a user interface to Zelig, a powerful, R-based
+statistical computing tool. A comprehensive set of Statistical Analysis
+Models are provided.</p>
+<p>After you find the tabular data set that you want, access the Subset
+and Analysis options to use the online tools. Then, you can&nbsp;<em>subset
+data by variables or observations</em>, translate it into a convenient
+format, download subsets, and apply statistics and analysis.</p>
+<p>Network data files (also subsettable) can be subsetted online, and then
+downloaded as a subset. Note that network data files cannot be analyzed
+online.</p>
+<p>Review the Tabular Data Subset and Recode Tips before you start.</p>
+<p><strong>Access Subset and Analysis Options</strong></p>
+<p>You can subset and analyze tabular data files before you download the
+file or your subsets.</p>
+<p>To access the Subset and Analysis options for a data set:</p>
+<ol class="arabic simple">
+<li>Click the title of the study from which you choose to analyze or
+download a file or subset.</li>
+<li>Click the Documentation, Data and Analysis tab for the study.</li>
+<li>In the list of study files, locate the data file that you choose to
+download, subset, or analyze.
+You can download data sets for a file only if the file entry includes
+the subset icon.</li>
+<li>Click the <em>Access Subset/Analysis</em>&nbsp;link associated with the
+selected file.
+If prompted, check the <em>I accept</em> box and click Continue to accept
+the Terms of Use.
+You see the Data File page listing data for the file that you choose
+to subset or analyze.</li>
+</ol>
+<p><strong>View Variable Quick Summary</strong></p>
+<p>When a subsettable data file is uploaded for a study, the Dataverse
+Network code calculates summary statistics for each variable within that
+data file. On any tab of the Data File page, you can view the summary
+statistics for each variable in the data file. Information listed
+comprises the following:</p>
+<ul class="simple">
+<li>For continuous variables, the application calculates summary
+statistics that are listed in the DDI schema.</li>
+<li>For discrete variables, the application tabulates values and their
+labels as a frequency table.
+Note, however, that if the number of categories is more than 50, the
+values are not tabulated.</li>
+<li>The UNF value for each variable is included.</li>
+</ul>
+<p>To view summary statistics for a variable:</p>
+<ol class="arabic simple">
+<li>In the Data File page, click any tab.</li>
+<li>In the variable list on the bottom of the page, the right column is
+labeled <em>Quick Summary</em>.
+Locate a variable for which you want to view summary statistics.
+Then, click the Quick Summary icon for that variable to toggle the
+statistics information on and off.
+You see a small chart that lists information about that variable. The
+information provided depends upon the variable selected.</li>
+</ol>
+<p><strong>Download Tabular Subsets</strong></p>
+<p>You can download a subset of variables within a tabular-data study file.
+You also can recode a subset of those variables and download the recoded
+subset, if you choose.</p>
+<p>To download a subset of variables in tabular data:</p>
+<ol class="arabic simple">
+<li>In the Data File page, click the Download Subset tab.</li>
+<li>Click the radio button for the appropriate File Format in which to
+download the variables: Text, R Data, S plus, or Stata.</li>
+<li>On the right side of the tab, use the Show drop-down list to select
+the quantities of variables to list at one time: 10, 20, 50, or All.</li>
+<li>Scroll down the screen and click the check boxes to select variables
+from the table of available values. When you select a variable, it is
+added to the Selected Variables box at the top of the tab.
+To remove a variable from this box, deselect it from the Variable
+Type list at the bottom of the screen.
+To select all variables, click the check box beside the column name,
+Variable Type.</li>
+<li>Click the <em>Create Zip File</em> button.
+The <em>Create Zip File</em> button label changes to the following
+format: <tt class="docutils literal"><span class="pre">zipFile_&lt;number&gt;.zip</span></tt>.</li>
+<li>Click the <tt class="docutils literal"><span class="pre">zipFile_&lt;number&gt;.zip</span></tt> button and follow your browser&#8217;s
+prompts to open or save the data file to your computer&#8217;s disk drive.</li>
+</ol>
+<p id="apply-descriptive-statistics"><strong>Apply Descriptive Statistics</strong></p>
+<p>When you run descriptive statistics for data, you can do any of the
+following with the analysis results:</p>
+<ul class="simple">
+<li>Open the results in a new window to save or print the results.</li>
+<li>Download the R workspace in which the statistics were analyzed, for
+replication of the analysis. See Replicate Analysis for more
+information.</li>
+<li>View citation information for the data analyzed, and for the full
+data set from which you selected variables to analyze. See View
+Citations for more information.</li>
+</ul>
+<p>To apply descriptive statistics to a data set or subset:</p>
+<ol class="arabic simple">
+<li>In the Data File page, click the Descriptive Statistics tab.</li>
+<li>Click one or both of the Descriptive Statistics options: Univariate
+Numeric Summaries and Univariate Graphic Summaries.</li>
+<li>On the right side of the tab, use the Show drop-down list to select
+one of the following options to show variables in predefined
+quantities: 10, 20, 50, or All.</li>
+<li>Scroll down the screen and click the check boxes to select variables
+from the table of available values. When you select a variable, it is
+added to the Selected Variables box at the top of the tab.
+To remove a variable from this box, deselect it from the Variable
+Type list at the bottom of the screen.
+To select all variables, click the check box beside the column name,
+Variable Type.</li>
+<li>Click the Run Statistics button.
+You see the Dataverse Analysis page.</li>
+<li>To save or print the results, scroll to the Descriptive Statistics
+section and click the link <em>Open results in a new window</em>. You then
+can print or save the window contents.
+To save the analysis, scroll to the Replication section and click the
+button <em>zipFile_&lt;number&gt;.zip</em>.
+Review the Citation Information for the data set and for the subset
+that you analyzed.</li>
+<li>Click the link <em>Back to Analysis and Subsetting</em> to return to the
+previous page and continue analysis of the data.</li>
+</ol>
+<p><strong>Recode and Case-Subset Tabular Data</strong></p>
+<p>Review the Tabular Data Recode and Subset Tips before you start work
+with a study&#8217;s files.</p>
+<p>To recode and subset variables within a tabular data set:</p>
+<ol class="arabic simple">
+<li>In the Data File page, click the Recode and Case-Subsetting tab.</li>
+<li>On the right side of the variable list, use the Show drop-down list
+and select one of the following options to show variables in
+predefined quantities: 10, 20, 50, or All.</li>
+<li>Scroll down the screen and click the check boxes to select variables
+from the table of available values. When you select a variable, it is
+added to the Selected Variables box at the top of the tab.
+To remove a variable from this box, deselect it from the Variable
+Type list at the bottom of the screen.
+To select all variables, click the check box beside the column name,
+Variable Type.</li>
+<li>Select one variable in the Selected Variables box, and then
+click <em>Start</em>.
+The existing name and label of the variable appear in the New
+Variable Name and New Variable Label boxes.</li>
+<li>In the New Variable Label field, change the variable name to a unique
+value that is not used in the data file.
+The new variable label is optional.</li>
+<li>In the table below the Variable Name fields, you can check one or
+more values to drop them from the subset, or enter new values,
+labels, or ranges (as a condition) as needed. Click the Add
+Value/Range button to create more entries in the value table.
+Note: Click the <tt class="docutils literal"><span class="pre">?</span></tt> Info buttons to view tips on how to use the
+Recode and Subset table. Also, see Tabular Data Recode and Subset
+Tips for more information about adding values and ranges.</li>
+<li>Click the Apply Recodes button.
+Your renamed variables appear at the bottom of the page in the List
+of Recode Variables.</li>
+<li>Select another variable in the Selected Variables box, click the
+Start button, and repeat the recode action.
+Repeat this process for each variable that you choose to recode.</li>
+<li>To remove a recoded variable, scroll to the List of Recode Variables
+at the bottom of the page and click the Remove link for the recoded
+variable that you choose to delete from your subset.</li>
+</ol>
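+<p>For orientation, the effect of a drop-and-recode operation can be
+sketched in R. This is an illustrative analogy, not the application&#8217;s
+recode engine; the variable <tt class="docutils literal"><span class="pre">x</span></tt> and its
+values are assumptions:</p>
+<pre class="literal-block">
+# Drop the value 9, then recode x with the condition -3 &lt; x &lt; 3,
+# i.e. the open range (-3, 3).
+dat  &lt;- data.frame(x = c(-5, -2, 0, 2, 9))   # toy data standing in for a file
+dat2 &lt;- dat[dat$x != 9, ]                     # the "Drop" check box equivalent
+dat2$x_recoded &lt;- ifelse(dat2$x &gt; -3 &amp; dat2$x &lt; 3, 1, 0)
+dat2                                          # recoded variable alongside x
+</pre>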
+<p id="perform-advanced-analysis"><strong>Perform Advanced Analysis</strong></p>
+<p>When you run advanced statistical analysis for data, you can do any of
+the following with the analysis results:</p>
+<ul class="simple">
+<li>Open the results in a new window to save or print the results.</li>
+<li>Download the R workspace in which the statistics were analyzed, for
+replication of the analysis. See Replicate Analysis for more
+information.</li>
+<li>View citation information for the data analyzed, and for the full
+data set from which you selected variables to analyze. See View
+Citations for more information.</li>
+</ul>
+<p>To run statistical models for selected variables:</p>
+<ol class="arabic simple">
+<li>In the Data File page, click the Advanced Statistical Analysis tab.</li>
+<li>Scroll down the screen and click the check boxes to select variables
+from the table of available values. When you select a variable, it is
+added to the Selected Variables box at the top of the tab.
+To remove a variable from this box, deselect it from the Variable
+Type list at the bottom of the screen.
+To select all variables, click the check box beside the column name,
+Variable Type.</li>
+<li>Select a model from the Choose a Statistical Model drop-down list.</li>
+<li>Select one variable in the Selected Variables box, and then click the
+applicable arrow button to assign a function to that variable from
+within the analysis model.
+You see the name of the variable in the appropriate function box.
+Note: Some functions allow a specific type of variable only, while
+other functions allow multiple variable types. Types include
+Character, Continuous, and Discrete. If you assign an incorrect
+variable type to a function, you see an <tt class="docutils literal"><span class="pre">Incompatible</span> <span class="pre">type</span></tt> error
+message.</li>
+<li>Repeat the variable and function assignments until your model is
+complete.</li>
+<li>Select your Output options.</li>
+<li>Click the Run Model button.
+If the statistical model that you defined is incomplete, you first
+are prompted to correct the definition. Correct your model, and then
+click Run Model again.
+You see the Dataverse Analysis page.</li>
+<li>To save or print the results, scroll to the Advanced Statistical
+Analysis section and click the link <em>Open results in a new window</em>.
+You then can print or save the window contents.
+To save the analysis, scroll to the Replication section and click the
+button <tt class="docutils literal"><span class="pre">zipFile_&lt;number&gt;.zip</span></tt>.
+Review the Citation Information for the data set and for the subset
+that you analyzed.</li>
+<li>Click the link <em>Back to Analysis and Subsetting</em> to return to the
+previous page and continue analysis of the data.</li>
+</ol>
+<p><strong>Replicate Analysis</strong></p>
+<p>You can save the R workspace in which the Dataverse Network performed an
+analysis. You can download the workspace as a zipped archive that
+contains four files. Together, these files enable you to recreate the
+subset analysis in another R environment:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">citationFile.&lt;identifier&gt;.txt</span></tt> - The citation for the subset that you analyzed.</li>
+<li><tt class="docutils literal"><span class="pre">rhistoryFile.&lt;identifier&gt;.R</span></tt> - The R code used to perform the analysis.</li>
+<li><tt class="docutils literal"><span class="pre">tempsubsetfile.&lt;identifier&gt;.tab</span></tt> - The R object file used to perform the analysis.</li>
+<li><tt class="docutils literal"><span class="pre">tmpRWSfile.&lt;identifier&gt;.RData</span></tt> - The subset data that you analyzed.</li>
+</ul>
+<p>To download this workspace for your analysis:</p>
+<ol class="arabic simple">
+<li>For any subset, Apply Descriptive Statistics or Perform Advanced
+Analysis.</li>
+<li>On the Dataverse Analysis or Advanced Statistical Analysis page,
+scroll to the Replication section and click the
+button <tt class="docutils literal"><span class="pre">zipFile_&lt;number&gt;.zip</span></tt>.</li>
+<li>Follow your browser&#8217;s prompts to save the zipped archive.
+When the archive file is saved to your local storage, extract the
+contents to use the four files that compose the R workspace.</li>
+</ol>
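+<p>Once the archive is extracted, recreating the analysis in a local R
+session can be sketched roughly as follows, with
+<tt class="docutils literal"><span class="pre">&lt;identifier&gt;</span></tt> replaced by the value in
+your own archive; the working directory is an assumption:</p>
+<pre class="literal-block">
+# A minimal replication sketch; replace &lt;identifier&gt; with the actual value.
+setwd("~/dvn-replication")                         # where the zip was extracted
+load("tmpRWSfile.&lt;identifier&gt;.RData")              # restore the saved R workspace
+source("rhistoryFile.&lt;identifier&gt;.R", echo = TRUE) # replay the recorded R code
+</pre>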
+<p><strong>Statistical Analysis Models</strong></p>
+<p>You can apply any of the following advanced statistical models to all or
+some variables in a tabular data set:</p>
+<p>Categorical data analysis: Cross tabulation</p>
+<p>Ecological inference model: Hierarchical multinomial-Dirichlet ecological
+inference for R x C tables</p>
+<p>Event count models, for event count dependent variables:</p>
+<ul class="simple">
+<li>Negative binomial regression</li>
+<li>Poisson regression</li>
+</ul>
+<p>Models for continuous bounded dependent variables:</p>
+<ul class="simple">
+<li>Exponential regression for duration</li>
+<li>Gamma regression for continuous positive values</li>
+<li>Log-normal regression for duration</li>
+<li>Weibull regression for duration</li>
+</ul>
+<p>Models for continuous dependent variables:</p>
+<ul class="simple">
+<li>Least squares regression</li>
+<li>Linear regression for left-censored data</li>
+</ul>
+<p>Models for dichotomous dependent variables:</p>
+<ul class="simple">
+<li>Logistic regression for binaries</li>
+<li>Probit regression for binaries</li>
+<li>Rare events logistic regression for binaries</li>
+</ul>
+<p>Models for ordinal dependent variables:</p>
+<ul class="simple">
+<li>Ordinal logistic regression for ordered categoricals</li>
+<li>Ordinal probit regression for ordered categoricals</li>
+</ul>
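+<p>These models run on the server, so no local code is required. For
+orientation only, a comparable model for a dichotomous dependent
+variable (logistic regression) can be fit in a local R session with the
+base <tt class="docutils literal"><span class="pre">glm</span></tt> function; the variable names
+here are assumptions, and this is not the application&#8217;s own
+implementation:</p>
+<pre class="literal-block">
+# Logistic regression for a binary outcome y on predictors x1 and x2.
+set.seed(1)
+dat &lt;- data.frame(y  = rbinom(100, 1, 0.5),
+                  x1 = rnorm(100),
+                  x2 = rnorm(100))
+fit &lt;- glm(y ~ x1 + x2, data = dat, family = binomial)
+summary(fit)
+</pre>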
+<p><strong>Tabular Data Recode and Subset Tips</strong></p>
+<p>Use the following guidelines when working with tabular data files:</p>
+<ul class="simple">
+<li>Recoding:<ul>
+<li>You must fill at least the first (new value) and last (condition)
+columns of the table; the second column is optional and for a new
+value label.</li>
+<li>If the old variable you chose for recoding has information about
+its value labels, you can prefill the table with these data for
+convenience, and then modify these prefilled data.</li>
+<li>To exclude a value from your recoding scheme, click the Drop check
+box in the row for that value.</li>
+</ul>
+</li>
+<li>Subsetting:<ul>
+<li>If the variable you chose for subsetting has information about its
+value labels, you can prefill the table with these data for
+convenience.</li>
+<li>To exclude a value in the last column of the table, click the Drop
+check box in the row for that value.</li>
+<li>To include a particular value or range, enter it in the last
+column whose header shows the name of the variable for subsetting.</li>
+</ul>
+</li>
+<li>Entering a value or range as a condition for subsetting or recoding:<ul>
+<li>Suppose the variable you chose for recoding is x.
+If your condition is x==3, enter <tt class="docutils literal"><span class="pre">3</span></tt>.
+If your condition is x &lt; -3, enter <tt class="docutils literal"><span class="pre">(--3</span></tt>.
+If your condition is x &gt; -3, enter <tt class="docutils literal"><span class="pre">-3-)</span></tt>.
+If your condition is -3 &lt; x &lt; 3, enter <tt class="docutils literal"><span class="pre">(-3,</span> <span class="pre">3)</span></tt>.</li>
+<li>Use square brackets (<tt class="docutils literal"><span class="pre">[]</span></tt>) for closed ranges.</li>
+<li>You can enter non-overlapping values and ranges separated by a
+comma, such as <tt class="docutils literal"><span class="pre">0,[7-9]</span></tt>.</li>
+</ul>
+</li>
+</ul>
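+<p>To make the range notation concrete: the combined entry
+<tt class="docutils literal"><span class="pre">0,[7-9]</span></tt> keeps the value 0 plus the
+closed range 7 through 9, which corresponds to the following R condition
+(shown for illustration only):</p>
+<pre class="literal-block">
+x &lt;- c(0, 3, 7, 8, 9, 10)
+x[x == 0 | (x &gt;= 7 &amp; x &lt;= 9)]   # returns 0 7 8 9
+</pre>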
+</div>
+<div class="section" id="network-data">
+<span id="id3"></span><h4>Network Data<a class="headerlink" href="#network-data" title="Permalink to this headline">¶</a></h4>
+<p>Network data files (subsettable files) can be subsetted and analyzed
+online by using the Dataverse Network application. For analysis, the
+Dataverse Network offers generic network data analysis. A list of
+Network Analysis Models is provided.</p>
+<p>Note: All subsetting and analysis options for network data assume a
+network with undirected edges.</p>
+<p>After you find the network data set that you want, access the Subset and
+Analysis options to use the online tools. Then, you can subset data
+by <em>vertices</em>&nbsp;or&nbsp;<em>edges</em>, download subsets, and apply network
+measures.</p>
+<p><strong>Access Network Subset and Analyze Options</strong></p>
+<p>You can subset and analyze network data files before you download the
+file or your subsets. To access the Subset and Analysis options for a
+network data set:</p>
+<ol class="arabic simple">
+<li>Click the title of the study from which you choose to analyze or
+download a file or subset.</li>
+<li>Click the Documentation, Data and Analysis tab for the study.</li>
+<li>In the list of study files, locate the network data file that you
+choose to download, subset, or analyze. You can download data sets
+for a file only if the file entry includes the subset icon.</li>
+<li>Click the&nbsp;<em>Access Subset/Analysis</em>&nbsp;link associated with the
+selected file. If prompted, check the&nbsp;<em>I accept</em>&nbsp;box and click
+Continue to accept the Terms of Use.
+You see the Data File page listing data for the file that you choose
+to subset or analyze.</li>
+</ol>
+<p><strong>Subset Network Data</strong></p>
+<p>There are two ways in which you can subset network data. First, you can
+run a manual query, and build a query of specific values for edge or
+vertex data with which to subset the data. Or, you can select from among
+two automatically generated queries with which to subset the data:</p>
+<ul class="simple">
+<li>Largest graph - Subset the &lt;nth&gt; largest connected component of the
+network. That is, the largest group of nodes that can reach one
+another by walking across edges.</li>
+<li>Neighborhood - Subset the &lt;nth&gt; neighborhood of the selected
+vertices. That is, generate a subgraph of the original network
+composed of all vertices that are positioned at most &lt;n&gt; steps away
+from the currently selected vertices in the original network, plus
+all of the edges that connect them.</li>
+</ul>
+<p>You also can successively subset data to isolate specific values
+progressively.</p>
+<p>Continue to the next topics for detailed information about subsetting a
+network data set.</p>
+<p><strong>Subset Manually</strong></p>
+<p>Perform a manual query to slice a graph based on the attributes of its
+vertices or edges. You choose whether to subset the graph based on
+vertices or edges, then use the Manual Query Builder or free-text Query
+Workspace fields to construct a query based on that element&#8217;s
+attributes. A single query can pertain only to vertices or only to
+edges, never both. You can perform separate, sequential vertex or edge
+queries.</p>
+<p>When you perform a vertex query, all vertices whose attributes do not
+satisfy the query are dropped from the graph, in addition to all edges
+that touch them. When you perform an edge query, all edges whose
+attributes do not satisfy the criteria are dropped, but all vertices
+remain <em>unless</em> you enable the <em>Eliminate disconnected vertices</em> check box. Note that enabling this option drops all
+disconnected vertices whether or not they were disconnected before the
+edge query.</p>
+<p>Review the Network Data Tips before you start work with a study&#8217;s files.</p>
+<p>To subset variables within a network data set by using a manually
+defined query:</p>
+<ol class="arabic">
+<li><p class="first">In the Data File page, click the Manual Query radio button near the
+top of the page.</p>
+</li>
+<li><p class="first">Use the Attribute Set drop-down list and select Vertex to subset by
+node or vertex values.
+Select Edge to subset by edge values.</p>
+</li>
+<li><p class="first">Build the first attribute selection value in the Manual Query Builder
+panel:</p>
+<ol class="arabic simple">
+<li>Select a value in the Attributes list to assign values on which to
+subset.</li>
+<li>Use the Operators drop-down list to choose the function by which
+to define attributes for selection in this query.</li>
+<li>In the Values field, type the specific values to use for selection
+of the attribute.</li>
+<li>Click <em>Add to Query</em>&nbsp;to complete the attribute definition for
+selection.
+You see the query string for this attribute in the Query Workspace
+field.</li>
+</ol>
+<p>Alternatively, you can enter your query directly by typing it into
+the Query Workspace field.</p>
+</li>
+<li><p class="first">Continue to add selection values to your query by using the Manual
+Query Builder tools.</p>
+</li>
+<li><p class="first">To remove any verticies that do not connect with other data in the
+set, check the&nbsp;<em>Eliminate disconnected vertices</em>&nbsp;check box.</p>
+</li>
+<li><p class="first">When you complete construction of your query string, click&nbsp;<em>Run</em>&nbsp;to
+perform the query.</p>
+</li>
+<li><p class="first">Scroll to the bottom of the window, and when the query is processed
+you see a new entry in the Subset History panel that defines your
+query.</p>
+</li>
+</ol>
+<p>Continue to build a successive subset or download a subset.</p>
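+<p>If you later download your data and want to reproduce a vertex query
+locally, a rough equivalent using the R <tt class="docutils literal"><span class="pre">igraph</span></tt>
+package is sketched below. The toy graph, the attribute
+<tt class="docutils literal"><span class="pre">region</span></tt>, and its values are assumptions;
+this is not the application&#8217;s query engine:</p>
+<pre class="literal-block">
+library(igraph)
+g &lt;- graph_from_literal(A-B, B-C, C-D, E)        # toy undirected graph
+V(g)$region &lt;- c("north", "north", "south", "north", "north")
+# Vertex query: keep vertices satisfying region == "north"; edges that
+# touch dropped vertices are dropped with them.
+g2 &lt;- induced_subgraph(g, V(g)[region == "north"])
+# The "Eliminate disconnected vertices" check box equivalent:
+g2 &lt;- delete_vertices(g2, V(g2)[degree(g2) == 0])
+</pre>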
+<p><strong>Subset Automatically</strong></p>
+<p>Perform an Automatic Query to select a subgraph of the network based on
+structural properties of the network. Remember to review the Network
+Data Tips before you start work with a study&#8217;s files.</p>
+<p>To subset variables within a network data set by using an automatically
+generated query:</p>
+<ol class="arabic simple">
+<li>In the Data File page, click the Automatic Query radio button near
+the middle of the page.</li>
+<li>Use the Function drop-down list and select the type of function with
+which to select your subset:<ul>
+<li>Largest graph - Subset the &lt;nth&gt; largest group of nodes that can
+reach one another by walking across edges.</li>
+<li>Neighborhood - Generate a subgraph of the original network
+composed of all vertices that are positioned at most &lt;n&gt; steps
+away from the currently selected vertices in the original network,
+plus all of the edges that connect them. This is the only query
+that can (and generally does) increase the number of vertices and
+edges selected.</li>
+</ul>
+</li>
+<li>In the Nth field, enter the &lt;nth&gt; degree with which to select data
+using that function.</li>
+<li>Click&nbsp;<em>Run</em>&nbsp;to perform the query.</li>
+<li>Scroll to the bottom of the window, and when the query is processed
+you see a new entry in the Subset History panel that defines your
+query.</li>
+</ol>
+<p>Continue to build a successive subset or download a subset.</p>
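+<p>The neighborhood query has a close analogue in
+<tt class="docutils literal"><span class="pre">igraph</span></tt>&#8217;s ego-graph functions, sketched
+below on a toy graph for orientation (again, not the application&#8217;s own
+code):</p>
+<pre class="literal-block">
+library(igraph)
+g &lt;- graph_from_literal(A-B, B-C, C-D, D-E)   # a simple path graph
+# Subgraph of all vertices at most 2 steps from vertex "A", plus the
+# edges that connect them: the &lt;nth&gt; neighborhood with n = 2.
+nb &lt;- make_ego_graph(g, order = 2, nodes = V(g)["A"])[[1]]
+V(nb)$name                                     # "A" "B" "C"
+</pre>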
+<p><strong>Build or Restart Subsets</strong></p>
+<p><strong>Build a Subset</strong></p>
+<p>To build successive subsets and narrow your data selection
+progressively:</p>
+<ol class="arabic simple">
+<li>Perform a manual or automatic subset query on a selected data set.</li>
+<li>Perform a second query to further narrow the results of your previous
+subset activity.</li>
+<li>When you arrive at the subset with which you choose to work, continue
+to analyze or download that subset.</li>
+</ol>
+<p><strong>Undo Previous Subset</strong></p>
+<p>You can reset, or undo, the most recent subsetting action for a data
+set. Note that you can do this only one time, and only to the most
+recent subset.</p>
+<p>Scroll to the Subset History panel at the bottom of the page and
+click&nbsp;<em>Undo</em>&nbsp;in the last row of the list of successive subsets.
+The last subset is removed, and the previous subset is available for
+downloading, further subsetting, or analysis.</p>
+<p><strong>Restart Subsetting</strong></p>
+<p>You can remove all subsetting activity and restore data to the original
+set.</p>
+<p>Scroll to the Subset History panel at the bottom of the page and
+click&nbsp;<em>Restart</em>&nbsp;in the row labeled&nbsp;<em>Initial State</em>.
+The data set is restored to the original condition, and is available
+for downloading, subsetting, or analysis.</p>
+<p><strong>Run Network Measures</strong></p>
+<p>When you finish selecting the specific data that you choose to analyze,
+run a Network Measure analysis on that data. Review the Network Data
+Tips before you start your analysis.</p>
+<ol class="arabic simple">
+<li>In the Data File page, click the Network Measure radio button near
+the bottom of the page.</li>
+<li>Use the Attributes drop-down list and select the type of analysis to
+perform:<ul>
+<li>Page Rank - Determine how much influence comes from a specific
+actor or node.</li>
+<li>Degree - Determine the number of relationships or collaborations
+that exist within a network data set.</li>
+<li>Unique Degree - Determine the number of collaborators that exist.</li>
+<li>In Largest Component - Determine the largest component of a
+network.</li>
+<li>Bonacich Centrality - Determine the importance of a main actor or
+node.</li>
+</ul>
+</li>
+<li>In the Parameters field, enter the specific value with which to
+subset data using that function:<ul>
+<li>Page Rank - Enter a value for the parameter &lt;d&gt;, a proportion,
+between 0 and 1.</li>
+<li>Degree - Enter the number of relationships to extract from a
+network data set.</li>
+<li>Unique Degree - Enter the number of unique relationships to
+extract.</li>
+<li>In Largest Component - Enter the number of components to extract
+from a network data set, starting with the largest.</li>
+</ul>
+</li>
+<li>Click <em>Run</em> to perform the analysis.</li>
+<li>Scroll to the bottom of the window, and when the analysis is
+processed you see a new entry in the Subset History panel that
+contains your analyzed data.</li>
+</ol>
+<p>Continue to download the analyzed subset.</p>
+<p><strong>Download Network Subsets or Measures</strong></p>
+<p>When you complete subsetting and analysis of a network data set, you can
+download the final set of data. Network data subsets are downloaded in a
+zip archive, which has the name <tt class="docutils literal"><span class="pre">subset_&lt;original</span> <span class="pre">file</span> <span class="pre">name&gt;.zip</span></tt>.
+This archive contains three files:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">subset.xml</span></tt> - A GraphML formatted file that contains the final
+subsetted or analyzed data.</li>
+<li><tt class="docutils literal"><span class="pre">verticies.tab</span></tt> - A tabular file that contains all node data for
+the final set.</li>
+<li><tt class="docutils literal"><span class="pre">edges.tab</span></tt> - A tabular file that contains all relationship data
+for the final set.</li>
+</ul>
+<p>Note: Each time you download a subset of a specific network data set, a
+zip archive is downloaded that has the same name. All three zipped files
+within that archive also have the same names. Be careful not to
+overwrite a downloaded data set that you choose to keep when you perform
+successive downloads.</p>
+<p>To download a final set of data:</p>
+<ol class="arabic simple">
+<li>Scroll to the Subset History panel on the Data File page.</li>
+<li>Click <em>Download Latest Results</em> at the bottom of the history list.</li>
+<li>Follow your browser&#8217;s prompts to open or save the data file to your
+computer&#8217;s disk drive. Be sure to save the file in a unique location
+to prevent overwriting an existing downloaded data file.</li>
+</ol>
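+<p>After extracting the archive, the three files can be loaded into a
+local R session along the following lines. This sketch assumes the
+first two columns of <tt class="docutils literal"><span class="pre">edges.tab</span></tt> identify
+the edge endpoints and the first column of
+<tt class="docutils literal"><span class="pre">verticies.tab</span></tt> holds the vertex names;
+verify both against your own download:</p>
+<pre class="literal-block">
+library(igraph)
+verts &lt;- read.delim("verticies.tab")   # node data, named as delivered
+edges &lt;- read.delim("edges.tab")       # relationship data
+g &lt;- graph_from_data_frame(edges, directed = FALSE, vertices = verts)
+summary(g)                              # vertex/edge counts and attributes
+</pre>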
+<p><strong>Network Data Tips</strong></p>
+<p>Use these guidelines when subsetting or analyzing network data:</p>
+<ul class="simple">
+<li>For a Page rank network measure, the value for the parameter &lt;d&gt; is a
+proportion and must be between 0 and 1. Higher values of &lt;d&gt; increase
+dispersion, while values of &lt;d&gt; closer to zero produce a more uniform
+distribution. PageRank is normalized so that all of the PageRanks sum
+to 1.</li>
+<li>For a Bonacich Centrality network measure, the alpha parameter is a
+proportion that must be between -1 and +1. It is normalized so that
+all alpha centralities sum to 1.</li>
+<li>For a Bonacich Centrality network measure, the exo parameter must be
+greater than 0. A higher value of exo produces a more uniform
+distribution of centrality, while a lower value allows more
+variation.</li>
+<li>For a Bonacich Centrality network measure, the original alpha
+parameter of alpha centrality takes values only from -1/lambda to
+1/lambda, where lambda is the largest eigenvalue of the adjacency
+matrix. In this Dataverse Network implementation, the alpha parameter
+is rescaled to be between -1 and 1 and represents the proportion of
+1/lambda to be used in the calculation. Thus, entering alpha=1 sets
+alpha to be 1/lambda. Entering alpha=0.5 sets alpha to be
+1/(2*lambda).</li>
+</ul>
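+<p>As a rough local cross-check of the normalization notes above, PageRank
+and alpha centrality can be computed with
+<tt class="docutils literal"><span class="pre">igraph</span></tt>; the toy graph and damping value are
+assumptions, with damping standing in for the &lt;d&gt; parameter and the
+1/lambda rescaling done explicitly:</p>
+<pre class="literal-block">
+library(igraph)
+g  &lt;- make_ring(5)                         # toy undirected graph
+pr &lt;- page_rank(g, damping = 0.85)$vector  # damping plays the role of &lt;d&gt;
+sum(pr)                                    # PageRanks are normalized to sum to 1
+# Entering alpha = 0.5 in the application means 1/(2*lambda), where lambda
+# is the largest eigenvalue of the adjacency matrix:
+lambda &lt;- max(eigen(as_adjacency_matrix(g, sparse = FALSE))$values)
+ac &lt;- alpha_centrality(g, alpha = 0.5 / lambda)
+</pre>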
+</div>
+</div>
+<div class="section" id="data-visualization">
+<h3>Data Visualization<a class="headerlink" href="#data-visualization" title="Permalink to this headline">¶</a></h3>
+<p>Data Visualization allows contributors to make time series
+visualizations available to end users. These visualizations may be
+viewable and downloadable as graphs or data tables.&nbsp;Please see the
+appropriate guide for more information on setting up a visualization or
+viewing one.</p>
+<div class="section" id="explore-data">
+<h4>Explore Data<a class="headerlink" href="#explore-data" title="Permalink to this headline">¶</a></h4>
+<p>The study owner may make a data visualization interface available to
+those who can view a study.&nbsp; This will allow you to select various data
+variables and see a time series graph or data table.&nbsp; You will also be
+able to download your custom graph for use in your own reports or
+articles.</p>
+<p>The study owner will at least provide a list of data measures from which
+to choose.&nbsp; These measures may be divided into types; if they are, you
+will be able to narrow the list of measures by first selecting a measure
+type.&nbsp; Once you have selected a measure, if there are multiple variables
+associated with the measure you will be able to select one or more
+filters to uniquely identify a variable. By default, any filter assigned
+to a variable will become the label associated with the variable in the
+graph or table.&nbsp; By pressing the Add Line button you will add the
+selected variable to your custom graph.</p>
+<p>&nbsp; <img alt="image0" src="_images/measure_selected.png" /></p>
+<p>Once you have added data to your graph you will be able to customize it
+further.&nbsp; You will be given a choice of display options made available
+by the study owner.&nbsp; These may include an interactive flash graph, a
+static image graph, and a numerical data table.&nbsp; You will also be
+allowed to edit the graph title, which by default is the name of the
+measure or measures selected. You may also edit the Source Label.
+Other customizable features are the height and the legend location of
+the image graph.&nbsp; You may also select a subset of the data by selecting
+the start and end points of the time series.&nbsp; Finally, on the Display
+tab you may opt to display the series as indices, in which case a single
+data point, known as the reference period, will be designated as 100 and
+all other points of the series will be calculated relative to the
+reference period.&nbsp; If you select data points that do not have units in
+common (e.g., one is in percent while the other is in dollars), then the
+display will automatically be set to indices with the earliest common
+data point as the default reference period.</p>
+<p><img alt="image1" src="_images/complex_graph_screenshot.png" /></p>
+<p>On the Line Details tab you will see additional information on the data
+you have selected.&nbsp; This may include links to outside web pages that
+further explain the data.&nbsp; On this tab you will also be able to edit the
+label or delete the line from your custom graph.</p>
+<p>On the Export tab you will be given the opportunity to export your
+custom graph and/or data table.&nbsp;&nbsp; If you select multiple files for
+download they will be bound together in a single zip file.</p>
+<p>The Refresh button clears any data that you have added to your custom
+graph and resets all of the display options to their default values.</p>
+</div>
+<div class="section" id="set-up">
+<h4>Set Up<a class="headerlink" href="#set-up" title="Permalink to this headline">¶</a></h4>
+<p>This feature allows you to make time series visualizations available to
+your end users.&nbsp; These visualizations may be viewable and downloadable
+as graphs or data tables.&nbsp; In the current beta version of the feature,
+your data file must be subsettable and must contain at least one date
+field and one or more measures.&nbsp; You will be able to associate data
+fields from your file with a time variable and multiple measures and
+filters.</p>
+<p>When you select Set Up Exploration from within a study, you must first
+select the file for which you would like to set up the exploration.&nbsp; The
+list of files will include all subsettable data files within the study.</p>
+<p>Once you have selected a file you will go to a screen that has five tabs to
+guide you through the data visualization set-up. (In general, changes
+made to a visualization on the individual tabs are not saved to the
+database until the form’s Save button is pressed.&nbsp; When you are in add
+or edit mode on a tab, the tab will have an update or cancel button to
+update the “working copy” of a visualization or cancel the current
+update.)</p>
+<p>If you have previously set up an exploration for a data file you may copy that exploration to a new file.
+When you select a file for set up you will be asked if you want to copy an exploration from another data file
+and will be presented a list of files from which to choose.  Please note that the data variable names must
+be identical in both files for this migration to work properly.</p>
+<p><strong>Time Variable</strong></p>
+<p>On the first tab you select the time variable of your data file.&nbsp; The
+variable list will only include those variables that are date or time
+variables. &nbsp;These variables must contain a date in each row.&nbsp;&nbsp;You may
+also enter a label in the box labeled Units.&nbsp; This label will be
+displayed under the x-axis of the graph created by the end user.</p>
+<p><img alt="image2" src="_images/edittimevariablescreenshot.png" /></p>
+<p><strong>Measures</strong></p>
+<p>On the Measures tab you may assign measures to the variables in your
+data file.&nbsp; First you may customize the label that the end user will see
+for measures.&nbsp; Next you may add measures by clicking the “Add Measure”
+link.&nbsp; Once you click that link you must give your measure a unique
+name.&nbsp; Then you may assign Units to it.&nbsp; Units will be displayed as the
+y-axis label of any graph produced containing that measure.&nbsp; To help
+organize the measures, you may create measure types
+and assign your measures to one or more measure types.&nbsp; Finally, the
+list of variables for measures will include all those variables that are
+entered as numeric in your data file.&nbsp; If you assign multiple variables
+to the same measure you will have to distinguish between them by
+assigning appropriate filters.&nbsp;&nbsp; For the end user, the measure will be
+the default graph name.</p>
+<p><img alt="image3" src="_images/editmeasuresscreenshot.png" /></p>
+<p><strong>Filters</strong></p>
+<p>On the Filters tab you may assign filters to the variables in your data
+file.&nbsp; Generally, filters contain demographic, geographic, or other
+identifying information about the variables.&nbsp; For a given group of
+filters, only one filter may be assigned to a single variable.&nbsp; The
+filters assigned to a variable must be sufficient to distinguish among
+the variables assigned to a single measure.&nbsp; Similar to measures,
+filters may be assigned to one or more types.&nbsp; For the end user, the
+filter name will be the default label of the line of data added to a
+graph.</p>
+<p><img alt="image4" src="_images/editfiltersscreenshot.png" /></p>
+<div class="line-block">
+<div class="line"><br /></div>
+</div>
+<p><strong>Sources</strong></p>
+<p>On the Sources tab you can indicate the source of each of the variables
+in your data file.&nbsp; By default, the source will be displayed as a note
+below the x-axis labels.&nbsp; You may assign a single source to any or all
+of your data variables. &nbsp;You may also assign multiple sources to any of
+your data variables.</p>
+<p><img alt="image5" src="_images/sourcetabscreenshot.png" /></p>
+<div class="line-block">
+<div class="line"><br /></div>
+</div>
+<p><strong>Display</strong></p>
+<p>On the Display tab you may customize what the end user sees in the Data
+Visualization interface.&nbsp; Options include the data visualization formats
+made available to the end user and default view, the Measure Type label,
+and the Variable Info Label.</p>
+<div class="line-block">
+<div class="line"><br /></div>
+<div class="line-block">
+<div class="line"><img alt="image6" src="_images/displaytabscreenshot.png" /></div>
+</div>
+</div>
+<p><strong>Validate Button</strong></p>
+<p>When you press the “Validate” button the current state of your
+visualization data will be validated.&nbsp; In order to pass validation your
+data must have one time variable defined.&nbsp; There must also be at least
+one measure variable assigned.&nbsp; If more than one variable is assigned to
+a given measure then filters must be assigned such that each single
+variable is defined by the measure and one or more filters.&nbsp; If the data
+visualization does not pass validation a detailed error message
+enumerating the errors will be displayed.</p>
+<p><strong>Release Button</strong></p>
+<p>Once the data visualization has been validated you may release it to end
+users by pressing the “Release” button.&nbsp; The release button will also
+perform a validation.&nbsp; Invalid visualizations will not be released, but
+a detailed error message will not be produced.</p>
+<p><strong>Save Button</strong></p>
+<p>The “Save” button will save any changes made to a visualization on the
+tabs to the database.&nbsp;&nbsp; If a visualization has been released and changes
+are saved that would make it invalid the visualization will be set to
+“Unreleased”.</p>
+<p><strong>Exit Button</strong></p>
+<p>To exit the form press the “Exit” button.&nbsp; You will be warned if you
+have made any unsaved changes.</p>
+<p><strong>Examples</strong></p>
+<p>Simplest case – a single measure associated with a single variable.</p>
+<p>Data variable contains information on average family income for all
+Americans.&nbsp; The end user of the visualization will see an interface as
+below:</p>
+<p><img alt="image7" src="_images/simple_explore_data.png" /></p>
+<p>Complex case - multiple measures and types along with multiple filters
+and filter types.&nbsp; If you have measures related to both income and
+poverty rates you can set them up as measure types and associate the
+appropriate measures with each type.&nbsp; Then, if you have variables
+associated with multiple demographic groups you can set them up as
+filters.&nbsp; You can set up filter types such as age, gender, race and
+state of residence.&nbsp; Some of your filters may belong to multiple types
+such as males age 18-34.</p>
+<p><img alt="image8" src="_images/complex_exploration.png" /></p>
+</div>
+</div>
+</div>
+<div class="section" id="dataverse-administration">
+<h2>Dataverse Administration<a class="headerlink" href="#dataverse-administration" title="Permalink to this headline">¶</a></h2>
+<p>Once a user creates a dataverse, that user becomes its owner and
+therefore the administrator of that dataverse. The dataverse administrator has access
+to manage the settings described in this guide.</p>
+<div class="section" id="create-a-dataverse">
+<h3>Create a Dataverse<a class="headerlink" href="#create-a-dataverse" title="Permalink to this headline">¶</a></h3>
+<p>A dataverse is a container for studies and is the home for an individual
+scholar&#8217;s or organization&#8217;s data.</p>
+<p>Creating a dataverse is easy, but first you must be a registered user.
+Depending on site policy, there may be a&nbsp;&#8220;Create a Dataverse&#8221; link on
+the Network home page. This link first walks you through creating an
+account and then a dataverse.</p>
+<ol class="arabic simple">
+<li>Fill in the required information:</li>
+</ol>
+<blockquote>
+<div><ul class="simple">
+<li><strong>Type of Dataverse</strong>: Choose Scholar if it represents an individual&#8217;s work; otherwise, choose Basic.</li>
+<li><strong>Dataverse Name</strong>: This will be displayed on the network and dataverse home pages. If this is a Scholar dataverse, it will automatically be filled in with the scholar&#8217;s first and last name.</li>
+<li><strong>Dataverse Alias</strong>: This is an abbreviation, usually lower-case, that becomes part of the URL for the new dataverse.</li>
+</ul>
+<blockquote>
+<div>The required fields to create a dataverse are configurable in the Network Options, so fields that are required may also include
+Affiliation, Network Home Page Description, and Classification.</div></blockquote>
+</div></blockquote>
+<ol class="arabic simple" start="2">
+<li>Click &#8220;Save&#8221; and you&#8217;re done! An email will be sent to you with more information, including the URL to access your new dataverse.</li>
+</ol>
+<p>*Required information can vary depending on site policy. Required fields are noted with a <strong>red asterisk</strong>.</p>
+</div>
+<div class="section" id="edit-general-settings">
+<h3>Edit General Settings<a class="headerlink" href="#edit-general-settings" title="Permalink to this headline">¶</a></h3>
+<p>Use the General Settings tab on the Options page to release your
+dataverse and to change its name, alias, and
+classification.&nbsp;The classifications are used to browse to your dataverse from
+the Network home page.</p>
+<p>Navigate to the&nbsp;General Settings from the Options page:</p>
+<p>Dataverse home page &gt; Options page &gt; Settings tab &gt; General subtab</p>
+<p>To release your dataverse:</p>
+<p>Select <em>Released</em> from the drop-down list when your dataverse is ready
+to go public. Select <em>Not Released</em> if you wish to block public access
+to your dataverse.</p>
+<p>Your dataverse cannot be released if it does not contain any released
+studies. Create a study or define a collection with studies from other
+dataverses before you attempt to make your dataverse public.</p>
+<p>To edit the affiliation, name, or alias settings of your dataverse:</p>
+<p>If you edit a Scholar dataverse type, you can edit the following fields:</p>
+<ul class="simple">
+<li>First Name - Edit your first name, which appears with your last name
+on the Network home page in the Scholar Dataverse group.</li>
+<li>Last Name - Edit your last name, which appears with your first name
+on the Network home page in the Scholar Dataverse group.</li>
+</ul>
+<p>If you edit either Scholar or Basic dataverse types, you can edit any of the
+following fields:</p>
+<ul class="simple">
+<li>Affiliation - Edit your institutional identity.</li>
+<li>Dataverse Name - Edit the title for your dataverse, which appears on
+your dataverse home page. There are no naming restrictions.</li>
+<li>Dataverse Alias - Edit your dataverse&#8217;s URL.&nbsp;Special characters
+(~,`, !, &#64;, #, $, %, ^, &amp;, and *) and spaces are not allowed.
+<strong>Note</strong>: if you change the Dataverse Alias field, the URL for your
+Dataverse changes (http://.../dv/&#8217;alias&#8217;), which affects links to this
+page.</li>
+<li>Network Home Page Description - Edit the text that appears beside the
+name of your dataverse on the Network home page.</li>
+<li>Classification - Check the classifications, or groups, in which you
+choose to include your dataverse. Remove the check for any
+classifications that you choose not to join.</li>
+</ul>
+</div>
+<div class="section" id="edit-layout-branding">
+<span id="id4"></span><h3>Edit Layout Branding<a class="headerlink" href="#edit-layout-branding" title="Permalink to this headline">¶</a></h3>
+<p><strong>Customize Layout Branding (header/footer) to match your website</strong></p>
+<p>The Layout Branding allows you to customize your dataverse, by
+<strong>adding HTML to the default banner and footer</strong>, such as that used on
+your personal website. If your website has such layout elements as a
+navigation menu or images, you can add them here. Each dataverse is
+created with a default customization added, which you can leave as is,
+edit to change the background color, or add your own customization.</p>
+<p>Navigate to the&nbsp;Layout Branding from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Customization</span> <span class="pre">subtab</span></tt></p>
+<p>To edit the banner and footer of your dataverse:</p>
+<ol class="arabic simple">
+<li>In the Custom Banner field, enter your plain text, and HTML to define
+your custom banner.</li>
+<li>In the Custom Footer field, enter your plain text, and HTML to define
+your custom footer.</li>
+</ol>
+<p><strong>Embed your Dataverse into your website (iframes)</strong></p>
+<p>Want to embed your Dataverse on an OpenScholar site? Follow <a class="reference internal" href="#openscholar"><em>these special instructions</em></a>.</p>
+<p>If you are a dataverse admin with advanced HTML skills, or if you
+have an HTML developer available to assist you, you can create a page on
+your site and add the dataverse with an iframe.</p>
+<ol class="arabic simple">
+<li>Create a new page that you will host on your site.</li>
+<li>Add the following HTML code to the content area of that new
+page.</li>
+</ol>
+<blockquote>
+<div><div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">&lt;script</span> <span class="pre">type=&quot;text/javascript&quot;&gt;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">var</span> <span class="pre">dvn_url</span> <span class="pre">=</span> <span class="pre">&quot;[SAMPLE_ONLY_http://dvn.iq.harvard.edu/dvn/dv/sampleURL]&quot;;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">var</span> <span class="pre">regexS</span> <span class="pre">=</span> <span class="pre">&quot;[\\?&amp;]dvn_subpage=([^&amp;#]*)&quot;;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">var</span> <span class="pre">regex</span> <span class="pre">=</span> <span class="pre">new</span> <span class="pre">RegExp(</span> <span class="pre">regexS</span> <span class="pre">);</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">var</span> <span class="pre">results</span> <span class="pre">=</span> <span class="pre">regex.exec(</span> <span class="pre">window.location.href</span> <span class="pre">);</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">if(</span> <span class="pre">results</span> <span class="pre">!=</span> <span class="pre">null</span> <span class="pre">)</span> <span class="pre">dvn_url</span> <span class="pre">=</span> <span class="pre">dvn_url</span> <span class="pre">+</span> <span class="pre">results[1];document.write('&lt;iframe</span> <span class="pre">src=&quot;'</span> <span class="pre">+</span> <span class="pre">dvn_url</span> <span class="pre">+</span> <span class="pre">'&quot;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">onLoad=&quot;set_dvn_url(this)&quot;</span> <span class="pre">width=&quot;100%&quot;</span> <span class="pre">height=&quot;600px&quot;</span> <span class="pre">frameborder=&quot;0&quot;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">style=&quot;background-color:#FFFFFF;&quot;&gt;&lt;/iframe&gt;');</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">&lt;/script&gt;</span></tt></div>
+</div>
+</div></blockquote>
+<ol class="arabic simple" start="3">
+<li>Edit that code by adding the URL of your dataverse (replace the
+SAMPLE_ONLY URL in the example, including the brackets “[ ]”), and
+adjusting the height.&nbsp; We suggest you keep the height at or under
+600px in order to fit the iframe into browser windows on computer
+monitors of all sizes, with various screen resolutions.</li>
+<li>The dataverse is set to have a min-width of 724px, so try to give the
+page a width closer to 800px.</li>
+<li>Once you have the page created on your site, with the iframe code, go
+to the Settings tab, then the Customization subtab on your dataverse
+Options page, and click the checkbox that disables customization for
+your dataverse.</li>
+<li>Then enter the URL of the new page on your site. That will redirect
+all users to the new page on your site.</li>
+</ol>
+<p><strong>Layout Branding Tips</strong></p>
+<ul class="simple">
+<li>HTML markup, including <tt class="docutils literal"><span class="pre">script</span></tt> tags for JavaScript, and <tt class="docutils literal"><span class="pre">style</span></tt>
+tags for an internal style sheet, is permitted. The <tt class="docutils literal"><span class="pre">html</span></tt>,
+<tt class="docutils literal"><span class="pre">head</span></tt>, and <tt class="docutils literal"><span class="pre">body</span></tt> element tags are not allowed.</li>
+<li>When you use an internal style sheet to insert CSS into your
+customization, it is important to avoid using universal (&#8220;<tt class="docutils literal"><span class="pre">*</span></tt>&#8221;)
+and type (&#8220;<tt class="docutils literal"><span class="pre">h1</span></tt>&#8221;) selectors, because these can overwrite the
+external style sheets that the dataverse is using, which can break
+the layout, navigation or functionality in the app.</li>
+<li>When you link to files, such as images or pages on a web server
+outside the network, be sure to use the full URL (e.g.
+<tt class="docutils literal"><span class="pre">http://www.mypage.com/images/image.jpg</span></tt>).</li>
+<li>If you recreate content from a website that uses frames to combine
+content on the sides, top, or bottom, then you must substitute the
+frames with <tt class="docutils literal"><span class="pre">table</span></tt> or <tt class="docutils literal"><span class="pre">div</span></tt> element types. You can open such an
+element in the banner field and close it in the footer field.</li>
+<li>Each time you click &#8220;Save&#8221;, your banner and footer automatically are
+validated for HTML and other code errors. If an error message is
+displayed, correct the error and then click &#8220;Save&#8221; again.</li>
+<li>You can use the banner or footer to house a link from your homepage
+to your personal website. Be sure to wait until you release your
+dataverse to the public before you add any links to another website.
+And, be sure to link back from your website to your homepage.</li>
+<li>If you are using an OpenScholar or iframe site and the redirect is
+not working, you can edit your branding settings by adding a flag to
+your dataverse URL: disableCustomization=true. For example:
+<tt class="docutils literal"><span class="pre">dvn.iq.harvard.edu/dvn/dv/mydv?disableCustomization=true</span></tt>. To
+reenable: <tt class="docutils literal"><span class="pre">dvn.iq.harvard.edu/dvn/dv/mydv?disableCustomization=false</span></tt>.
+Disabling the customization lasts for the length of the user session.</li>
+</ul>
+</div>
+<div class="section" id="edit-description">
+<h3>Edit Description<a class="headerlink" href="#edit-description" title="Permalink to this headline">¶</a></h3>
+<p>The Description is displayed on your dataverse Home page.&nbsp;Use this
+field to display announcements or messaging.</p>
+<p>Navigate to the Description from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">General</span> <span class="pre">subtab</span> <span class="pre">&gt;Home</span> <span class="pre">Page</span> <span class="pre">Description</span></tt></p>
+<p>To change the content of this description:</p>
+<ul class="simple">
+<li>Enter your description or announcement text in the field provided.
+Note: A light blue background in any form field indicates HTML, JavaScript, and style tags are permitted. The <tt class="docutils literal"><span class="pre">html</span></tt>, <tt class="docutils literal"><span class="pre">head</span></tt>, and <tt class="docutils literal"><span class="pre">body</span></tt> element types are not allowed.</li>
+</ul>
+<p>Prior to the Version 3.0 release of the Dataverse Network, the
+Description had a character limit of 1000, which would truncate
+longer descriptions with a <strong>more &gt;&gt;</strong> link. This functionality has been
+removed, so that you can add as much text or code to that field as you
+wish. If you would like to add the character limit and truncate
+functionality back to your dataverse, just add this snippet of
+JavaScript to the end of your description.</p>
+<blockquote>
+<div><div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">&lt;script</span> <span class="pre">type=&quot;text/javascript&quot;&gt;</span></tt></div>
+<div class="line">&nbsp;&nbsp;&nbsp;   <tt class="docutils literal"><span class="pre">jQuery(document).ready(function(){</span></tt></div>
+<div class="line">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;   <tt class="docutils literal"><span class="pre">jQuery(&quot;.dvn\_hmpgMainMessage</span> <span class="pre">span&quot;).truncate({max\_length:1000});</span></tt></div>
+<div class="line">&nbsp;&nbsp;&nbsp;  <tt class="docutils literal"><span class="pre">});</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">&lt;/script&gt;</span></tt></div>
+</div>
+</div></blockquote>
+</div>
+<div class="section" id="edit-study-comments-settings">
+<span id="id5"></span><h3>Edit Study Comments Settings<a class="headerlink" href="#edit-study-comments-settings" title="Permalink to this headline">¶</a></h3>
+<p>You can enable or disable the Study User Comments feature in your
+dataverse. If you enable Study User Comments, any user has the option to
+add a comment to a study in this dataverse. By default, this feature is
+enabled in all new dataverses. Note that you should ensure there are
+terms of use at the network or dataverse level that define acceptable
+use of this feature if it is enabled.</p>
+<p>Navigate to the Study User Comments from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">General</span> <span class="pre">subtab</span> <span class="pre">&gt;Allow</span> <span class="pre">Study</span> <span class="pre">Comments</span></tt></p>
+<p>A user must create an account in your dataverse to use the comment
+feature. When you enable this feature, be aware that new accounts will
+be created in your dataverse when users add comments to studies. In
+addition, the Report Abuse function in the comment feature is managed by
+the network admin. If a user reads a comment that might be
+inappropriate, that user can log in or register an account and access
+the Report Abuse option. Comments are reported as abuse to the network
+admin.</p>
+<p>To manage the Study User Comments feature in your dataverse:</p>
+<ul class="simple">
+<li>Click the &#8220;Allow Study Comments&#8221; check box to enable comments.</li>
+<li>Click the checked box to remove the check and disable comments.</li>
+</ul>
+</div>
+<div class="section" id="manage-e-mail-notifications">
+<h3>Manage E-Mail Notifications<a class="headerlink" href="#manage-e-mail-notifications" title="Permalink to this headline">¶</a></h3>
+<p>You can edit the e-mail address used on your dataverse’s Contact Us page
+and by the network when sending notifications on processes and errors.
+By default, the e-mail address used is from the user account of the
+dataverse creator.</p>
+<p>Navigate to the&nbsp;E-Mail Notifications from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">General</span> <span class="pre">subtab</span> <span class="pre">&gt;E-Mail</span> <span class="pre">Address(es)</span></tt></p>
+<p>To edit the contact and notification e-mail address for your dataverse:</p>
+<ul class="simple">
+<li>Enter one or more e-mail addresses in the <strong>E-Mail Address</strong> field.
+Provide the addresses of users who you choose to receive notification
+when contacted from this dataverse. Any time a user submits a request
+through your dataverse, including the Request to Contribute link and
+the Contact Us page, e-mail is sent to all addresses that you enter
+in this field. Separate each address from others with a comma. Do not
+add any spaces between addresses.</li>
+</ul>
+</div>
+<div class="section" id="add-fields-to-search-results">
+<h3>Add Fields to Search Results<a class="headerlink" href="#add-fields-to-search-results" title="Permalink to this headline">¶</a></h3>
+<p>Your dataverse includes the network&#8217;s search and browse features to
+assist your visitors in locating the data that they need. By default,
+the Cataloging Information fields that appear in the search results or
+in studies&#8217; listings include the following: study title, authors, ID,
+production date, and abstract. You can customize other Cataloging
+Information fields to appear in search result listings after the default
+fields. Additional fields appear only if they are populated for the
+study.</p>
+<p>Navigate to the&nbsp;Search Results Fields from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Customization</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Search</span> <span class="pre">Results</span> <span class="pre">Fields</span></tt></p>
+<p>To add more Cataloging Information fields listed in the Search or Browse
+panels:</p>
+<ul class="simple">
+<li>Click the check box beside any of the following Cataloging
+Information fields to include them in your results pages: Production
+Date, Producer, Distribution Date, Distributor, Replication For,
+Related Publications, Related Material, and Related Studies.</li>
+</ul>
+<p>Note: These settings apply to your dataverse only.</p>
+</div>
+<div class="section" id="set-default-study-listing-sort-order">
+<h3>Set Default Study Listing Sort Order<a class="headerlink" href="#set-default-study-listing-sort-order" title="Permalink to this headline">¶</a></h3>
+<p>Use the drop-down menu to set the default sort order of studies on the
+Study Listing page. By default, they are sorted by Global ID, but you
+can also sort by Title, Last Released, Production Date, or Download
+Count.</p>
+<p>Navigate to the&nbsp;Default Study Listing Sort Order from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Customization</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Default</span> <span class="pre">Sort</span> <span class="pre">Order</span></tt></p>
+</div>
+<div class="section" id="enable-twitter">
+<h3>Enable Twitter<a class="headerlink" href="#enable-twitter" title="Permalink to this headline">¶</a></h3>
+<p>If your Dataverse Network has been configured for Automatic Tweeting,
+you will see an option listed as &#8220;Enable Twitter.&#8221; When you click this,
+you will be redirected to Twitter to authorize the Dataverse Network
+application to send tweets for you.</p>
+<p>Once authorized, tweets will be sent for each new study or study version
+that is released.</p>
+<p>To disable Automatic Tweeting, go to the Options page, and click
+&#8220;Disable Twitter.&#8221;</p>
+<p>Navigate to Enable Twitter from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">tab</span> <span class="pre">&gt;&nbsp;Promote</span> <span class="pre">Your</span> <span class="pre">Dataverse</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Sync</span> <span class="pre">Dataverse</span> <span class="pre">With</span> <span class="pre">Twitter</span></tt></p>
+</div>
+<div class="section" id="get-code-for-dataverse-link-or-search-box">
+<h3>Get Code for Dataverse Link or Search Box<a class="headerlink" href="#get-code-for-dataverse-link-or-search-box" title="Permalink to this headline">¶</a></h3>
+<p>Add a dataverse promotional link or dataverse search box on your
+personal website by copying the code for one of the sample links on this
+page, and then pasting it anywhere on your website to create the link.</p>
+<p>Navigate to the Code for Dataverse Link or Search Box from the Options
+page:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Promote</span> <span class="pre">Your</span> <span class="pre">Dataverse</span> <span class="pre">subtab</span></tt></p>
+</div>
+<div class="section" id="edit-terms-for-study-creation">
+<h3>Edit Terms for Study Creation<a class="headerlink" href="#edit-terms-for-study-creation" title="Permalink to this headline">¶</a></h3>
+<p>You can set up Terms of Use for the dataverse that require users to
+acknowledge your terms and click &#8220;Accept&#8221; before they can contribute to
+the dataverse.</p>
+<p>Navigate to the&nbsp;Terms for Study Creation from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Terms</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Deposit</span> <span class="pre">Terms</span> <span class="pre">of</span> <span class="pre">Use</span></tt></p>
+<p>To set Terms of Use for creating or uploading to the dataverse:</p>
+<ol class="arabic simple">
+<li>Click the Enable Terms of Use check box.</li>
+<li>Enter a description of your terms to which visitors must agree before
+they can create a study or upload a file to an existing study.
+Note: A light blue background in any form field indicates that HTML,
+JavaScript, and style tags are permitted. The <tt class="docutils literal"><span class="pre">html</span></tt> and <tt class="docutils literal"><span class="pre">body</span></tt>
+element types are not allowed. (A short example follows this list.)</li>
+</ol>
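+<p>For illustration only, a deposit terms description might use simple
+markup like the following (the policy URL is a made-up placeholder; note
+that no <tt class="docutils literal"><span class="pre">html</span></tt> or <tt class="docutils literal"><span class="pre">body</span></tt> elements are used):</p>
+<pre class="literal-block">
+&lt;p&gt;By depositing data you confirm that you have the right to distribute it.&lt;/p&gt;
+&lt;p&gt;See our &lt;a href="http://example.edu/deposit-policy"&gt;full deposit policy&lt;/a&gt;.&lt;/p&gt;
+</pre>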
+</div>
+<div class="section" id="edit-terms-for-file-download">
+<h3>Edit Terms for File Download<a class="headerlink" href="#edit-terms-for-file-download" title="Permalink to this headline">¶</a></h3>
+<p>You can set up Terms of Use for the network that require users to
+acknowledge your terms and click &#8220;Accept&#8221; before they can download or
+subset contents from the network.</p>
+<p>Navigate to the Terms for File Download from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;&nbsp;Permissions</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Terms</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Download</span> <span class="pre">Terms</span> <span class="pre">of</span> <span class="pre">Use</span></tt></p>
+<p>To set Terms of Use for downloading or subsetting contents from any
+dataverse in the network:</p>
+<ol class="arabic simple">
+<li>Click the Enable Terms of Use check box.</li>
+<li>Enter a description of your terms to which visitors must agree before
+they can download or analyze any file.
+Note: A light blue background in any form field indicates that HTML,
+JavaScript, and style tags are permitted. The <tt class="docutils literal"><span class="pre">html</span></tt> and <tt class="docutils literal"><span class="pre">body</span></tt>
+element types are not allowed.</li>
+</ol>
+</div>
+<div class="section" id="manage-permissions">
+<h3>Manage Permissions<a class="headerlink" href="#manage-permissions" title="Permalink to this headline">¶</a></h3>
+<p>Enable contribution invitation, grant permissions to users and groups,
+and manage dataverse file permissions.</p>
+<p>Navigate to Manage Permissions from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">subtab</span></tt></p>
+<p><strong>Contribution Settings</strong></p>
+<p>Choose the access level contributors have to your dataverse: whether
+they are allowed to edit only their own studies or all studies, and
+whether all registered users can edit their own studies (Open dataverse)
+or all studies (Wiki dataverse). In an Open dataverse, users can add
+studies by simply creating an account, and can edit their own studies at
+any time, even after the study is released. In a Wiki dataverse, users
+can not only add studies by creating an account, but also edit any study
+in that dataverse. Contributors cannot, however, release a study
+directly. After their edits, they submit it for review, and a dataverse
+administrator or curator will release it.</p>
+<p><strong>User Permission Settings</strong></p>
+<p>There are several roles defined for users of a Dataverse Network
+installation:</p>
+<ul class="simple">
+<li>Data Users - Download and analyze all types of data</li>
+<li>Contributors - Distribute data and receive recognition and citations
+to it</li>
+<li>Curators - Summarize related data, organize data, or manage multiple
+sets of data</li>
+<li>Administrators - Set up and manage contributions to your dataverse,
+manage the appearance of your dataverse, organize your dataverse
+collections</li>
+</ul>
+<p><strong>Privileged Groups</strong></p>
+<p>Enter a group name to allow that group access to the dataverse. Groups
+are created by network administrators.</p>
+<p><strong>Dataverse File Permission Settings</strong></p>
+<p>Choose &#8216;Yes&#8217; to restrict ALL files in this dataverse. To restrict files
+individually, go to the Study Permissions page of the study containing
+the file.</p>
+</div>
+<div class="section" id="create-user-account">
+<h3>Create User Account<a class="headerlink" href="#create-user-account" title="Permalink to this headline">¶</a></h3>
+<p>As a registered user, you can:</p>
+<ul class="simple">
+<li>Add studies to open and wiki dataverses, if available</li>
+<li>Contribute to existing studies in wiki dataverses, if available</li>
+<li>Add user comments to studies that have this option</li>
+<li>Create your own dataverse</li>
+</ul>
+<p><strong>Network Admin Level</strong></p>
+<p>Navigate to Create User Account from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Users</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Create</span> <span class="pre">User</span> <span class="pre">link</span></tt></p>
+<p>To create an account for a new user in your Network:</p>
+<ol class="arabic">
+<li><dl class="first docutils">
+<dt>Complete the account information page.</dt>
+<dd><p class="first last">Enter values in all required fields. Note: an email address can also be used as a username</p>
+</dd>
+</dl>
+</li>
+<li><p class="first">Click Create Account to save your entries.</p>
+</li>
+<li><p class="first">Go to the Permissions tab on the Options page to give the user
+Contributor, Curator or Admin access to your dataverse.</p>
+</li>
+</ol>
+<p><strong>Dataverse Admin Level</strong></p>
+<p>Navigate to Create User Account from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">tab</span> <span class="pre">&gt;&nbsp;Permissions</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Create</span> <span class="pre">User</span> <span class="pre">link</span></tt></p>
+<p>To create an account for a new user in your Dataverse:</p>
+<ol class="arabic">
+<li><dl class="first docutils">
+<dt>Complete the account information page.</dt>
+<dd><p class="first last">Enter values in all required fields. Note: an email address can also be used as a username</p>
+</dd>
+</dl>
+</li>
+<li><p class="first">Click Create Account to save your entries.</p>
+</li>
+<li><p class="first">Go to the Permissions tab on the Options page to give the user
+Contributor, Curator or Admin access to your dataverse.</p>
+</li>
+</ol>
+<p><strong>New User: Network Homepage</strong></p>
+<p>As a new user, to create an account at the <strong>Dataverse Network homepage</strong>, select &#8220;Create Account&#8221;
+at the top-right hand side of the page.</p>
+<p>Complete the required information denoted by the red asterisk and save.</p>
+<p><strong>New User: Dataverse Level</strong></p>
+<p>As a new user, to create an account at the <strong>Dataverse level</strong>, select &#8220;Create Account&#8221;
+at the top-right hand side of the page. Note: For Open Dataverses select &#8220;Create Account&#8221; in the orange box
+on the top right hand side of the page labelled: &#8220;OPEN DATAVERSE&#8221;.</p>
+<p>Complete the required information denoted by the red asterisk and save.</p>
+</div>
+<div class="section" id="download-tracking-data">
+<h3>Download Tracking Data<a class="headerlink" href="#download-tracking-data" title="Permalink to this headline">¶</a></h3>
+<p>You can view any guestbook responses that have been made in your
+dataverse. Beginning with version 3.2 of Dataverse Network, if the
+guestbook is not enabled, data will be collected silently based on the
+logged-in user or anonymously. The data displayed includes user account
+data or the session ID of an anonymous user, the global ID, study title
+and file name of the file downloaded, the time of the download, the type
+of download and any custom questions that have been answered. The
+username/session ID and download type were not collected in the 3.1
+version of Dataverse Network. A comma-separated values (CSV) file of all
+download tracking data may be downloaded by clicking the Export Results
+button.</p>
+<p>Navigate to the&nbsp;Download Tracking Data from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Download</span> <span class="pre">Tracking</span> <span class="pre">Data</span> <span class="pre">subtab</span></tt></p>
+</div>
+<div class="section" id="edit-file-download-guestbook">
+<h3>Edit File Download Guestbook<a class="headerlink" href="#edit-file-download-guestbook" title="Permalink to this headline">¶</a></h3>
+<p>You can set up a guestbook for your dataverse to collect information on
+all users before they can download or subset contents from the
+dataverse. The guestbook is independent of Terms of Use. Once it has
+been enabled, it will be shown to a user for the first file that user
+downloads from a given study within a single session. If the user
+downloads additional files from the study in the same session, a record
+will be created in the guestbook response table using the data previously
+entered. Beginning with version 3.2 of Dataverse Network, if the
+dataverse guestbook is not enabled in your dataverse, download
+information will be collected silently based on logged-in user
+information or session ID.</p>
+<p>Navigate to the&nbsp;File Download Guestbook from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Guestbook</span> <span class="pre">subtab</span></tt></p>
+<p>To set up a Guestbook for downloading or subsetting contents from any study in the dataverse:</p>
+<ol class="arabic simple">
+<li>Click the Enable File Download Guestbook check box.</li>
+<li>Select or deselect the Required check box for any of the user account
+identifying data points (first and last name, e-mail address, etc.).</li>
+<li>Add any custom questions to collect additional data. These questions
+may be marked as required and set up as free text responses or
+multiple choice. For multiple choice responses select Radio Buttons
+as the Custom Field Type and enter the possible answers.</li>
+<li>Any custom question may be removed at any time, so that it won&#8217;t show
+for the end user. If there are any responses associated with a question
+that has been removed, they will continue to appear in the Guestbook
+Response data table.</li>
+</ol>
+</div>
+<div class="section" id="openscholar">
+<span id="id6"></span><h3>OpenScholar<a class="headerlink" href="#openscholar" title="Permalink to this headline">¶</a></h3>
+<p><strong>Embed your Dataverse easily on an OpenScholar site</strong></p>
+<p>Dataverse integrates seamlessly with
+<a class="reference external" href="http://openscholar.harvard.edu/">OpenScholar</a>, a self-service site builder for higher education.</p>
+<p>To embed your dataverse on an OpenScholar site:</p>
+<ol class="arabic simple">
+<li>On your Dataverse Options page, go to the Settings tab</li>
+<li>Go to the Customization subtab</li>
+<li>Click the checkbox that disables customization for your dataverse</li>
+<li>Make note of your Dataverse alias URL (e.g.
+<a class="reference external" href="http://thedata.harvard.edu/dvn/dv/myvalue">http://thedata.harvard.edu/dvn/dv/myvalue</a>)</li>
+<li>Follow the <a class="reference external" href="http://support.openscholar.harvard.edu/customer/portal/articles/1215076-apps-dataverse">OpenScholar Support Center
+instructions</a>&nbsp;to
+enable the Dataverse App</li>
+</ol>
+</div>
+<div class="section" id="enabling-lockss-access-to-the-dataverse">
+<span id="id7"></span><h3>Enabling LOCKSS access to the Dataverse<a class="headerlink" href="#enabling-lockss-access-to-the-dataverse" title="Permalink to this headline">¶</a></h3>
+<p><strong>Summary:</strong></p>
+<p><a class="reference external" href="http://lockss.stanford.edu/lockss/Home">LOCKSS Project</a> or <em>Lots
+of Copies Keeps Stuff Safe</em> is an international initiative based at
+Stanford University Libraries that provides a way to inexpensively
+collect and preserve copies of authorized e-content. It does so using an
+open source, peer-to-peer, decentralized server infrastructure. In order
+to make a LOCKSS server crawl, collect and preserve content from a DVN,
+both the server (the LOCKSS daemon) and the client (the DVN) sides must
+be properly configured. In simple terms, the LOCKSS server needs to be
+pointed at the DVN, given its location and instructions on what to
+crawl (the entire network, or a particular Dataverse); on the DVN side,
+access to the data must be authorized for the LOCKSS daemon. The section
+below describes the configuration tasks that the administrator of a
+Dataverse will need to do on the client side. It does not describe how
+LOCKSS works and what it does in general; it&#8217;s a fairly complex system,
+so please refer to the documentation on the <a class="reference external" href="http://lockss.stanford.edu/lockss/Home">LOCKSS
+Project</a> site for more
+information. Some information intended for a LOCKSS server administrator
+is available in the <a class="reference internal" href="dataverse-installer-main.html#using-lockss-with-dvn"><em>&#8220;Using LOCKSS with DVN&#8221;</em></a> section of the <a class="reference internal" href="dataverse-installer-main.html#introduction"><em>DVN Installers Guide</em></a>
+(our primary sysadmin-level manual).</p>
+<p><strong>Configuration Tasks:</strong></p>
+<p>In order for a LOCKSS server to access, crawl and preserve any data on a
+given Dataverse Network, it needs to be granted an authorization by the
+network administrator. (In other words, an owner of a dataverse cannot
+authorize LOCKSS access to its files, unless LOCKSS access is configured
+on the Dataverse Network level). By default, LOCKSS crawling of the
+Dataverse Network is not allowed; check with the administrator of
+your&nbsp;Dataverse Network for details.</p>
+<p>But if enabled on the&nbsp;Dataverse Network level, the dataverse owner can
+further restrict LOCKSS access. For example, if on the network level all
+LOCKSS servers are allowed to crawl all publicly available data, the
+owner can limit access to the materials published in his or her
+dataverse to select servers only, specified by network address or
+domain.</p>
+<p>In order to configure LOCKSS access, navigate to the Advanced tab on the
+Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Advanced</span> <span class="pre">subtab</span></tt></p>
+<p>It&#8217;s important to understand that when a LOCKSS daemon is authorized to
+&#8220;crawl restricted files&#8221;, this does not by itself grant the actual
+access to the materials! This setting only specifies that the daemon
+should not be skipping such restricted materials outright. If it is
+indeed desired to have non-public materials collected and preserved by
+LOCKSS, in addition to selecting this option, it will be the
+responsibility of the DV Administrator to give the LOCKSS daemon
+permission to actually access the files.&nbsp;As of DVN version 3.3, this can
+only be done based on the IP address of the LOCKSS server (by creating
+an IP-based user group with the appropriate permissions).</p>
+<p>Once LOCKSS crawling of the Dataverse is enabled, the Manifest page
+URL will be</p>
+<p><tt class="docutils literal"><span class="pre">http</span></tt><tt class="docutils literal"><span class="pre">://&lt;YOUR</span> <span class="pre">SERVER&gt;/dvn/dv/&lt;DV</span> <span class="pre">ALIAS&gt;/faces/ManifestPage.xhtml</span></tt>.</p>
+</div>
+</div>
+<div class="section" id="study-and-data-administration">
+<h2>Study and Data Administration<a class="headerlink" href="#study-and-data-administration" title="Permalink to this headline">¶</a></h2>
+<p>Study Options are available for Contributors, Curators, and
+Administrators of a Dataverse.</p>
+<div class="section" id="create-new-study">
+<h3>Create New Study<a class="headerlink" href="#create-new-study" title="Permalink to this headline">¶</a></h3>
+<p>Brief instructions for creating a study:</p>
+<p>Navigate to the dataverse in which you want to create a study, then
+click Options-&gt;Create New Study.</p>
+<p>Enter at minimum a study title and click Save. Your draft study is now
+created. Add additional cataloging information and upload files as
+needed. Release the study when ready to make it viewable by others.</p>
+<p><strong>Data Citation widget</strong></p>
+<p>At the top of the edit study form, there is a data citation widget that
+allows a user to quickly enter the fields that appear in the data citation,
+i.e., title, author, date, and distributor. Otherwise, the information can
+be entered as the fields appear in the data entry form.</p>
+<p>See the information below for more details and recommendations for
+creating a study.</p>
+<p><strong>Steps to Create a Study</strong></p>
+<ol class="arabic simple">
+<li>Enter Cataloging Information, including an abstract of the study.
+Set Terms of Use for the study in the Cataloging fields, if you choose.</li>
+<li>Upload files associated with the study.</li>
+<li>Set permissions to access the study, all of the study files, or some
+of the study files.</li>
+<li>Delete your study if you choose, before you submit it for review.</li>
+<li>Submit your study for review, to make it available to the public.</li>
+</ol>
+<p>There are several guidelines to creating a study:</p>
+<ul class="simple">
+<li>You must create a study by performing steps in the specified order.</li>
+<li>If multiple users edit a study at one time, the first user to click
+Save assumes control of the file. Only that user&#8217;s changes are
+effective.</li>
+<li>When you save the study, any changes that you make after that do not
+affect the study&#8217;s citation.</li>
+</ul>
+<p><strong>Enter Cataloging Information</strong></p>
+<p>To enter the Cataloging Information for a new study:</p>
+<ol class="arabic">
+<li><p class="first">Prepopulate Cataloging Information fields based on a study template
+(if a template is available), use the Select Study Template pull-down
+list to select the appropriate template.</p>
+<p>A template provides default values for basic fields in the
+Cataloging Information fields. The default template prepopulates the
+Deposit Date field only.</p>
+</li>
+<li><p class="first">Enter a title in the Title field.</p>
+</li>
+<li><p class="first">Enter data in the remaining Cataloging Information fields.
+To list all fields, including the Terms of Use fields, click the Show
+All Fields button after you enter a title. Use the following
+guidelines to complete these fields:</p>
+<ul class="simple">
+<li>A light blue background in any form field indicates that HTML,
+JavaScript, and style tags are permitted. You cannot use the
+<tt class="docutils literal"><span class="pre">html</span></tt> and <tt class="docutils literal"><span class="pre">body</span></tt> element types.</li>
+<li>To use the inline help and view information about a field, roll
+your cursor over the field title.</li>
+<li>Be sure to complete the Abstract field.</li>
+<li>To set Terms of Use for your study, scroll to the bottom of the Cataloging Information tab.
+Eight fields appear under the Terms of Use label. You must
+complete at least one of these fields to enable Terms for this
+study.</li>
+</ul>
+</li>
+<li><p class="first">Click the <em>Save</em> button and then add comments or a brief description
+in the Study Version Notes popup. Then click the <em>Continue</em> button
+and your study draft version is saved.</p>
+</li>
+</ol>
+<p><strong>Upload Study Files</strong></p>
+<p>To upload files associated with a new study:</p>
+<ol class="arabic">
+<li><p class="first">For each file that you choose to upload to your study, first select
+the Data Type from the drop-down list. Then click the Browse button
+to select the file, and then click Upload to add the files one at a time.</p>
+<p>When selecting a CSV (character-separated values) data type, an SPSS Control Card file is first required.</p>
+<p>When selecting a TAB (tab-delimited) data type, a DDI Control Card file is first required. There is no restriction to the number or types of files that you can upload to the Dataverse Network.</p>
+<p>There is a maximum file size of 2 gigabytes for each file that you upload.</p>
+</li>
+<li><p class="first">After you upload one file, enter the type of file in the <em>Category</em>
+field and then click Save.
+If you do not enter a category and click Save, the Category
+drop-down list does not have any value. You can create any category
+to add to this list.</p>
+</li>
+<li><p class="first">For each file that you upload, first click the check box in front of
+the file&#8217;s entry in the list, and then use the Category drop-down
+list to select the type of file that you uploaded.</p>
+<p>Every checked file is assigned the category that you select. Be sure
+to click the checked box to remove the check before you select a new
+value in the Category list for another file.</p>
+</li>
+<li><p class="first">In the Description field, enter a brief message that identifies the
+contents of your file.</p>
+</li>
+<li><p class="first">Click Save when you are finished uploading files. <strong>Note:</strong> If you upload a subsettable file, that process takes a few
+moments to complete. During the upload, the study is not available for editing. When you receive e-mail notification that the
+subsettable file upload is complete, click <em>Refresh</em> to continue editing the study.</p>
+<p>You see the Documentation, Data and Analysis tab of the study page
+with a list of the uploaded files. For each <em>subsettable tabular</em>
+data set file that you upload, the number of cases and variables and
+a link to the Data Citation information for that data set are
+displayed. If you uploaded an SPSS (<tt class="docutils literal"><span class="pre">.sav</span></tt> or <tt class="docutils literal"><span class="pre">.por</span></tt>) file, the
+Type for that file is changed to <em>Tab delimited</em> and the file
+extension is changed to <tt class="docutils literal"><span class="pre">.tab</span></tt> when you click Save.</p>
+<p>For each <em>subsettable network</em> data set file that you upload, the number of edges and vertices and a link to the Data Citation
+information for that data set are displayed.</p>
+</li>
+<li><p class="first">Continue to the next step and set file permissions for the study or
+its files.</p>
+</li>
+</ol>
+<p><strong>Study File Tips</strong></p>
+<p>Keep in mind these tips when uploading study files to your dataverse:</p>
+<ul class="simple">
+<li>The following subsettable file types are supported:<ul>
+<li>SPSS <tt class="docutils literal"><span class="pre">sav</span></tt> and <tt class="docutils literal"><span class="pre">por</span></tt> - Versions 7.x to 20.x (See the <a class="reference internal" href="#spss-datafile-ingest"><em>Note on SPSS ingest</em></a> in the Appendix)</li>
+<li>STATA <tt class="docutils literal"><span class="pre">dta</span></tt> - Versions 4 to 12</li>
+<li>R <tt class="docutils literal"><span class="pre">RData</span></tt> - All versions (NEW as of DVN v.3.5! See <a class="reference internal" href="#r-datafile-ingest"><em>Ingest of R data files</em></a> in the Appendix)</li>
+<li>GraphML <tt class="docutils literal"><span class="pre">xml</span></tt> - All versions</li>
+<li>CSV data file with a <a class="reference internal" href="#controlcard-datafile-ingest"><em>control card</em></a></li>
+<li>TAB-delimited data file with a <a class="reference internal" href="#ddixml-datafile-ingest"><em>DDI XML control card</em></a></li>
+</ul>
+</li>
+<li>A custom ingest for FITS Astronomical data files has been added in v.3.4. (see <a class="reference internal" href="#fits-datafile-ingest"><em>FITS File format Ingest</em></a> in the Appendix)</li>
+<li>You can add information for each file, including:<ul>
+<li>File name</li>
+<li>Category (documentation or data)</li>
+<li>Description</li>
+</ul>
+</li>
+<li>If you upload the wrong file, click the Remove link before you click
+Save.
+To replace a file after you upload it and save the study, first
+remove the file and then upload a new one.</li>
+<li>If you upload a STATA (<tt class="docutils literal"><span class="pre">.dta</span></tt>), SPSS (<tt class="docutils literal"><span class="pre">.sav</span></tt> or <tt class="docutils literal"><span class="pre">.por</span></tt>), or
+network (<tt class="docutils literal"><span class="pre">.xml</span></tt>) file, the file automatically becomes subsettable
+(that is, subset and analysis tools are available for that file in
+the Network). In this case, processing the file might take some time
+and you will not see the file listed immediately after you click
+Save.</li>
+<li>When you upload a <em>subsettable</em> data file, you are prompted to
+provide or confirm your e-mail address for notifications. One e-mail
+lets you know that the file upload is in progress; a second e-mail
+notifies you when the file upload is complete.</li>
+<li>While the upload of the files takes place, your study is not
+available for editing. When you receive e-mail notification that the
+upload is completed, click <em>Refresh</em> to continue editing the study.</li>
+</ul>
+<p><strong>Set Study and File Permissions</strong></p>
+<p>You can restrict access to a study, all of its files, or some of its
+files. This restriction extends to the search and browse functions.</p>
+<p>To permit or restrict access:</p>
+<ol class="arabic">
+<li><p class="first">On the study page, click the Permissions link.</p>
+</li>
+<li><p class="first">To set permissions for the study:</p>
+<ol class="upperalpha simple">
+<li>Scroll to the Entire Study Permission Settings panel, and click
+the drop-down list to change the study to Restricted or Public.</li>
+<li>In the <em>User Restricted Study Settings</em> field, enter a user or
+group to whom you choose to grant access to the study, then click
+Add.</li>
+</ol>
+<p>To enable a request for access to restricted files in the study,
+scroll to the File Permission Settings panel, and click the
+Restricted File Settings check box. This supplies a request link on
+the Data, Documentation and Analysis tab for users to request access
+to restricted files by creating an account.</p>
+<p>To set permission for individual files in the study:</p>
+<ol class="upperalpha simple">
+<li>Scroll to the Individual File Permission Settings panel, and enter
+a user or group in the Restricted File User Access <em>Username</em>
+field to grant permissions to one or more individual files.</li>
+<li>Use the File Permission pull-down list and select the permission
+level that you choose to apply to selected files: Restricted or
+Public.</li>
+<li>In the list of files, click the check box for each file to which
+you choose to apply permissions.
+To select all files, click the check box at the top of the list.</li>
+<li>Click Update.
+The users or groups to which you granted access privileges appear
+in the File Permissions list after the selected files.</li>
+</ol>
+</li>
+</ol>
+<p>Note: You can edit or delete your study if you choose, but only until
+you submit the study for review. After you submit your study for review,
+you cannot edit or delete it from the dataverse.</p>
+<p><strong>Delete Studies</strong></p>
+<p>You can delete a study that you contribute, but only until you submit
+that study for review. After you submit your study for review, you
+cannot delete it from the dataverse.</p>
+<p>If a study is no longer valid, it can now be deaccessioned so it&#8217;s
+unavailable to users but still has a working citation. A reference to a
+new study can be provided when deaccessioning a study. Only Network
+Administrators can now permanently delete a study once it has been
+released.</p>
+<p>To delete a draft version:</p>
+<ol class="arabic">
+<li><p class="first">Click the Delete Draft Version link in the top-right area of the
+study page.</p>
+<p>You see the Delete Draft Study Version popup.</p>
+</li>
+<li><p class="first">Click the Delete button to remove the draft study version from the
+dataverse.</p>
+</li>
+</ol>
+<p>To deaccession a study:</p>
+<ol class="arabic">
+<li><dl class="first docutils">
+<dt>Click the Deaccession link in the top-right area of the study page.</dt>
+<dd><p class="first last">You see the Deaccession Study page.</p>
+</dd>
+</dl>
+</li>
+<li><p class="first">You have the option to add your comments about why the study was
+deaccessioned, and a link reference to a new study by including the
+Global ID of the study.</p>
+</li>
+<li><p class="first">Click the Deaccession button to remove your study from the
+dataverse.</p>
+</li>
+</ol>
+<p><strong>Submit Study for Review</strong></p>
+<p>When you finish setting options for your study, click <em>Submit For
+Review</em> in the top-right corner of the study page. The study version
+shown on the page changes to <em>In Review</em>.</p>
+<p>You receive e-mail after you click <em>Submit For Review</em>, notifying you
+that your study was submitted for review by the Curator or Dataverse
+Admin. When a study is in review, it is not available to the public. You
+receive another e-mail notifying you when your study is released for
+public use.</p>
+<p>After your study is reviewed and released, it is made available to the
+public, and it is included in the search and browse functions. The
+Cataloging Information tab for your study contains the Citation
+Information for the complete study. The Documentation, Data and Analysis
+tab lists the files associated with the study. For each subsettable file
+in the study, a link is available to show the Data Citation for that
+specific data set.</p>
+<p><strong>UNF Calculation</strong></p>
+<p>When a study is created, a UNF is calculated for each subsettable file
+uploaded to that study. All subsettable file UNFs then are combined to
+create another UNF for the study. If you edit a study and upload new
+subsettable files, a new UNF is calculated for the new files and for the
+study.</p>
+<p>If the original study was created before version 2.0 of the Dataverse
+Network software, the UNF calculations were performed using version 3 of
+that standard. If you upload new subsettable files to an existing study
+after implementation of version 2.0 of the software, the UNFs are
+recalculated for all subsettable files and for the study using version 5
+of that standard. This prevents incompatibility of UNF version numbers
+within a study.</p>
+</div>
+<div class="section" id="manage-studies">
+<h3>Manage Studies<a class="headerlink" href="#manage-studies" title="Permalink to this headline">¶</a></h3>
+<p>You can find all studies that you uploaded to the dataverse, or that
+were submitted by a Contributor for review, giving you access to view,
+edit, release, or delete studies.</p>
+<p><strong>View, Edit, and Delete/Deaccession Studies</strong></p>
+<p>To view and edit studies that you uploaded:</p>
+<ol class="arabic simple">
+<li>Click a study Global ID, title, or <em>Edit</em> link to go to the study
+page.</li>
+<li>From the study page, do any of the following:<ul>
+<li>Edit Cataloging Information</li>
+<li>Edit/Delete File + Information</li>
+<li>Add File(s)</li>
+<li>Edit Study Version Notes</li>
+<li>Permissions</li>
+<li>Create Study Template</li>
+<li>Release</li>
+<li>Deaccession</li>
+<li>Destroy Study</li>
+</ul>
+</li>
+</ol>
+<p>To delete or deaccession studies that you uploaded:</p>
+<ol class="arabic simple">
+<li>If the study has not been released, click the <em>Delete</em> link to open
+the Delete Draft Study Version popup.</li>
+<li>If the study has been released, click the <em>Deaccession</em> link to open
+the Deaccession Study page.</li>
+<li>Add your comments about why the study was deaccessioned, and a
+reference link to another study by including the Global ID, then
+click the <em>Deaccession</em> button.</li>
+</ol>
+<p><strong>Release Studies</strong></p>
+<p>When you release a study, you make it available to the public. Users can
+browse it or search for it from the dataverse or Network homepage.</p>
+<p>You receive e-mail notification when a Contributor submits a study for
+review. You must review each study submitted to you and release that
+study to the public. You receive a second e-mail notification after you
+release a study.</p>
+<p>To release a study draft version:</p>
+<ol class="arabic simple">
+<li>Review the study draft version by clicking the Global ID, or title,
+to go to the Study Page, then click Release in the upper right
+corner. For a quick release, click <em>Release</em> from the Manage Studies
+page.</li>
+<li>If the study draft version is an edit of an existing study, you will
+see the Study Version Differences page. The table allows you to view
+the changes compared to the current public version of the study.
+Click the <em>Release</em> button to continue.</li>
+<li>Add comments or a brief description in the Study Version Notes popup.
+Then click the <em>Continue</em> button and your study is now public.</li>
+</ol>
+</div>
+<div class="section" id="manage-study-templates">
+<h3>Manage Study Templates<a class="headerlink" href="#manage-study-templates" title="Permalink to this headline">¶</a></h3>
+<p>You can set up study templates for a dataverse to prepopulate any of
+the Cataloging Information fields of a new study with default values.
+When a user adds a new study, that user can select a template to fill in
+the defaults.</p>
+<p><strong>Create Template</strong></p>
+<p>Study templates help to reduce the work needed to add a study, and to
+apply consistency to studies within a dataverse. For example, you can
+create a template to include the Distributor and Contact details so that
+every study has the same values for that metadata.</p>
+<p>To create a new study template:</p>
+<ol class="arabic simple">
+<li>Click Clone on any Template.</li>
+<li>You see the Study Template page.</li>
+<li>In the Template Name field, enter a descriptive name for this
+template.</li>
+<li>Enter generic information in any of the Cataloging Information
+metadata fields. You may also change the input level of any field to
+make it required, recommended, optional, or hidden.
+Hidden fields will not be visible to the user creating studies from
+the template.</li>
+<li>After you complete entry of generic details in the fields that you
+choose to prepopulate for new studies, click Save to create the
+template.</li>
+</ol>
+<p>Note: You also can create a template directly from the study page to
+use that study&#8217;s Cataloging Information in the template.</p>
+<p><strong>Enable a template</strong></p>
+<p>Click the Enabled link for the given template. Enabled templates are
+available to end users for creating studies.</p>
+<p><strong>Edit Template</strong></p>
+<p>To edit an existing study template:</p>
+<ol class="arabic simple">
+<li>In the list of templates, click the Edit link for the template that
+you choose to edit.</li>
+<li>You see the Study Template page, with the template setup that you
+selected.</li>
+<li>Edit the template fields that you choose to change, add, or remove.</li>
+</ol>
+<p>Note: You cannot edit any Network Level Template.</p>
+<p><strong>Make a Template the Default</strong></p>
+<p>To set any study template as the default template that applies
+automatically to new studies, go to the list of templates and click the
+Make Default link next to the name of the template that you choose to
+set as the default. The Current Default Template label is displayed next
+to the name of the template that you set as the default.</p>
+<div class="line-block">
+<div class="line"><strong>Remove Template</strong></div>
+<div class="line">To delete a study template from a dataverse:</div>
+</div>
+<ol class="arabic simple">
+<li>In the list of templates, click the Delete link for the template that
+you choose to remove from the dataverse.</li>
+<li>You see the Delete Template page.</li>
+<li>Click Delete to remove the template from the dataverse.</li>
+</ol>
+<p>Note: You cannot delete any network template, default template, or
+template in use by any study.</p>
+</div>
+<div class="section" id="data-uploads">
+<h3>Data Uploads<a class="headerlink" href="#data-uploads" title="Permalink to this headline">¶</a></h3>
+<p><strong>Troubleshooting Data Uploads:</strong></p>
+<p>Though the add files page works for the majority of our users, there can
+be situations where uploading files does not work. Below are some
+troubleshooting tips, including situations where uploading a file might
+fail and things to try.</p>
+<p><strong>Situations where uploading a file might fail:</strong></p>
+<ol class="arabic simple">
+<li>The file is too large (larger than the maximum size); this should fail immediately with an error.</li>
+<li>The file takes too long and the connection times out (currently this seems to happen after 5 minutes). The failure behavior
+is vague and depends on the browser; this is probably an IceFaces issue.</li>
+<li>The user is going through a web proxy or firewall that is not passing through partial submit headers. There is specific failure
+behavior here that can be checked, and it would also affect other web site functionality such as the create account link. See
+redmine ticket <a class="reference external" href="https://redmine.hmdc.harvard.edu/issues/2532">#2532</a>.</li>
+<li>The AddFilesPage times out: the user begins adding files, then sits idle for a long while until the page times out; they should
+see the red circle slash.</li>
+<li>For subsettable files, there is something wrong with the file
+itself, so it is not ingested. In these cases, the file should be uploaded as type &#8220;other&#8221; so we can test it here.</li>
+<li>For subsettable files, there is something wrong with our ingest code that can&#8217;t process something about that particular file,
+format, or version.</li>
+<li>There is a browser-specific issue: either an undiscovered bug in our
+software, or something unique to the user&#8217;s browser, such as security settings or a conflict with a
+browser plugin like developer tools. Trying a different browser such as Firefox or Chrome would be a good step.</li>
+<li>There is a computer- or network-specific issue that we can&#8217;t determine, such as a firewall, proxy, NAT, or upload versus download
+speed. Trying a different computer at a different location might be a good step.</li>
+<li>The user is uploading a really large subsettable file, or many files, and it is taking a really long time to upload.</li>
+<li>There is something wrong with our server, such as it not responding.</li>
+<li>Using IE 8, if you add two text or PDF files in a row, the upload fails; but if you add files singly, or also add a subsettable file, they
+all work. This is a known issue, reported previously as <a class="reference external" href="https://redmine.hmdc.harvard.edu/issues/2367">#2367</a>.</li>
+</ol>
+<p><strong>So, general information that would be good to get and things to try would be:</strong></p>
+<ol class="arabic simple">
+<li>Have you ever been able to upload a file?</li>
+<li>Does a small text file work?</li>
+<li>Which browser and operating system are you using? Can you try Firefox or Chrome?</li>
+<li>Does the problem affect some files or all files? If some files, do they work one at a time? Are they all the same type such as
+Stata or SPSS? Which version? Can they be saved as a supported version, e.g. Stata 12 or SPSS 20? Upload them as type &#8220;other&#8221;
+and we&#8217;ll test here.</li>
+<li>Can you try a different computer at a different location?</li>
+<li>Last, we&#8217;ll try uploading it for you (may need DropBox to facilitate upload).</li>
+</ol>
+</div>
+<div class="section" id="manage-collections">
+<span id="id8"></span><h3>Manage Collections<a class="headerlink" href="#manage-collections" title="Permalink to this headline">¶</a></h3>
+<p>Collections can contain studies from your own dataverse or another,
+public dataverse in the Network.</p>
+<p><strong>Create Collection</strong></p>
+<p>You can create new collections in your dataverse, but any new collection
+is a child of the root collection except for Collection Links. When you
+create a child in the root collection, you also can create a child
+within that child to make a nested organization of collections. The root
+collection remains the top-level parent to all collections that are not
+linked from another dataverse.</p>
+<p>There are three ways in which you can create a collection:</p>
+<ul class="simple">
+<li>Static collection - You assign specific studies to this type of
+collection.</li>
+<li>Dynamic collection - You can create a query that gathers studies into
+a collection based on matching criteria, and keeps the contents
+current. If a study matches the query selection criteria one week,
+but is then changed and no longer matches the criteria, that study
+remains a member of the collection only as long as it matches the
+query.</li>
+<li>Linked collection - You can link an existing collection from another
+dataverse to your dataverse homepage. Note that the contents of that
+collection can be edited only in the originating dataverse.</li>
+</ul>
+<p><strong>Create Static Collection by Assigning Studies</strong></p>
+<p>To create a collection by assigning studies directly to it:</p>
+<ol class="arabic">
+<li><p class="first">Locate the root collection to create a direct subcollection in the
+root, or locate any other existing collection in which you choose to
+create a new collection. Then, click the <em>Create</em> link in the Create
+Child field for that collection.</p>
+<p>You see the Study Collection page.</p>
+</li>
+<li><p class="first">In the Type field, click the Static option.</p>
+</li>
+<li><p class="first">Enter your collection Name.</p>
+</li>
+<li><p class="first">Select the Parent in which you choose to create the collection.
+The default is the collection in which you started on the <em>Manage
+Collections</em> page. You cannot create a collection in another
+dataverse unless you have permission to do so.</p>
+</li>
+<li><p class="first">Populate the Selected Studies box:</p>
+<ul class="simple">
+<li>Click the <em>Browse</em> link to use the Dataverse and Collection
+pull-down lists to create a list of studies.</li>
+<li>Click the <em>Search</em> link to select a query field and search for
+specific studies, enter a term to search for in that query field,
+and then click Search.</li>
+</ul>
+<p>A list of available studies is displayed in the Studies to Choose
+from box.</p>
+</li>
+<li><p class="first">In the Studies to Choose from box, click a study to assign it to your
+collection.</p>
+<p>You see the study you clicked in the Selected Studies box.</p>
+</li>
+<li><p class="first">To remove studies from the list of Selected Studies, click the study
+in that box.</p>
+<p>The study is removed from the Selected Studies box.</p>
+</li>
+<li><p class="first">If needed, repopulate the Studies to Choose from box with new
+studies, and add additional studies to the Selected Studies list.</p>
+</li>
+</ol>
+<p><strong>Create Linked Collection</strong></p>
+<p>You can create a collection as a link to one or more collections from
+other dataverses, thereby defining your own collections for users to
+browse in your dataverse.</p>
+<p>Note: A collection created as a link to a collection from another
+dataverse is editable only in the originating dataverse. Also,
+collections created by use of this option might not adhere to the
+policies for adding Cataloging Information and study files that you
+require in your own dataverse.</p>
+<p>To create a collection as a link to another collection:</p>
+<ol class="arabic">
+<li><p class="first">In the Linked Collections field, click Add Collection Link.</p>
+<p>You see the Add Collection Link window.</p>
+</li>
+<li><p class="first">Use the Dataverse pull-down list to select the dataverse from which
+you choose to link a collection.</p>
+</li>
+<li><p class="first">Use the Collection pull-down list to select a collection from your
+selected dataverse to add a link to that collection in your
+dataverse.</p>
+<p>The collection you select will be displayed in your dataverse
+homepage, and will be included in your dataverse searches.</p>
+</li>
+</ol>
+<p><strong>Create Dynamic Collection as a Query</strong></p>
+<p>When you create a collection by assigning the results of a query to it,
+that collection is dynamic and is updated regularly based on the query
+results.</p>
+<p>To create a collection by assigning the results of a query:</p>
+<ol class="arabic">
+<li><p class="first">Locate the root collection to create a direct subcollection in the
+root, or locate any other existing collection in which you choose to
+create a new collection. Then, click the <em>Create</em> link in the Create
+Child field for that collection.</p>
+<p>You see the Study Collection page.</p>
+</li>
+<li><p class="first">In the Type field, click the Dynamic option.</p>
+</li>
+<li><p class="first">Enter your collection Name.</p>
+</li>
+<li><p class="first">Select the Parent in which you choose to create the collection.</p>
+<p>The default is the collection in which you started on the <em>Manage Collections</em> page. You cannot create a collection in another
+dataverse unless you have permission to do so.</p>
+</li>
+<li><p class="first">Enter a Description of this collection.</p>
+</li>
+<li><p class="first">In the Enter query field, enter the study field terms for which to
+search to assign studies with those terms to this collection.
+Use the following guidelines:</p>
+<ul>
+<li><p class="first">Almost all study fields can be used to build a collection query.</p>
+<p>The study fields must be entered in the appropriate format to
+search the fields&#8217; contents.</p>
+</li>
+<li><p class="first">Use the following format for your query:
+<tt class="docutils literal"><span class="pre">title:Elections</span> <span class="pre">AND</span> <span class="pre">keywordValue:world</span></tt>.</p>
+<p>For more information on query syntax, refer to the
+<a class="reference external" href="http://lucene.apache.org/java/docs/">Documentation</a> page at
+the Lucene website and look for <em>Query Syntax</em>. See the
+<a class="reference external" href="http://guides.thedata.org/files/thedatanew_guides/files/catalogingfields11apr08.pdf">cataloging fields</a>
+document for field query names. A few more example queries are shown
+after this list.</p>
+</li>
+<li><p class="first">For each study in a dataverse, the Study Global Id field in the
+Cataloging Information consists of three query terms:
+<tt class="docutils literal"><span class="pre">protocol</span></tt>, <tt class="docutils literal"><span class="pre">authority</span></tt>, and <tt class="docutils literal"><span class="pre">globalID</span></tt>.</p>
+<p>If you build a query using <tt class="docutils literal"><span class="pre">protocol</span></tt>, your collection can
+return any study that uses the <tt class="docutils literal"><span class="pre">protocol</span></tt> you specified.</p>
+<p>If you build a query using all three terms, your collection
+returns only one study.</p>
+</li>
+</ul>
+</li>
+<li><p class="first">To limit this collection to search for results in your own dataverse,
+click the <em>Only your dataverse</em> check box.</p>
+</li>
+</ol>
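+<p>A few more illustrative queries, using only the field names mentioned
+above (all values here are made up):</p>
+<pre class="literal-block">
+title:Elections
+title:Elections AND keywordValue:world
+protocol:hdl
+protocol:hdl AND authority:1902.1 AND globalID:10999
+</pre>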
+<p><strong>Edit Collections</strong></p>
+<ol class="arabic">
+<li><p class="first">Click a collection title to edit the contents or setup of that
+collection.</p>
+<p>You see the Collection page, with the current collection settings
+applied.</p>
+</li>
+<li><p class="first">Change, add, or delete any settings that you choose, and then click
+Save Collection to save your edits.</p>
+</li>
+</ol>
+<p><strong>Delete Collections or Remove Links</strong></p>
+<p>To delete existing static or dynamic collections:</p>
+<ol class="arabic simple">
+<li>For the collection that you choose to delete, click the Delete link.</li>
+<li>Confirm the delete action to remove the collection from your
+dataverse.</li>
+</ol>
+<p>To remove existing linked collections:</p>
+<ol class="arabic simple">
+<li>For the linked collection that you choose to remove, click the
+<em>Remove</em> link. (Note: There is no confirmation for a Remove action.
+When you click the Remove link, the Dataverse Network removes the linked collection immediately.)</li>
+</ol>
+</div>
+<div class="section" id="managing-user-file-access">
+<h3>Managing User File Access<a class="headerlink" href="#managing-user-file-access" title="Permalink to this headline">¶</a></h3>
+<p>User file access is managed through a set of access permissions that
+together determine whether or not a user can access a particular file,
+study, or dataverse. Generally speaking, there are three places where
+access permissions can be configured: at the dataverse level, at the
+study level, and at the file level. Think of each of these as a security
+perimeter or lock, with the dataverse being the outermost perimeter, the
+study the next, and finally the file level. When configuring user file
+access, it might be helpful to start at the dataverse level and work
+inward.</p>
+<p>For example, a user would like access to a particular file. Since files
+belong to studies and studies belong to dataverses, first determine
+whether the user has access to the dataverse. If the dataverse is
+released, all users have access to it. If it is unreleased, the user
+must appear in the User Permissions section on the dataverse permissions
+page.</p>
+<p>Next, they would need access to the study. If the study is public, then
+everyone has access. If it is restricted, the user must appear in the
+User Restricted Study Settings section on the study permissions page.</p>
+<p>Last, they would need access to the file. If the file is public,
+everyone has access. If the file is restricted, then the user must be
+granted access.</p>
+<p><strong>There are two ways a file can be restricted.</strong></p>
+<p>First, on the dataverse permissions page, all files in the dataverse
+could be restricted using Restrict ALL files in this Dataverse. To
+enable user access in this case, add the username to the Restricted File
+User Access section on this page.</p>
+<p>Second, individual files can be restricted at the study level on the
+study permissions page in the &#8220;Files&#8221; subtab. These can be restricted on a file-by-file basis.
+If this is the case, the file(s) will be displayed
+as restricted in the Individual File Permission Settings section. To
+enable user access to a particular file in this case, check the file to
+grant access to, type the username in the Restricted File User Access
+section, click Update so the user&#8217;s name appears next to the file, and
+then click Save.</p>
+<p>Another option at the study level when restricting files is to allow users the ability to
+request access to restricted files. This can be done in the study Permissions page in the &#8220;Files&#8221; subtab where
+you must first select the files you want to restrict, click on &#8220;update permissions&#8221; to restrict, and then under
+&#8220;File Permission Settings&#8221; check off the box to &#8220;Allow users to request access...&#8221; and click on Save at the bottom
+of the page. The contact(s) set for the Dataverse (<tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">Options</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">&gt;</span> <span class="pre">General</span></tt>) will get an email
+notification each time a user sends a request. The request access email displays a list of the file(s)
+requested and a DOI or Handle for the study. To approve or deny access to these file(s), go back to the study
+permissions page under the &#8220;Files&#8221; subtab and Approve or Deny the specific files that were requested. If you
+choose to deny any files, you will have the option to add a reason why. Be sure to click the &#8220;update&#8221;
+button and then select Save so that your selections are saved and an email is sent to the requestor granting or
+denying them access. The email sent to the requestor will list which files were approved, with a DOI or
+Handle URL, and any files that were denied, along with any reasons that may have been provided.</p>
+<p>Finally, a somewhat unusual configuration could exist where both
+Restrict all files in a dataverse is set and an individual file is
+restricted. In this case access would need to be granted in both places;
+think of it as two locks. This last situation is an artifact of
+integrating these two features and will be simplified in a future
+release.</p>
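+<p>Taken together, the perimeters and locks described above combine as in
+the following Python sketch. It is an illustration of the rules only, not
+actual DVN code; all of the names are made up:</p>
+<pre class="literal-block">
+# Illustrative sketch of the access perimeters described above; the
+# attribute names are invented and do not reflect the DVN code base.
+def can_access_file(user, dataverse, study, data_file):
+    # Dataverse perimeter: a released dataverse is open to everyone;
+    # otherwise the user must appear in its User Permissions section.
+    if not dataverse.released and user not in dataverse.permitted_users:
+        return False
+    # Study perimeter: a public study is open to everyone; otherwise
+    # the user must appear in User Restricted Study Settings.
+    if study.restricted and user not in study.permitted_users:
+        return False
+    # File perimeter, lock 1: "Restrict ALL files in this Dataverse"
+    # requires the user to be in Restricted File User Access.
+    if dataverse.all_files_restricted and user not in dataverse.file_access_users:
+        return False
+    # File perimeter, lock 2: a per-file restriction set on the study
+    # permissions page. Both locks must open.
+    if data_file.restricted and user not in data_file.permitted_users:
+        return False
+    return True
+</pre>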
+</div>
+</div>
+<div class="section" id="network-administration">
+<h2>Network Administration<a class="headerlink" href="#network-administration" title="Permalink to this headline">¶</a></h2>
+<p>The Dataverse Network provides several options for configuring and
+customizing your application. To access these options, login to the
+Dataverse Network application with an account that has Network
+Administrator privileges. By default, a brand new installation of the
+application will include an account of this type; the username and
+password are both &#8216;networkAdmin&#8217;.</p>
+<p>After you log in, the Dataverse Network home page links to the Options
+page from the &#8220;Options&#8221; gear icon in the menu bar. Click the icon to
+view all the options available for customizing and configuring the
+application, as well as some network administrator utilities.</p>
+<p>The following tasks can be performed from the Options page:</p>
+<ul class="simple">
+<li>Manage dataverses, harvesting, exporting, and OAI sets - Create,
+edit, and manage standard and harvesting dataverses, manage
+harvesting schedules, set study export schedules, and manage OAI
+harvesting sets.</li>
+<li>Manage subnetworks - Create, edit, and manage subnetworks, manage network and subnetwork level study templates.</li>
+<li>Customize the Network pages and description - Brand your Network and
+set up your Network e-mail contact.</li>
+<li>Set and edit Terms of Use - Apply Terms of Use at the Network level
+for accounts, uploads, and downloads.</li>
+<li>Create and manage user accounts and groups and Network privileges,
+and enable option to create a dataverse - Manage logins, permissions,
+and affiliate access to the Network.</li>
+<li>Use utilities and view software information - Use the administrative
+utilities and track the current Network installation.</li>
+</ul>
+<div class="section" id="dataverses-section">
+<h3>Dataverses Section<a class="headerlink" href="#dataverses-section" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="create-a-new-dataverse">
+<h4>Create a New Dataverse<a class="headerlink" href="#create-a-new-dataverse" title="Permalink to this headline">¶</a></h4>
+<p>A dataverse is a container for studies and is the home for an individual
+scholar&#8217;s or organization&#8217;s data.</p>
+<p>Creating a dataverse is easy, but first you must be a registered user.
+Depending on site policy, there may be a link on the Network home page,
+entitled &#8220;Create a Dataverse&#8221;. This first walks you through creating an
+account, then a dataverse. If this is not the case on your site, log in,
+then navigate to the Create a New Dataverse page and complete the
+required information. That&#8217;s it!</p>
+<ol class="arabic">
+<li><dl class="first docutils">
+<dt>Navigate to the Create a New Dataverse page:</dt>
+<dd><p class="first last">Network home page &gt; Options page &gt;Dataverses tab &gt; Dataverse subtab &gt; &#8220;Create Dataverse&#8221; link.</p>
+</dd>
+</dl>
+</li>
+<li><p class="first">Fill in the required information:</p>
+<blockquote>
+<div><p><strong>Type of Dataverse</strong></p>
+<p>Choose Scholar if it represents an individual&#8217;s work; otherwise choose Basic.</p>
+<p><strong>Dataverse Name</strong></p>
+<p>This will be displayed on the network and dataverse home
+pages. If this is a Scholar dataverse it will automatically be
+filled in with the scholar&#8217;s first and last name.</p>
+<p><strong>Dataverse Alias</strong></p>
+<p>This is an abbreviation, usually lower-case, that becomes part of the URL for the new dataverse (see the example after this list).</p>
+</div></blockquote>
+</li>
+<li><p class="first">Click Save and you&#8217;re done!</p>
+<p>An email will be sent to you with more information, including
+the URL to access your new dataverse.</p>
+</li>
+</ol>
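+<p>For illustration, assuming a default installation layout, a dataverse with the
+alias <tt class="docutils literal"><span class="pre">testiqss</span></tt> would typically be reachable at a URL of the form:</p>
+<pre class="literal-block">
+http://&lt;your server&gt;/dvn/dv/testiqss
+</pre>
+<p>The exact pattern depends on how your installation is deployed; the confirmation email contains the actual URL.</p>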
+<p><strong>Required information</strong> can vary depending on site policy. Required fields are noted with a red asterisk.</p>
+<p>Note: If &#8220;Allow users to create a new Dataverse when they create an account&#8221; is enabled, there is a Create a Dataverse link on the Network home page.</p>
+</div>
+<div class="section" id="manage-dataverses">
+<h4>Manage Dataverses<a class="headerlink" href="#manage-dataverses" title="Permalink to this headline">¶</a></h4>
+<p>As dataverses increase in number it&#8217;s useful to view summary information
+in table form and quickly locate a dataverse of interest. The Manage
+Dataverse table does just that.</p>
+<p>Navigate to Network home page &gt; Options page &gt; Dataverses tab &gt;
+Dataverses subtab &gt; Manage Dataverse table:</p>
+<ul class="simple">
+<li>Dataverses are listed in order of most recently created.</li>
+<li>Clicking on a column name sorts the list by that column such as Name
+or Affiliation.</li>
+<li>Clicking on a letter in the alpha selector displays only those
+dataverses beginning with that letter.</li>
+<li>Move through the list of dataverses by clicking a page number or the
+forward and back buttons.</li>
+<li>Click Delete to remove a dataverse.</li>
+</ul>
+</div>
+</div>
+<div class="section" id="subnetwork-section">
+<h3>Subnetwork Section<a class="headerlink" href="#subnetwork-section" title="Permalink to this headline">¶</a></h3>
+<p>A subnetwork is a container for a group of dataverses.  Users will be able to create their dataverses in a particular subnetwork.  It may include its own branding and its own custom study templates.</p>
+<div class="section" id="create-a-new-subnetwork">
+<h4>Create a New Subnetwork<a class="headerlink" href="#create-a-new-subnetwork" title="Permalink to this headline">¶</a></h4>
+<p>You must be a network admin in order to create a subnetwork.  These are the steps to create a subnetwork:</p>
+<ol class="arabic">
+<li><dl class="first docutils">
+<dt>Navigate to Create a New Subnetwork Page:</dt>
+<dd><p class="first last">Network home page &gt; Options page &gt; Subnetworks tab&gt; Create Subnetwork Link</p>
+</dd>
+</dl>
+</li>
+<li><p class="first">Fill in required information:</p>
+<blockquote>
+<div><p><strong>Subnetwork Name</strong></p>
+<p>The name to be displayed in the menubar. Please use a short name.</p>
+<p><strong>Subnetwork Alias</strong></p>
+<p>Short name used to build the URL for this Subnetwork. It is case sensitive.</p>
+<p><strong>Subnetwork Short Description</strong></p>
+<p>This short description is displayed on the Network Home page.</p>
+</div></blockquote>
+</li>
+<li><dl class="first docutils">
+<dt>Fill in Optional Branding</dt>
+<dd><p class="first last">These fields include a logo file, Subnetwork affiliation, description, and custom banner and footer.</p>
+</dd>
+</dl>
+</li>
+<li><p class="first">Click Save and you’re done!</p>
+</li>
+</ol>
+</div>
+<div class="section" id="manage-subnetworks">
+<h4>Manage Subnetworks<a class="headerlink" href="#manage-subnetworks" title="Permalink to this headline">¶</a></h4>
+<p>The Manage Subnetworks page gives summary information about all of the subnetworks in your installation.</p>
+<p>Navigate to Network home page &gt; Options Page &gt; Subnetworks tab:</p>
+<ul class="simple">
+<li>Subnetworks are listed alphabetically.</li>
+<li>Clicking on a column name sorts the list by that column.</li>
+<li>Click Edit to edit the subnetwork’s information or branding.</li>
+<li>Click Delete to remove a subnetwork.  Note: this will not remove the dataverses assigned to the subnetwork.  The dataverses will remain and may be reassigned to another subnetwork.</li>
+</ul>
+</div>
+<div class="section" id="manage-classifications">
+<h4>Manage Classifications<a class="headerlink" href="#manage-classifications" title="Permalink to this headline">¶</a></h4>
+<p>Classifications are a way to organize dataverses on the network home
+page so they are more easily located. They appear on the left side of
+the page and clicking on a classification causes corresponding
+dataverses to be displayed. An example classification might be
+Organization &gt; Government.</p>
+<p>Classifications typically form a hierarchy defined by the network
+administrator to be what makes sense for a particular site. A top level
+classification could be Organization, the next level Association,
+Business, Government, and School.</p>
+<p>The classification structure is first created on the Options page, from
+the Manage Classifications table. Once a classification is created,
+dataverses can be assigned to it either when the dataverse is first
+created or later from the Options page: Network home page &gt; (Your)
+Dataverse home page &gt; Options page &gt; Settings tab &gt; General subtab.</p>
+<p>To manage classifications, navigate to the Manage Classifications table:</p>
+<p>Network home page &gt; Options page &gt; Classifications tab &gt; Manage
+Classifications table</p>
+<p>From here you can view the current classification hierarchy, create a
+classification, edit an existing classification including changing its
+place in the hierarchy, and delete a classification.</p>
+</div>
+<div class="section" id="manage-study-comments-notifications">
+<h4>Manage Study Comments Notifications<a class="headerlink" href="#manage-study-comments-notifications" title="Permalink to this headline">¶</a></h4>
+<p>Dataverse admins can enable or disable a User Comment feature within
+their dataverses. If this feature is enabled, users are able to add
+comments to studies within that dataverse. Part of the User Comment
+feature is the ability for users to report comments as abuse if they
+deem that comment to be inappropriate in some way.</p>
+<p>Note that it is a best practice to explicitly define terms of use
+regarding comments when the User Comments feature is enabled. If you
+define those terms at the Network level, then any study to which
+comments are added includes those terms.</p>
+<p>When a user reports another&#8217;s comment as abuse, that comment is listed
+on the Manage Study Comment Notifications table on the Options page. For
+each comment reported as abuse, you see the study&#8217;s Global ID, the
+comment reported, the user who posted the comment, and the user who
+reported the comment as abuse.</p>
+<p>There are two ways to manage abuse reports: In the Manage Study Comment
+Notifications table on the Options page, and on the study page User
+Comments tab. In both cases, you have the options to remove the comment
+or to ignore the abuse report.</p>
+<p>The Manage Study Comments Notifications table can be found here:</p>
+<p>Network home page &gt; Options page &gt; Dataverses tab &gt; Study Comments
+subtab &gt; Manage Study Comment Notifications table</p>
+</div>
+<div class="section" id="manage-controlled-vocabulary">
+<h4>Manage Controlled Vocabulary<a class="headerlink" href="#manage-controlled-vocabulary" title="Permalink to this headline">¶</a></h4>
+<p>You can set up controlled vocabulary for a dataverse network to give the
+end user a set list of choices to select from for most fields in a study
+template. Study fields that do not allow controlled vocabulary include
+the study title and subtitle, certain date fields, and geographic
+boundaries.</p>
+<p>To <strong>manage controlled vocabulary</strong>, navigate to the Manage Controlled
+Vocabulary table:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Vocabulary</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Manage</span> <span class="pre">Controlled</span> <span class="pre">Vocabulary</span> <span class="pre">table</span></tt></p>
+<p><strong>To create a new controlled vocabulary:</strong></p>
+<ol class="arabic simple">
+<li>Click Create New Controlled Vocabulary.</li>
+<li>You see the Edit Controlled Vocabulary page.</li>
+<li>In the Name field, enter a descriptive name for this Controlled
+Vocabulary. In the Description field enter any additional information
+that will make it easier to identify a particular controlled
+vocabulary item to assign to a given custom field. In the Values
+field enter the controlled vocabulary values that you want to make
+available to users for a study field. Here you can submit an entire list of terms at once. Use the &#8220;add&#8221; and &#8220;remove&#8221; buttons
+to add or subtract values from the list. You may also copy and paste a list of values separated by carriage returns (see the example after these steps).</li>
+<li>After you complete entry of values, click Save to create the
+controlled vocabulary.</li>
+</ol>
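+<p>For example, a hypothetical controlled vocabulary for a &#8220;Data Collection Method&#8221;
+custom field could be pasted into the Values field as one value per line:</p>
+<pre class="literal-block">
+Survey
+Interview
+Focus group
+Administrative records
+</pre>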
+<p><strong>Edit Controlled Vocabulary</strong></p>
+<p>To edit an existing controlled vocabulary:</p>
+<ol class="arabic simple">
+<li>In the list of controlled vocabulary, click the Edit link for the
+controlled vocabulary that you choose to edit. You see the Edit
+Controlled Vocabulary page, with the controlled vocabulary setup that
+you selected.</li>
+<li>Edit the controlled vocabulary items that you choose to change, add,
+or remove. You may also copy and paste a list of values separated by carriage returns.</li>
+</ol>
+</div>
+<div class="section" id="manage-network-study-templates">
+<h4>Manage Network Study Templates<a class="headerlink" href="#manage-network-study-templates" title="Permalink to this headline">¶</a></h4>
+<p>You can set up study templates for a dataverse network to prepopulate
+any of the Cataloging Information fields of a new study with default
+values. Dataverse administrators may clone a Network template and modify
+it for users of that dataverse. You may also change the input level of
+any field to make a certain field required, recommended, optional,
+hidden, or disabled. Hidden fields will not be available to the user, but
+will be available to the dataverse administrator for update in cloned
+templates. Disabled fields will not be available to the dataverse
+administrator for update. You may also add your own custom fields. When
+a user adds a new study, that user can select a template to fill in the
+defaults.</p>
+<p>To manage study templates, navigate to the Manage Study Templates table:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Templates</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Manage</span> <span class="pre">Study</span> <span class="pre">Templates</span> <span class="pre">table</span></tt></p>
+<p><strong>Create Template</strong></p>
+<p>Study templates help to reduce the work needed to add a study, and to
+apply consistency to studies across a dataverse network. For example,
+you can create a template to include the Distributor and Contact details
+so that every study has the same values for that metadata.</p>
+<p>To create a new study template:</p>
+<ol class="arabic simple">
+<li>Click Create New Network Template.</li>
+<li>You see the Study Template page.</li>
+<li>In the Template Name field, enter a descriptive name for this
+template.</li>
+<li>Enter generic information in any of the Cataloging Information
+metadata fields. You can also add your own custom fields to the Data
+Collection/Methodology section of the template. Each custom field
+must be assigned a Name, Description and Field Type. You may also
+apply controlled vocabulary to any of the custom fields that are set
+to Plain Text Input as Field Type.</li>
+<li>After you complete entry of generic details in the fields that you
+choose to prepopulate for new studies, click Save to create the
+template.</li>
+</ol>
+<p><strong>Enable a template</strong></p>
+<p>Click the Enabled link for the given template. Enabled templates are
+available to dataverse administrators for cloning and to end users for
+creating studies.</p>
+<p><strong>Edit Template</strong></p>
+<p>To edit an existing study template:</p>
+<ol class="arabic simple">
+<li>In the list of templates, click the Edit link for the template that
+you choose to edit.</li>
+<li>You see the Study Template page, with the template setup that you
+selected.</li>
+<li>Edit the template fields that you choose to change, add, or remove.</li>
+</ol>
+<p><strong>Make a Template the Default</strong></p>
+<p>To set a study template as the default template for one or more
+subnetworks:</p>
+<p>In the list of templates, click the Make Default Selection link next to the
+name of the template that you choose to set as the default. A pop-up window
+with the names of the subnetworks will appear, and you may select the
+appropriate subnetworks. The name of each selected subnetwork is displayed in
+the Default column of the template that you set as its default.</p>
+<p><strong>Remove Template</strong></p>
+<p>To delete a study template from the network:</p>
+<ol class="arabic simple">
+<li>In the list of templates, click the Delete link for the template that
+you choose to remove from the network.</li>
+<li>You see the Delete Template page.</li>
+<li>Click Delete to remove the template from the network. Note that you
+cannot delete any template that is in use or is a default template at
+the network or dataverse level.</li>
+</ol>
+</div>
+</div>
+<div class="section" id="harvesting-section">
+<h3>Harvesting Section<a class="headerlink" href="#harvesting-section" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="create-a-new-harvesting-dataverse">
+<h4>Create a New Harvesting Dataverse<a class="headerlink" href="#create-a-new-harvesting-dataverse" title="Permalink to this headline">¶</a></h4>
+<p>A harvesting dataverse allows studies from another site to be imported
+so they appear to be local, though data files remain on the remote site.
+This makes it possible to access content from data repositories and
+other sites with interesting content as long as they support the OAI or
+Nesstar protocols.</p>
+<p>Harvesting dataverses differ from ordinary dataverses in that study
+content cannot be edited since it is provided by a remote source. Most
+dataverse functions still apply including editing the dataverse name,
+branding, and setting permissions.</p>
+<p>Aside from providing the usual name, alias, and affiliation information,
+creating a harvesting dataverse involves specifying the harvest
+protocol, OAI or Nesstar, the remote server URL, possibly format and set
+information, whether or how to register incoming studies, an optional
+harvest schedule, and permissions settings.</p>
+<p>To create a harvesting dataverse navigate to the Create a New Harvesting
+Dataverse page:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;&nbsp;Harvesting</span> <span class="pre">tab</span> <span class="pre">&gt;&nbsp;Harvesting</span> <span class="pre">Dataverses</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">&quot;Create</span> <span class="pre">Harvesting</span> <span class="pre">Dataverse&quot;</span> <span class="pre">link</span></tt></p>
+<p>Complete the form by entering required information and click Save.</p>
+<p>An example dataverse to harvest studies native to the Harvard dataverse:</p>
+<ul class="simple">
+<li><strong>Harvesting Type:</strong> OAI Server</li>
+<li><strong>Dataverse Name:</strong> Test IQSS Harvest</li>
+<li><strong>Dataverse Alias:</strong> testiqss</li>
+<li><strong>Dataverse Affiliation:</strong> Our Organization</li>
+<li><strong>Server URL:</strong> <a class="reference external" href="http://dvn.iq.harvard.edu/dvn/OAIHandler">http://dvn.iq.harvard.edu/dvn/OAIHandler</a></li>
+<li><strong>Harvesting Set:</strong> No Set (harvest all)</li>
+<li><strong>Harvesting Format:</strong> DDI</li>
+<li><strong>Handle Registration:</strong> Do not register harvested studies (studies must already have a handle)</li>
+</ul>
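+<p>Behind the scenes, harvesting from an OAI server uses standard OAI-PMH
+requests. As a sketch against the server above (assuming it advertises a DDI
+metadata prefix; <tt class="docutils literal"><span class="pre">verb=ListMetadataFormats</span></tt> will show what is
+actually supported), the underlying requests look like:</p>
+<pre class="literal-block">
+# Identify the server
+http://dvn.iq.harvard.edu/dvn/OAIHandler?verb=Identify
+# List the sets it offers
+http://dvn.iq.harvard.edu/dvn/OAIHandler?verb=ListSets
+# List records in DDI format (no set specified = harvest all)
+http://dvn.iq.harvard.edu/dvn/OAIHandler?verb=ListRecords&amp;metadataPrefix=ddi
+</pre>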
+</div>
+<div class="section" id="manage-harvesting">
+<h4>Manage Harvesting<a class="headerlink" href="#manage-harvesting" title="Permalink to this headline">¶</a></h4>
+<p>Harvesting is a background process: once initiated, either
+directly or via a timer, it conducts a transaction with a remote server
+and exits without user intervention. Depending on site policy and
+the update frequency of remote content, this could happen
+daily, weekly, or on-demand. How does one determine what happened? By
+using the Manage Harvesting Dataverses table on the Options page.</p>
+<p>To manage harvesting dataverses, navigate to the <strong>Manage Harvesting
+Dataverses</strong> table:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Harvesting</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Harvesting</span> <span class="pre">Dataverses</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Manage</span> <span class="pre">Harvesting</span> <span class="pre">Dataverses</span> <span class="pre">table</span></tt></p>
+<p>The Manage Harvesting table displays all harvesting dataverses, their
+schedules, and harvest results in table form. The name of each
+harvesting dataverse is a link to that harvesting dataverse&#8217;s
+configuration page. The schedule, if configured, is displayed along with
+a button to enable or disable the schedule. The last attempt and result
+are displayed along with the last non-zero result; it is possible for a
+harvest to check for updates and find none. A Run Now button
+provides on-demand harvesting and a Remove link deletes the harvesting
+dataverse.</p>
+<p>Note: the first time a dataverse is harvested, the entire catalog is
+harvested. This may take some time to complete depending on size.
+Subsequent harvests check for additions and changes or updates.</p>
+<p>Harvest failures can be investigated by examining the import and server
+logs for the timeframe and dataverse in question.</p>
+</div>
+<div class="section" id="schedule-study-exports">
+<h4>Schedule Study Exports<a class="headerlink" href="#schedule-study-exports" title="Permalink to this headline">¶</a></h4>
+<p>Sharing studies programmatically or in batch, such as by harvesting,
+requires study information, or metadata, to be exported in a
+commonly understood format. As this is a background process requiring no
+user intervention, it is common practice to schedule this to capture
+updated information.</p>
+<p>Our export process generates DDI, Dublin Core, MARC, and FGDC formats,
+though DDI and Dublin Core are most commonly used. Be aware that
+different formats contain different amounts of information, with DDI
+being the most complete because it is our native format.</p>
+<p>To schedule study exports, navigate to the Harvesting Settings subtab:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Harvesting</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Export</span> <span class="pre">Schedule</span></tt></p>
+<p>First enable export, then choose the frequency: daily using hour of day, or
+weekly using day of week. Click Save and you are finished.</p>
+<p>To disable, just choose Disable export and Save.</p>
+</div>
+<div class="section" id="manage-oai-harvesting-sets">
+<h4>Manage OAI Harvesting Sets<a class="headerlink" href="#manage-oai-harvesting-sets" title="Permalink to this headline">¶</a></h4>
+<p>By default, a client harvesting from the Dataverse Network that does not
+specify a set would fetch all unrestricted, locally owned
+studies; in other words, public studies that were not harvested
+from elsewhere. For various reasons it might be desirable to define sets
+of studies for harvest such as by owner, or to include a set that was
+harvested from elsewhere. This is accomplished using the Manage OAI
+Harvesting Sets table on the Options page.</p>
+<p>The Manage OAI Harvesting Sets table lists all currently defined OAI
+sets, their specifications, and edit, create, and delete functionality.</p>
+<p>To manage OAI harvesting sets, navigate to the Manage OAI Harvesting
+Sets table:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Harvesting</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">OAI</span> <span class="pre">Harvesting</span> <span class="pre">Sets</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Manage&nbsp;OAI</span> <span class="pre">Harvesting</span> <span class="pre">Sets</span> <span class="pre">table</span></tt></p>
+<p>To create an OAI set, click Create OAI Harvesting Set, complete the
+required fields and Save. The essential parameter that defines the set
+is the Query Definition. This is a search query using <a class="reference external" href="http://lucene.apache.org/java/3_0_0/queryparsersyntax.html">Lucene
+syntax</a>
+whose results populate the set.</p>
+<p>Once created, a set can later be edited by clicking on its name.</p>
+<p>To delete a set, click the appropriately named Delete Set link.</p>
+<p>To test the query results before creating an OAI set, a recommended
+approach is to create a <a class="reference internal" href="#manage-collections"><em>dynamic study
+collection</em></a> using the
+proposed query and view the collection contents. Both features use the
+same <a class="reference external" href="http://lucene.apache.org/java/3_0_0/queryparsersyntax.html">Lucene
+syntax</a>
+but a study collection provides a convenient way to confirm the results.</p>
+<p>Generally speaking, basic queries take the form
+<tt class="docutils literal"><span class="pre">field:value</span></tt>, where the field is one of the study
+metadata search terms listed below. Examples include:</p>
+<ul class="simple">
+<li><tt class="docutils literal"><span class="pre">globalId:&quot;hdl</span> <span class="pre">1902</span> <span class="pre">1</span> <span class="pre">10684&quot;</span> <span class="pre">OR</span> <span class="pre">globalId:&quot;hdl</span> <span class="pre">1902</span> <span class="pre">1</span> <span class="pre">11155&quot;</span></tt>: Include studies with global ids <a class="reference external" href="hdl:1902.1/10684">hdl:1902.1/10684</a> and
+<a class="reference external" href="hdl:1902.1/11155">hdl:1902.1/11155</a></li>
+<li><tt class="docutils literal"><span class="pre">authority:1902.2</span></tt>: Include studies whose authority is 1902.2. Different authorities usually represent different sources such
+as IQSS, ICPSR, etc.</li>
+<li><tt class="docutils literal"><span class="pre">dvOwnerId:184</span></tt>: Include all studies belonging to dataverse with database id 184</li>
+<li><tt class="docutils literal"><span class="pre">studyNoteType:&quot;DATAPASS&quot;</span></tt>: Include all studies that were tagged with or include the text DATAPASS in their study note field.</li>
+</ul>
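+<p>Terms can be combined with the Lucene <tt class="docutils literal"><span class="pre">AND</span></tt>, <tt class="docutils literal"><span class="pre">OR</span></tt>, and
+<tt class="docutils literal"><span class="pre">NOT</span></tt> operators and grouped with parentheses. For example, a
+hypothetical set containing all studies owned by dataverse 184 except those
+under authority 1902.2 could be defined as:</p>
+<pre class="literal-block">
+dvOwnerId:184 AND NOT authority:1902.2
+</pre>
+<p>As suggested above, verify the results with a dynamic study collection before saving the set.</p>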
+<p><strong>Study Metadata Search Terms:</strong></p>
+<div class="line-block">
+<div class="line">title</div>
+<div class="line">subtitle</div>
+<div class="line">studyId</div>
+<div class="line">otherId</div>
+<div class="line">authorName</div>
+<div class="line">authorAffiliation</div>
+<div class="line">producerName</div>
+<div class="line">productionDate</div>
+<div class="line">fundingAgency</div>
+<div class="line">distributorName</div>
+<div class="line">distributorContact</div>
+<div class="line">distributorContactAffiliation</div>
+<div class="line">distributorContactEmail</div>
+<div class="line">distributionDate</div>
+<div class="line">depositor</div>
+<div class="line">dateOfDeposit</div>
+<div class="line">seriesName</div>
+<div class="line">seriesInformation</div>
+<div class="line">studyVersion</div>
+<div class="line">relatedPublications</div>
+<div class="line">relatedMaterial</div>
+<div class="line">relatedStudy</div>
+<div class="line">otherReferences</div>
+<div class="line">keywordValue</div>
+<div class="line">keywordVocabulary</div>
+<div class="line">topicClassValue</div>
+<div class="line">topicClassVocabulary</div>
+<div class="line">abstractText</div>
+<div class="line">abstractDate</div>
+<div class="line">timePeriodCoveredStart</div>
+<div class="line">timePeriodCoveredEnd</div>
+<div class="line">dateOfCollection</div>
+<div class="line">dateOfCollectionEnd</div>
+<div class="line">country</div>
+<div class="line">geographicCoverage</div>
+<div class="line">geographicUnit</div>
+<div class="line">unitOfAnalysis</div>
+<div class="line">universe</div>
+<div class="line">kindOfData</div>
+<div class="line">timeMethod</div>
+<div class="line">dataCollector</div>
+<div class="line">frequencyOfDataCollection</div>
+<div class="line">samplingProcedure</div>
+<div class="line">deviationsFromSampleDesign</div>
+<div class="line">collectionMode</div>
+<div class="line">researchInstrument</div>
+<div class="line">dataSources</div>
+<div class="line">originOfSources</div>
+<div class="line">characteristicOfSources</div>
+<div class="line">accessToSources</div>
+<div class="line">dataCollectionSituation</div>
+<div class="line">actionsToMinimizeLoss</div>
+<div class="line">controlOperations</div>
+<div class="line">weighting</div>
+<div class="line">cleaningOperations</div>
+<div class="line">studyLevelErrorNotes</div>
+<div class="line">responseRate</div>
+<div class="line">samplingErrorEstimate</div>
+<div class="line">otherDataAppraisal</div>
+<div class="line">placeOfAccess</div>
+<div class="line">originalArchive</div>
+<div class="line">availabilityStatus</div>
+<div class="line">collectionSize</div>
+<div class="line">studyCompletion</div>
+<div class="line">confidentialityDeclaration</div>
+<div class="line">specialPermissions</div>
+<div class="line">restrictions</div>
+<div class="line">contact</div>
+<div class="line">citationRequirements</div>
+<div class="line">depositorRequirements</div>
+<div class="line">conditions</div>
+<div class="line">disclaimer</div>
+<div class="line">studyNoteType</div>
+<div class="line">studyNoteSubject</div>
+<div class="line">studyNoteText</div>
+</div>
+</div>
+<div class="section" id="edit-lockss-harvest-settings">
+<span id="id9"></span><h4>Edit LOCKSS Harvest Settings<a class="headerlink" href="#edit-lockss-harvest-settings" title="Permalink to this headline">¶</a></h4>
+<p><strong>Summary:</strong></p>
+<p><a class="reference external" href="http://lockss.stanford.edu/lockss/Home">LOCKSS Project</a> or <em>Lots
+of Copies Keep Stuff Safe</em> is an international initiative based at
+Stanford University Libraries that provides a way to inexpensively
+collect and preserve copies of authorized e-content. It does so using an
+open source, peer-to-peer, decentralized server infrastructure. In order
+to make a LOCKSS server crawl, collect and preserve content from a Dataverse Network,
+both the server (the LOCKSS daemon) and the client (the Dataverse Network) sides must
+be properly configured. In simple terms, the LOCKSS server needs to be
+pointed at the Dataverse Network, given its location and instructions on what to
+crawl; the Dataverse Network needs to be configured to allow the LOCKSS daemon to
+access the data. The section below describes the configuration tasks
+that the Dataverse Network administrator will need to do on the client side. It does
+not describe how LOCKSS works and what it does in general; it&#8217;s a fairly
+complex system, so please refer to the documentation on the <a class="reference external" href="http://lockss.stanford.edu/lockss/Home">LOCKSS Project</a> site for more
+information. Some information intended for a LOCKSS server administrator
+is available in the <a class="reference external" href="http://guides.thedata.org/book/h-using-lockss-dvn">&#8220;Using LOCKSS with Dataverse Network (DVN)&#8221;</a> section of the
+<a class="reference external" href="http://guides.thedata.org/book/installers-guides">Dataverse Network Installers Guide</a>
+(our primary sysadmin-level manual).</p>
+<p><strong>Configuration Tasks:</strong></p>
+<p>Note that neither the standard LOCKSS Web Crawler nor the OAI plugin
+can properly harvest materials from a Dataverse Network. A custom LOCKSS plugin
+developed and maintained by the Dataverse Network project is available here:
+<a class="reference external" href="http://lockss.hmdc.harvard.edu/lockss/plugin/DVNOAIPlugin.jar">http://lockss.hmdc.harvard.edu/lockss/plugin/DVNOAIPlugin.jar</a>.
+For more information on the plugin, please see the <a class="reference external" href="http://guides.thedata.org/book/h-using-lockss-dvn">&#8220;Using LOCKSS with
+Dataverse Network (DVN)&#8221;</a> section of
+the Dataverse Network Installers Guide. In order for a LOCKSS daemon to collect DVN
+content designated for preservation, an Archival Unit must be created
+with the plugin above. On the Dataverse Network side, a Manifest must be created that
+gives the LOCKSS daemon permission to collect the data. This is done by
+completing the &#8220;LOCKSS Settings&#8221; section of the
+<tt class="docutils literal"><span class="pre">Network</span> <span class="pre">Options</span> <span class="pre">-&gt;</span> <span class="pre">Harvesting</span> <span class="pre">-&gt;</span> <span class="pre">Settings</span></tt> tab.</p>
+<p>For the Dataverse Network, LOCKSS can be configured at the network level
+for the entire site and also locally at the dataverse level. The network
+level enables LOCKSS harvesting, but more restrictive policies, including
+disabling harvesting, can be configured by each dataverse. A dataverse
+cannot enable LOCKSS harvesting if it has not first been enabled at the
+network level.</p>
+<p>This &#8220;Edit LOCKSS Harvest Settings&#8221; section refers to the network level
+LOCKSS configuration.</p>
+<p>To enable LOCKSS harvesting at the network level do the following:</p>
+<ul class="simple">
+<li>Navigate to the LOCKSS Settings page: <tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">-&gt;</span> <span class="pre">Network</span> <span class="pre">Options</span> <span class="pre">-&gt;</span> <span class="pre">Harvesting</span> <span class="pre">-&gt;</span> <span class="pre">Settings</span></tt>.</li>
+<li>Fill in the harvest information including the level of harvesting allowed (Harvesting Type, Restricted Data Files), the scope
+of harvest by choosing a predefined OAI set, then if necessary a list of servers or domains allowed to harvest.</li>
+<li>It&#8217;s important to understand that when a LOCKSS daemon is authorized
+to &#8220;crawl restricted files&#8221;, this does not by itself grant the actual
+access to the materials! This setting only specifies that the daemon
+should not be skipping such restricted materials outright. (The idea
+behind this is that in an archive with large amounts of
+access-restricted materials, if only public materials are to be
+preserved by LOCKSS, lots of crawling time can be saved by instructing
+the daemon to skip non-public files, instead of having it try to access
+them and get 403/Permission Denied). If it is indeed desired to have
+non-public materials collected and preserved by LOCKSS, it is the
+responsibility of the DVN Administrator to give the LOCKSS daemon
+permission to access the files. As of DVN version 3.3, this can only be
+done based on the IP address of the LOCKSS server (by creating an
+IP-based user group with the appropriate permissions).</li>
+<li>Next select any licensing options or enter additional terms, and click &#8220;Save Changes&#8221;.</li>
+<li>Once LOCKSS harvesting has been enabled, the LOCKSS Manifest page will
+be provided by the application. This manifest is read by LOCKSS servers
+and constitutes agreement to the specified terms. The URL for the
+network-level LOCKSS manifest is
+<tt class="docutils literal"><span class="pre">http</span></tt><tt class="docutils literal"><span class="pre">://&lt;YOUR</span> <span class="pre">SERVER&gt;/dvn/faces/ManifestPage.xhtml</span></tt> (it will be
+needed by the LOCKSS server administrator in order to configure an
+<em>Archive Unit</em> for crawling and preserving the DVN). A quick way to
+verify that the manifest page is being served is sketched after this list.</li>
+</ul>
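+<p>Once LOCKSS harvesting is enabled, you can quickly confirm that the manifest
+page is being served. A minimal sketch in Python, assuming the
+<tt class="docutils literal"><span class="pre">requests</span></tt> library and a placeholder hostname:</p>
+<pre class="literal-block">
+import requests
+
+# Hypothetical hostname; substitute your own DVN server.
+url = "http://dvn.example.edu/dvn/faces/ManifestPage.xhtml"
+
+resp = requests.get(url, timeout=30)
+# An HTTP 200 response means the manifest is reachable by a LOCKSS daemon.
+print(resp.status_code)
+print(resp.text[:500])  # beginning of the manifest page
+</pre>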
+</div>
+</div>
+<div class="section" id="settings-section">
+<h3>Settings Section<a class="headerlink" href="#settings-section" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="edit-name">
+<h4>Edit Name<a class="headerlink" href="#edit-name" title="Permalink to this headline">¶</a></h4>
+<p>The name of your Dataverse Network installation is displayed at the top
+of the Network homepage, and as a link at the top of each dataverse
+homepage in your Network.</p>
+<p>To create or change the name of your Network, navigate to the Settings
+tab on the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">General</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Network</span> <span class="pre">Name</span></tt></p>
+<p>Enter a descriptive title for your Network. There are no naming
+restrictions, but it appears in the heading of every dataverse in your
+Network, so a short name works best.</p>
+<p>Click Save and you are done!</p>
+</div>
+<div class="section" id="id10">
+<h4>Edit Layout Branding<a class="headerlink" href="#id10" title="Permalink to this headline">¶</a></h4>
+<p>When you install a Network, there is no banner or footer on any page in
+the Network. You can apply any style to the Network pages, such as that
+used on your organization&#8217;s website. You can use plain text, HTML,
+JavaScript, and style tags to define your custom banner and footer. If
+your website has such elements as a navigation menu or images, you can
+add them to your Network pages.</p>
+<p>To customize the layout branding of your Network, navigate to the
+Customization subtab on the Options page:</p>
+<p>Network home page &gt; Options page &gt; Settings tab &gt; Customization subtab &gt;
+Edit Layout Branding</p>
+<p>Enter your banner and footer content in the Custom Banner and Custom
+Footer fields and Save.</p>
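+<p>As a sketch, a custom banner could be as simple as the following; the image
+URL, link, and colors are placeholders for your organization&#8217;s own assets:</p>
+<pre class="literal-block">
+&lt;div style="background: #8C1515; padding: 8px;"&gt;
+  &lt;img src="http://www.example.edu/images/logo.png" alt="Example University" /&gt;
+  &lt;a style="color: #ffffff;" href="http://www.example.edu/"&gt;Example University Home&lt;/a&gt;
+&lt;/div&gt;
+</pre>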
+<p>See <a class="reference internal" href="#edit-layout-branding"><em>Layout Branding Tips</em></a> for guidelines.</p>
+</div>
+<div class="section" id="id11">
+<h4>Edit Description<a class="headerlink" href="#id11" title="Permalink to this headline">¶</a></h4>
+<p>By default your Network homepage has the following description:
+<tt class="docutils literal"><span class="pre">A</span> <span class="pre">description</span> <span class="pre">of</span> <span class="pre">your</span> <span class="pre">Dataverse</span> <span class="pre">Network</span> <span class="pre">or</span> <span class="pre">announcements</span> <span class="pre">may</span> <span class="pre">be</span> <span class="pre">added</span> <span class="pre">here.</span> <span class="pre">Use</span> <span class="pre">Network</span> <span class="pre">Options</span> <span class="pre">to</span> <span class="pre">edit</span> <span class="pre">or</span> <span class="pre">remove</span> <span class="pre">this</span> <span class="pre">text.</span></tt>
+You can edit that text to describe or announce such things as new
+Network features, new dataverses, or maintenance activities. You can
+also disable the description so that it does not appear on the homepage.</p>
+<p>To manage the Network description, navigate to:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">General</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Network</span> <span class="pre">Description</span></tt></p>
+<p>Create a description by entering your desired content in the text box.
+HTML, JavaScript, and style tags are permitted. The <tt class="docutils literal"><span class="pre">html</span></tt> and
+<tt class="docutils literal"><span class="pre">body</span></tt> element types are not allowed. Next enable the description
+display by checking the Enable Description in Homepage checkbox. Click
+Save and you&#8217;re done. You can disable the display of the description but
+keep the content by unchecking and saving.</p>
+</div>
+<div class="section" id="edit-dataverse-requirements">
+<h4>Edit Dataverse Requirements<a class="headerlink" href="#edit-dataverse-requirements" title="Permalink to this headline">¶</a></h4>
+<p>Enforcing a minimum set of requirements can help ensure content
+consistency.</p>
+<p>When you enable dataverse requirements, newly created dataverses cannot
+be made public or released until the selected requirements are met.
+Existing dataverses are not affected until they are edited. Edits to
+existing dataverses cannot be saved until requirements are met.</p>
+<p>To manage the requirements, navigate to:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Advanced</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Release</span> <span class="pre">Dataverse</span> <span class="pre">Requirements</span></tt></p>
+<p>Available requirements include:</p>
+<ul class="simple">
+<li>Require Network Homepage Dataverse Description</li>
+<li>Require Dataverse Affiliation</li>
+<li>Require Dataverse Classification</li>
+<li>Require Dataverse Studies included prior to release</li>
+</ul>
+</div>
+<div class="section" id="id12">
+<h4>Manage E-Mail Notifications<a class="headerlink" href="#id12" title="Permalink to this headline">¶</a></h4>
+<p>The Dataverse Network sends notifications via email for a number of
+events on the site, including workflow events such as creating a
+dataverse, uploading files, releasing a study, etc. Many of these
+notifications are sent to the user initiating the action as well as to
+the network administrator. Additionally, the Report Issue link on the
+network home page sends email to the network administrator. By default,
+this email is sent to
+<a class="reference external" href="mailto:support&#64;thedata.org">support&#64;thedata.org</a>.</p>
+<p>To change this email address navigate to the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">General</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">E-Mail</span> <span class="pre">Address(es)</span></tt></p>
+<p>Enter the address of network administrators who should receive these
+notifications and Save.</p>
+<p>Please note that the Report Issue link, when accessed within a dataverse,
+gives the option of sending notification to the network or dataverse
+administrator. Configuring the dataverse administrator address is done
+at the dataverse level:
+<tt class="docutils literal"><span class="pre">(Your)</span> <span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">General</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">E-Mail</span> <span class="pre">Address(es)</span></tt></p>
+</div>
+<div class="section" id="id13">
+<h4>Enable Twitter<a class="headerlink" href="#id13" title="Permalink to this headline">¶</a></h4>
+<p>If your Dataverse Network has been configured for Automatic Tweeting,
+you will see an option listed as &#8220;Enable Twitter.&#8221; When you click this,
+you will be redirected to Twitter to authorize the Dataverse Network
+application to send tweets for you.</p>
+<p>To manage the Dataverse Twitter configuration, navigate to:</p>
+<p><tt class="docutils literal"><span class="pre">Dataverse</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Settings</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Promote</span> <span class="pre">Your</span> <span class="pre">Dataverse</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Sync</span> <span class="pre">Dataverse</span> <span class="pre">With</span> <span class="pre">Twitter</span></tt></p>
+<p>Once authorized, tweets will be sent for each new dataverse that is
+released.</p>
+<p>To disable Automatic Tweeting, go to the options page, and click
+&#8220;Disable Twitter.&#8221;</p>
+</div>
+</div>
+<div class="section" id="terms-section">
+<h3>Terms Section<a class="headerlink" href="#terms-section" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="edit-terms-for-account-creation">
+<h4>Edit Terms for Account Creation<a class="headerlink" href="#edit-terms-for-account-creation" title="Permalink to this headline">¶</a></h4>
+<p>You can set up Terms of Use that require users with new accounts to
+accept your terms before logging in for the first time.</p>
+<p>To configure these terms navigate to the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Terms</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Account</span> <span class="pre">Term</span> <span class="pre">of</span> <span class="pre">Use</span></tt></p>
+<p>Enter your required terms as you would like them to appear to users.
+HTML, JavaScript, and style tags are permitted. The <tt class="docutils literal"><span class="pre">html</span></tt> and
+<tt class="docutils literal"><span class="pre">body</span></tt> element types are not allowed. Check Enable Terms of Use to
+display these terms. Click Save and you are finished. To disable but
+preserve your current terms, uncheck the Enable checkbox and save.</p>
+</div>
+<div class="section" id="id14">
+<h4>Edit Terms for Study Creation<a class="headerlink" href="#id14" title="Permalink to this headline">¶</a></h4>
+<p>You can set up Terms of Use for the Network that require users to accept
+your terms before they can create or modify studies, including adding
+data files. These terms are defined at the network level so they apply
+across all dataverses. Users will be presented with these terms the
+first time they attempt to modify or create a study during each session.</p>
+<p>To configure these terms of use navigate to the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Terms</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Deposit</span> <span class="pre">Term</span> <span class="pre">of</span> <span class="pre">Use</span></tt></p>
+<p>Enter your terms as you would like to display them to the user. HTML,
+JavaScript, and style tags are permitted. The <tt class="docutils literal"><span class="pre">html</span></tt> and <tt class="docutils literal"><span class="pre">body</span></tt>
+element types are not allowed. Check Enable Terms of Use and save.
+Uncheck Enable Terms of Use and save to disable but preserve existing
+terms of use.</p>
+</div>
+<div class="section" id="id15">
+<h4>Edit Terms for File Download<a class="headerlink" href="#id15" title="Permalink to this headline">¶</a></h4>
+<p>You can set up Terms of Use for the Network that require users to accept
+your terms before they can download or subset files from the Network.
+Since this is defined at the network level it applies to all dataverses.
+Users will be presented with these terms the first time they attempt to
+download a file or access the subsetting and analysis page each session.</p>
+<p>To configure these terms, navigate to the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Terms</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Download</span> <span class="pre">Term</span> <span class="pre">of</span> <span class="pre">Use</span></tt></p>
+<p>Enter the terms as you want them to appear to the user. HTML,
+JavaScript, and style tags are permitted. The <tt class="docutils literal"><span class="pre">html</span></tt> and <tt class="docutils literal"><span class="pre">body</span></tt>
+element types are not allowed. Check Enable Terms of Use and save.
+Unchecking the checkbox and saving disables the display of the terms but
+preserves the current content.</p>
+</div>
+<div class="section" id="id16">
+<h4>Download Tracking Data<a class="headerlink" href="#id16" title="Permalink to this headline">¶</a></h4>
+<p>You can view any guestbook responses that have been made in all
+dataverses. Beginning with version 3.2 of Dataverse Network, for any
+dataverse where the guestbook is not enabled, data will be collected
+silently, based on the logged-in user or anonymously. The data displayed
+includes user account data or the session ID of an anonymous user, the
+global ID, study title and filename of the file downloaded, the time of
+the download, the type of download, and any custom questions that have
+been answered. The username/session ID and download type were not
+collected in the 3.1 version of DVN. A comma-separated values file of
+all download tracking data may be downloaded by clicking the Export
+Results button.</p>
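+<p>As an illustration only, a few hypothetical rows of the exported file might
+look like the following; the actual column names and order depend on your DVN
+version and any custom guestbook questions:</p>
+<pre class="literal-block">
+username,sessionId,globalId,studyTitle,fileName,downloadTime,downloadType
+jsmith,,hdl:1902.1/10684,"Example Survey Data",survey.tab,2013-04-02 14:31:05,Download
+,a81f3c02e5,hdl:1902.1/11155,"Example Census Extract",extract.csv,2013-04-02 15:02:47,Subsetting
+</pre>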
+<p>To manage the Network download tracking data, navigate to:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Download</span> <span class="pre">Tracking</span> <span class="pre">Data</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Manage</span> <span class="pre">Download</span> <span class="pre">Tracking</span> <span class="pre">Data</span> <span class="pre">table</span></tt></p>
+</div>
+</div>
+<div class="section" id="permissions-and-users-section">
+<h3>Permissions and Users Section<a class="headerlink" href="#permissions-and-users-section" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="manage-network-permissions">
+<h4>Manage Network Permissions<a class="headerlink" href="#manage-network-permissions" title="Permalink to this headline">¶</a></h4>
+<p>Permissions that are configured at the network level include:</p>
+<ul class="simple">
+<li>Enabling users to create an account when they create a dataverse.</li>
+<li>Granting privileged roles to existing users including network
+administrator and dataverse creator.</li>
+<li>Changing and revoking privileged roles of existing users.</li>
+</ul>
+<p>Enabling users to create an account when they create a dataverse
+displays a &#8220;Create a Dataverse&#8221; link on the network home page. New and
+unregistered users coming to the site can click on this link, create an
+account and a dataverse in one workflow rather than taking two separate
+steps involving the network administrator.</p>
+<p>Granting a user account network administrator status gives that user
+full control over the application as managed through the UI.</p>
+<p>Granting a user account dataverse creator status is largely a legacy
+function, since any user who creates a dataverse has this role.</p>
+<p>To manage these permissions, navigate to the Manage Network Permissions
+table on the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Manage</span> <span class="pre">Network</span> <span class="pre">Permissions</span> <span class="pre">table</span></tt></p>
+<p>Enable account creation during dataverse creation by checking that
+option and saving.</p>
+<p>Granting privileged status to a user requires entering a valid, existing
+user name, clicking add, choosing the role, then saving changes.</p>
+</div>
+<div class="section" id="roles-by-version-state-table">
+<h4>Roles by Version State Table<a class="headerlink" href="#roles-by-version-state-table" title="Permalink to this headline">¶</a></h4>
+<table border="1" class="docutils">
+<colgroup>
+<col width="20%" />
+<col width="11%" />
+<col width="15%" />
+<col width="3%" />
+<col width="13%" />
+<col width="17%" />
+<col width="20%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">&nbsp;</th>
+<th class="head"><strong>Role</strong></th>
+<th class="head">&nbsp;</th>
+<th class="head" colspan="2">&nbsp;</th>
+<th class="head">&nbsp;</th>
+<th class="head">&nbsp;</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><strong>Version State</strong></td>
+<td>None</td>
+<td>Contributor +,
+++</td>
+<td colspan="2">Curator</td>
+<td>Admin*</td>
+<td>Network Admin**</td>
+</tr>
+<tr class="row-odd"><td>Draft</td>
+<td>&nbsp;</td>
+<td>E,E2,D3,S,V</td>
+<td colspan="2">E,E2,P,T,D3,R,V</td>
+<td>E,E2,P,T,D3,R,V</td>
+<td>E,E2,P,T,D3,D2,R,V</td>
+</tr>
+<tr class="row-even"><td>In Review</td>
+<td>&nbsp;</td>
+<td>E,E2,D3,V</td>
+<td colspan="2">E,E2,P,T,D3,R,V</td>
+<td>E,E2,P,T,D3,R,V</td>
+<td>E,E2,P,T,D3,R,D2,V</td>
+</tr>
+<tr class="row-odd"><td>Released</td>
+<td>V</td>
+<td>E,V</td>
+<td colspan="2">E,P,T,D1,V</td>
+<td>E,P,T,D1,V</td>
+<td>E,P,T,D2,D1,V</td>
+</tr>
+<tr class="row-even"><td>Archived</td>
+<td>V</td>
+<td>V</td>
+<td colspan="2">P,T,V</td>
+<td>P,T,V</td>
+<td>P,T,D2,V</td>
+</tr>
+<tr class="row-odd"><td>Deaccessioned</td>
+<td>&nbsp;</td>
+<td>&nbsp;</td>
+<td colspan="2">P,T,R2,V</td>
+<td>P,T,R2,V</td>
+<td>P,T,R2,D2,V</td>
+</tr>
+</tbody>
+</table>
+<p><strong>Legend:</strong></p>
+<p>E = Edit (Cataloging info, File meta data, Add files)</p>
+<p>E2 = Edit Study Version Notes</p>
+<p>D1 = Deaccession</p>
+<p>P = Permission</p>
+<p>T = Create Template</p>
+<p>D2 = Destroy</p>
+<p>D3 = Delete Draft, Delete Review Version</p>
+<p>S = Submit for Review</p>
+<p>R = Release</p>
+<p>R2 = Restore</p>
+<p>V = View</p>
+<p><strong>Notes:</strong></p>
+<p><a href="#id17"><span class="problematic" id="id18">*</span></a>Same as Curator</p>
+<p><a href="#id19"><span class="problematic" id="id20">**</span></a>Same as Curator + D2</p>
+<p>+ Contributor actions (E,D3,S,V) depend on the dataverse&#8217;s permission
+settings: a contributor role can act either on their own studies (the
+default) or on all studies in the dataverse.</p>
+<p>++ A contributor is defined either as a user with the contributor role,
+or as any registered user in a dataverse that allows all registered
+users to contribute.</p>
+</div>
+<div class="section" id="authorization-to-access-terms-protected-files-via-the-api">
+<h4>Authorization to access Terms-protected files via the API<a class="headerlink" href="#authorization-to-access-terms-protected-files-via-the-api" title="Permalink to this headline">¶</a></h4>
+<p>As of DVN v. 3.2, a programmatic API has been provided for accessing DVN
+materials. It supports Basic HTTP Auth where the client authenticates
+itself as an existing DVN (or anonymous) user. Based on this, the API
+determines whether the client has permission to access the requested
+files or metadata. It is important to remember, however, that in addition
+to access permissions, DVN files may also be subject to &#8220;Terms of Use&#8221;
+agreements. When access to such files is attempted through the Web
+Download or Subsetting interfaces, the user is presented with an
+agreement form. The API however is intended for automated clients, so
+the remote party&#8217;s compliance with the Terms of Use must be established
+beforehand.&nbsp;<strong>We advise you to have a written agreement with authorized
+parties before allowing them to access data sets, bypassing the Terms of
+Use. The authorized party should be responsible for enforcing the Terms
+of Use to their end users.</strong>Once such an agreement has been
+established, you can grant the specified user unrestricted access to
+Terms-protected materials on the Network home page &gt; Options page &gt;
+PERMISSIONS tab &gt; Permissions subtab, in the &#8220;Authorize Users to bypass
+Terms of Use&#8221; section.</p>
+<p>Please consult the Data Sharing section of the Guide for additional
+information on the <a class="reference internal" href="dataverse-api-main.html#data-sharing-api"><em>Data Sharing API</em></a>.</p>
+</div>
+<div class="section" id="create-account">
+<h4>Create Account<a class="headerlink" href="#create-account" title="Permalink to this headline">¶</a></h4>
+<p>There are several ways to create accounts: at the network level by the
+network administrator, at the dataverse level by the dataverse
+administrator, and by the new user themselves if the option to create an
+account when creating a dataverse is enabled.</p>
+<p>Accounts created by all methods are equivalent with the exception of
+granting dataverse creator status during the create a dataverse
+workflow. That status can be granted afterwards by the network
+administrator if necessary.</p>
+<p>To create an account at the <strong>network admin level</strong>, navigate to the Create
+Account page from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Users</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Create</span> <span class="pre">User</span> <span class="pre">link</span> <span class="pre">&gt;</span> <span class="pre">Create</span> <span class="pre">Account</span> <span class="pre">page</span></tt></p>
+<p>Complete the required information denoted by the red asterisk and save.
+Note: an email address can also be used as a username.</p>
+</div>
+<div class="section" id="manage-users">
+<h4>Manage Users<a class="headerlink" href="#manage-users" title="Permalink to this headline">¶</a></h4>
+<p>The Manage Users table gives the network administrator a list of all
+user accounts in table form. It lists username, full name, roles
+(including the dataverse at which each role is granted), and current
+status, whether active or deactivated.</p>
+<p>Usernames are listed alphabetically and clicking on a username takes you
+to the account page that contains detailed information on that account.
+It also provides the ability to update personal details and change
+passwords.</p>
+<p>The Manage Users table also provides the ability to deactivate a user
+account.</p>
+<p>To view the Manage Users table navigate to the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Users</span> <span class="pre">subtab</span> <span class="pre">&gt;</span> <span class="pre">Manage</span> <span class="pre">Users</span> <span class="pre">table</span></tt></p>
+</div>
+<div class="section" id="manage-groups">
+<h4>Manage Groups<a class="headerlink" href="#manage-groups" title="Permalink to this headline">¶</a></h4>
+<p>Groups in the Dataverse Network are a way to identify collections of
+users so permissions can be applied collectively rather than
+individually. This allows controlling permissions for individuals by
+altering membership in the group without affecting permissions of other
+members. Groups can be defined by user names or IP addresses.</p>
+<p>The Manage Groups table lists information about existing groups in table
+form including name, display or friendly name, and group membership.</p>
+<p>Clicking on the name takes you to the Edit Group page where the group&#8217;s
+configuration can be changed. It is also possible to create and delete
+groups from the Manage Groups table.</p>
+<p>To view the Manage Groups table, navigate to the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Permissions</span> <span class="pre">tab</span> <span class="pre">&gt;</span> <span class="pre">Groups</span> <span class="pre">subtab</span> <span class="pre">&gt;</span>
+<span class="pre">Manage</span> <span class="pre">Groups</span> <span class="pre">table</span></tt></p>
+<p>Once on the Groups subtab, viewing the Manage Groups table, you can
+create or delete a group.</p>
+<p>When creating a group, you must choose whether to identify users by
+username (a Username Group) or by IP address (an IP User Group).</p>
+<p>With a Username Group, enter an existing username into the edit box,
+click the &#8220;+&#8221; symbol to enter additional users, then save.</p>
+<p>With an IP User Group, enter an IP address or domain name into the edit
+box. Wildcards can be used by specifying an asterisk (*) in place of an
+IP address octet (e.g., 10.20.30.*), or in place of the sub-domain or host
+portion of the domain name (e.g., *.mydomain.edu).</p>
+<p>Lastly, an optional feature of the IP User Group is support for an
+Affiliate Login Service. Effectively, this allows a proxy to access the
+Dataverse Network on behalf of a group, such as a university library,
+where identification and authorization of users is managed by the proxy
+service. To enable this feature, enter the IP addresses of any proxy
+servers that will access the Dataverse Network, check &#8220;This IP group has
+an affiliate login service&#8221;, enter the Affiliate Name as it will appear
+on the&nbsp;Dataverse Network Login page, and enter the Affiliate URL, which
+should point to the proxy server. Save, and you are finished.</p>
+</div>
+</div>
+<div class="section" id="utilities">
+<h3>Utilities<a class="headerlink" href="#utilities" title="Permalink to this headline">¶</a></h3>
+<p>The Dataverse Network provides the network administrator with tools to
+manually execute background processes, perform functions in batch, and
+resolve occasional operational issues.</p>
+<p>Navigate to the Utilities from the Options page:</p>
+<p><tt class="docutils literal"><span class="pre">Network</span> <span class="pre">home</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Options</span> <span class="pre">page</span> <span class="pre">&gt;</span> <span class="pre">Utilities</span> <span class="pre">tab</span></tt></p>
+<p>Available tools include:</p>
+<ul class="simple">
+<li><strong>Study Utilities</strong> - Create draft versions of studies, release file locks, and delete multiple studies by entering their IDs.</li>
+<li><strong>Index Utilities</strong> - Create a search index.</li>
+<li><strong>Export Utilities</strong> - Select files and export them.</li>
+<li><strong>Harvest Utilities</strong> - Harvest selected studies from another Network.</li>
+<li><strong>File Utilities</strong> - Select files and apply the JHOVE file validation process to them.</li>
+<li><strong>Import Utilities</strong> - Import multiple study files by using this custom batch process.</li>
+<li><strong>Handle Utilities</strong> - Register and re-register study handles.</li>
+</ul>
+<p><strong>Study Utilities</strong></p>
+<p>Curating a large group of studies sometimes requires direct database
+changes affecting many studies that may belong to different
+dataverses. An example might be changing the distributor name and logo,
+or the parent dataverse. Since the Dataverse Network employs study
+versioning, any such backend changes should increment the affected
+studies&#8217; versions. However, incrementing a study&#8217;s version is a
+nontrivial database update, so this utility was created to produce a
+draft of an existing study.</p>
+<p>The typical practice is to generate a list of the database IDs of the
+studies that need changing, use the utility to create drafts of those
+studies, and then run the database update scripts. The result is new,
+unreleased draft versions of studies with modifications made directly
+through the database. These studies then need to be reviewed and
+released manually.</p>
+<p>Due to the transactional nature of study updates, particularly when
+uploading large files, it is possible for a study update to be interrupted,
+such as during a system restart. When this occurs, the study lock, created to
+prevent simultaneous updates while one is already in progress, remains,
+and the study cannot be edited until it is cleared.</p>
+<p>Checking for this condition and clearing it is easy. Open this utility,
+check whether any locks are listed, and remove them. The user should once
+again be able to edit their study.</p>
+<p>The user interface provides a convenient way to delete individual
+studies, but when faced with deleting a large number of studies that do
+not all belong to a single dataverse, use the Delete utility.</p>
+<p>Specify studies by their database ID singly, as a comma-separated list
+(1,7,200, etc.), or as a hyphen-separated range (1-1000, 2005,
+2500-2700).</p>
+<p><strong>Index Utilities</strong></p>
+<p>Indexing is the process of making study metadata searchable. The Lucene
+search engine used by the Dataverse Network uses file-based indexes.
+Normally, any time a study or new study version is released, the study
+information is automatically indexed. Harvesting also indexes studies in
+small batches as they are harvested. Sometimes this does not occur, such
+as when the harvest process is interrupted. The index could also become
+corrupt for some reason, though this would be extremely rare.</p>
+<p>The index utility allows for reindexing of studies, dataverses, and the
+entire site. Studies and dataverses can be specified by their database
+IDs singly, in a comma-separated list, or in a hyphenated range: 1-1000.
+Use &#8220;index all&#8221; sparingly, particularly if you have a large site: it runs
+as a single transaction and must not be interrupted, or you will need to
+start again. A more flexible approach is to determine the lowest and
+highest study IDs and index in smaller ranges: 1-1000, 1001-2000, etc.</p>
+<p>Note: if for some reason a study change was not indexed, an automatic
+background process that runs once every 24 hours will detect this,
+inform the administrator, and reindex the study, so manual reindexing
+is not required.</p>
+<p><strong>Export Utilities</strong></p>
+<p>Export is a background process that normally runs once every 24 hours.
+Its purpose is to produce study metadata files in well-known formats
+such as DDI, DC, MIF, and FGDC that can be used to import studies into
+other systems, such as through harvesting.</p>
+<p>Sometimes it&#8217;s useful to manually export a study, dataverse, any updated
+studies, or all studies. Studies and dataverses are specified by
+database id rather than global id or handle.</p>
+<p>Export is tied to OAI set creation and Harvesting. To enable harvesting
+of a subset of studies by another site, first an OAI set is created that
+defines the group of studies. Next, the scheduled export runs and
+creates the export files if they&#8217;re not already available. It also
+associates those studies defined by the set with the set name so future
+requests for the set receive updates&nbsp;— additions or deletions from the
+set. This way remote sites harvesting the set maintain an updated study
+list.</p>
+<p>If you do not want to wait 24 hours to test harvesting a newly created set,
+use the export utility. Click &#8220;Run Export&#8221; to export any changed studies
+and associate studies to the set. Exporting studies or dataverses alone
+will not associate studies to a set; in those cases, Update Harvest
+Studies must also be run.</p>
+<p><strong>Harvest Utilities</strong></p>
+<p>The Harvest utility allows for on-demand harvesting of a single study.
+First select one of the predefined harvesting dataverses, which provide
+remote server connection information as well as the local dataverse
+that the study will be harvested into. Then specify the harvest ID of the
+study to be harvested. The harvest ID is particular to the study and the
+server being harvested from. It can be obtained from the OAI protocol
+ListIdentifiers command, from the harvest log if previously harvested,
+or, if from another DVN, it takes the form: &lt;OAI set alias&gt;//&lt;global id&gt;.
+A&nbsp;Dataverse Network study with <tt class="docutils literal"><span class="pre">globalID:</span> <span class="pre">hdl:1902.1/10004</span></tt>, from the OAI
+set &#8220;My Set&#8221;, having alias &#8220;myset&#8221;, would have a harvest identifier of:
+<tt class="docutils literal"><span class="pre">myset//hdl:1902.1/10004</span></tt></p>
+<p><strong>File Utilities</strong></p>
+<p>The Dataverse Network attempts to identify file types on upload to
+provide more information to an end user. It does this by calling a file
+type identification library called JHOVE. Though JHOVE is a very
+comprehensive library, sometimes a file type may not be recognized, or
+may be similar to another type and misidentified. For these cases we
+provide an override mechanism&nbsp;&#8212; a list of file extensions and a brief text
+description. Since these are created after the files have been uploaded,
+this file utility provides a way to re-identify the file types, and
+furthermore limits this process to specific file types or to studies,
+specified by database ID singly, as a comma-separated list, or as a
+hyphen-separated range.</p>
+<p><strong>Import Utilities</strong></p>
+<p>Importing studies is usually done by harvesting study metadata from a
+remote site via the OAI protocol. This causes study metadata to be
+hosted locally, but files are served by the remote server. The Import
+utility is provided for cases where an OAI server is unavailable or
+where the intent is to relocate studies and their files to the Dataverse
+Network.</p>
+<p>At present this requires the help of the network administrator and can
+be manually intensive. First, study metadata may need to be modified
+slightly, then saved in a specific directory structure on the server file
+system. Next, the study metadata import format and destination dataverse
+are chosen. Last, the top-level directory where the study metadata and
+files are stored is specified, and &#8220;Batch Import&#8221; is clicked. Because the DDI input
+format can be quite complex and usage varies, verify the results are
+what&#8217;s intended.</p>
+<p>A single-study import function is also provided as a way to test your
+study metadata syntax, but it is not meant for actual imports: it will
+not import associated files.</p>
+<p>Before performing a batch import, you must organize your files in the
+following manner (a sketch of the resulting layout follows the list):</p>
+<ol class="arabic simple">
+<li>If you plan to import multiple files or studies, create a master
+directory to hold all content that you choose to import.</li>
+<li>Create a separate subdirectory for each study that you choose to
+import.
+The directory name is not important.</li>
+<li>In each directory, place a file called <tt class="docutils literal"><span class="pre">study.xml</span></tt> and use that
+file to hold the XML-formatted record for one study.
+Note: Do not include file description elements in
+the <tt class="docutils literal"><span class="pre">study.xml</span></tt> file. Including those fields results in the
+addition of multiple blank files to that study.</li>
+<li>Also place in the directory any additional files that you choose to
+upload for that study.</li>
+</ol>
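+<p>For example, a batch of two studies might be organized as follows (all
+directory names, and all file names other than <tt class="docutils literal"><span class="pre">study.xml</span></tt>, are
+arbitrary and shown here only for illustration):</p>
+<div class="highlight-guess"><div class="highlight"><pre>import_batch/           &lt;- the master directory
+    study_one/          &lt;- one subdirectory per study
+        study.xml       &lt;- the DDI record for this study
+        datafile1.csv   &lt;- any files to upload with the study
+        codebook.pdf
+    study_two/
+        study.xml
+        datafile2.tab
+</pre></div></div>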
+<p>For an example of a simple study DDI, refer to the <a class="reference internal" href="#metadata-references"><em>Metadata References</em></a>
+section.</p>
+<p><strong>Handle Utilities</strong></p>
+<p>When a study is created, the global ID is first assigned, then
+registered with handle.net as a persistent identifier. This identifier
+becomes part of the study&#8217;s citation and is guaranteed to always resolve
+to the study. For the study with global ID <a class="reference external" href="hdl:1902.1/16598">hdl:1902.1/16598</a>, or handle
+1902.1/16598, the URL in the citation would be:
+<a class="reference external" href="http://hdl.handle.net/1902.1/16598">http://hdl.handle.net/1902.1/16598</a>.</p>
+<p>If for any reason a study is created and not registered or is registered
+in a way that needs to be changed, use the Handle utility to either
+register currently unregistered studies or to re-register all registered
+studies.</p>
+</div>
+<div class="section" id="web-statistics">
+<h3>Web Statistics<a class="headerlink" href="#web-statistics" title="Permalink to this headline">¶</a></h3>
+<p>The Dataverse Network provides the capability to compile and analyze
+site usage through Google Analytics. A small amount of code is embedded
+in each page so that, when enabled, any page access, along with associated
+browser and user information, is recorded by Google. Later analysis of
+this compiled access data can be performed using the <a class="reference external" href="http://www.google.com/analytics/">Google Analytics</a> utility.</p>
+<p>Note: Access to Google Analytics is optional. If access to this utility
+is not configured for your network, in place of the Manage Web Usage
+menu option is a message
+stating: <tt class="docutils literal"><span class="pre">Google</span> <span class="pre">Analytics</span> <span class="pre">are</span> <span class="pre">not</span> <span class="pre">configured</span> <span class="pre">for</span> <span class="pre">this</span> <span class="pre">Network.</span></tt></p>
+<p><strong>To enable Google Analytics:</strong></p>
+<ol class="arabic simple">
+<li>Create a Gmail account.</li>
+<li>Go to <a class="reference external" href="http://www.google.com/analytics/">Google Analytics</a> and create a profile for the server or website domain. You will
+be assigned a Web Property ID.</li>
+<li>Using the Glassfish Admin console, add a JVM option and assign it the value of the newly assigned Web Property ID:
+<tt class="docutils literal"><span class="pre">-Ddvn.googleanalytics.key=&lt;Web</span> <span class="pre">Property</span> <span class="pre">ID&gt;</span></tt></li>
+<li>Restart Glassfish.</li>
+<li>It takes about 24 hours after installation and setup of this option for tracking data to become available for use.</li>
+</ol>
+<p>Note: Google provides the code necessary for tracking. This has already
+been embedded into the Dataverse Network but not the Web Property ID.
+That is configured as a JVM option by the network admin when enabling
+this feature.</p>
+<p><strong>To view Web Statistics, navigate to:</strong></p>
+<ul class="simple">
+<li>Network home page &gt; Options page &gt; Settings tab &gt; General subtab &gt; Web Statistics</li>
+<li>You will be redirected to <a class="reference external" href="http://www.google.com/analytics/">Google Analytics</a>. Log in using your Gmail account used to
+create the profile.</li>
+</ul>
+</div>
+</div>
+<div class="section" id="appendix">
+<h2>Appendix<a class="headerlink" href="#appendix" title="Permalink to this headline">¶</a></h2>
+<p>Additional documentation complementary to the User Guides.</p>
+<div class="section" id="control-card-based-data-ingest">
+<h3>Control Card-Based Data Ingest<a class="headerlink" href="#control-card-based-data-ingest" title="Permalink to this headline">¶</a></h3>
+<p>As of version 2.2, the DVN supports ingesting plain-text data files, in
+addition to SPSS and STATA formats. This allows users and institutions
+to ingest raw data into Dataverse Networks without having to purchase
+and maintain proprietary, commercial software packages.</p>
+<p>Tab-delimited and CSV files are supported. In order to ingest a plain
+data file, an additional file containing the variable metadata needs to
+be supplied.</p>
+<p><strong>Two Metadata Types Are Supported</strong></p>
+<ol class="arabic simple">
+<li>A simplified format based on the classic SPSS control card syntax;
+this appears as &#8220;CSV/SPSS&#8221; in the menu on the Add Files page.</li>
+<li>DDI, an xml format from the Data Documentation Inititative
+consortium. Choose &#8220;TAB/DDI&#8221; to ingest a tab file with a DDI metadata sheet.</li>
+</ol>
+<p>The specifics of the formats are documented in the two sections below.</p>
+<div class="section" id="csv-data-spss-style-control-card">
+<span id="controlcard-datafile-ingest"></span><h4>CSV Data, SPSS-style Control Card<a class="headerlink" href="#csv-data-spss-style-control-card" title="Permalink to this headline">¶</a></h4>
+<p>Unlike other supported “subsettable” formats, this ingest mechanism
+requires 2 files: the CSV raw data file proper and an SPSS Setup file
+(&#8220;control card&#8221;) with the data set metadata. In the future, support for
+other data definition formats may be added (STATA, SAS, etc.). As
+always, user feedback is welcome.</p>
+<p><strong>The supported SPSS command syntax:</strong></p>
+<p>Please note that it is not our goal to attempt to support arbitrary
+SPSS commands and/or syntax variations. The goal is to enable
+users who do not own proprietary statistical software to prepare their
+raw data for DVN ingest, using a select subset of SPSS data definition
+syntax.</p>
+<p>(In addition to its simplicity and popularity, we chose to use the SPSS
+command syntax because Dataverse Network already has support for the SPSS <tt class="docutils literal"><span class="pre">.SAV</span></tt> and <tt class="docutils literal"><span class="pre">.POR</span></tt> formats, so we have a good working knowledge of the SPSS formatting
+conventions.)</p>
+<p>The following SPSS commands are supported:</p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">DATA</span> <span class="pre">LIST&nbsp;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">VARIABLE</span> <span class="pre">LABELS&nbsp;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">NUMBER</span> <span class="pre">OF</span> <span class="pre">CASES</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">VALUE</span> <span class="pre">LABELS</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">FORMATS</span></tt> (actually, not supported as of now &#8211; see below)</div>
+<div class="line"><tt class="docutils literal"><span class="pre">MISSING</span> <span class="pre">VALUES</span></tt></div>
+</div>
+<p>We support mixed case and all the abbreviations of the above commands
+that are valid under SPSS. For example, both &#8220;var labels&#8221; and &#8220;Var Lab&#8221;
+are acceptable commands.</p>
+<p>The syntax of each supported command is described below.</p>
+<p><strong>1. DATA LIST</strong></p>
+<p>An explicit delimiter definition is required. For example:</p>
+<p><tt class="docutils literal"><span class="pre">DATA</span> <span class="pre">LIST</span> <span class="pre">LIST(',')</span></tt></p>
+<p>specifies <tt class="docutils literal"><span class="pre">','</span></tt> as the delimiter. This line is followed by the <tt class="docutils literal"><span class="pre">'/'</span></tt>
+separator and variable definitions. Explicit type definitions are
+required. Each variable is defined by a name/type pair
+<tt class="docutils literal"><span class="pre">VARNAME</span> <span class="pre">(VARTYPE)</span></tt>, where <tt class="docutils literal"><span class="pre">VARTYPE</span></tt> is a standard SPSS Fortran-type
+definition.</p>
+<p><strong>Note</strong> that this is the only <strong>required</strong> section. The minimum
+amount of metadata required to ingest a raw data file is the delimiter
+character, the names of the variables and their data type. All of these
+are defined in the <tt class="docutils literal"><span class="pre">DATA</span> <span class="pre">LIST</span></tt> section. Here’s an example of a
+complete, valid control card:</p>
+<p><tt class="docutils literal"><span class="pre">DATA</span> <span class="pre">LIST</span> <span class="pre">LIST(’,’)</span></tt>
+<tt class="docutils literal"><span class="pre">CASEID</span> <span class="pre">(f)</span> <span class="pre">NAME</span> <span class="pre">(A)</span> <span class="pre">RATIO</span> <span class="pre">(f)</span></tt>
+<tt class="docutils literal"><span class="pre">.</span></tt></p>
+<p>It defines a comma-separated file with 3 variables named <tt class="docutils literal"><span class="pre">CASEID</span></tt>,
+<tt class="docutils literal"><span class="pre">NAME</span></tt> and <tt class="docutils literal"><span class="pre">RATIO</span></tt>, two of them numeric and one a character
+string.</p>
+<p>Examples of valid type definitions:</p>
+<div class="line-block">
+<div class="line"><strong>A8</strong> 8 byte character string;</div>
+<div class="line"><strong>A</strong> character string;</div>
+<div class="line"><strong>f10.2</strong> numeric value, 10 decimal digits, with 2 fractional digits;</div>
+<div class="line"><strong>f8</strong> defaults to F8.0</div>
+<div class="line"><strong>F</strong> defaults to F.0, i.e., numeric integer value</div>
+<div class="line"><strong>2</strong> defaults to F.2, i.e., numeric float value with 2 fractional digits.</div>
+</div>
+<p>The following SPSS date/time types are supported:</p>
+<table border="1" class="docutils">
+<colgroup>
+<col width="30%" />
+<col width="70%" />
+</colgroup>
+<thead valign="bottom">
+<tr class="row-odd"><th class="head">type</th>
+<th class="head">format</th>
+</tr>
+</thead>
+<tbody valign="top">
+<tr class="row-even"><td><tt class="docutils literal"><span class="pre">DATE</span></tt></td>
+<td><tt class="docutils literal"><span class="pre">yyyy-MM-dd</span></tt></td>
+</tr>
+<tr class="row-odd"><td><tt class="docutils literal"><span class="pre">DATETIME</span></tt></td>
+<td><tt class="docutils literal"><span class="pre">yyyy-MM-dd</span> <span class="pre">HH:mm:ss</span></tt></td>
+</tr>
+</tbody>
+</table>
+<p>The variable definition pairs may be separated by any combination of
+white space characters and newlines.&nbsp;<strong>Wrapped-around lines must start
+with white spaces</strong>&nbsp;(i.e., newlines must be followed by spaces). The
+list must be terminated by a line containing a single dot.</p>
+<p>Please note that the actual date values should be stored in the CSV
+file as strings, in the format above, as opposed to how SPSS stores the
+types of the same name (as integer numbers of seconds).</p>
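+<p>For illustration, a hypothetical <tt class="docutils literal"><span class="pre">DATA</span> <span class="pre">LIST</span></tt> fragment defining a
+date variable (assuming the DATE type above is used as the variable&#8217;s
+type definition), together with a matching CSV value stored as a string:</p>
+<div class="highlight-guess"><div class="highlight"><pre>DATA LIST LIST(',') /
+  CASEID (f) STARTDATE (DATE)
+  .
+</pre></div></div>
+<p>A corresponding line in the CSV data file would then read, for example:
+<tt class="docutils literal"><span class="pre">1001,2013-03-19</span></tt></p>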
+<p><strong>2. VARIABLE LABELS</strong></p>
+<p>Simple name/value pairs, separated by any combination of white space
+characters and newlines (as described in section 1 above). The list is
+terminated by a single dot.</p>
+<p>For example:</p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">VARIABLE</span> <span class="pre">LABELS</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">CELLS</span> <span class="pre">&quot;Subgroups</span> <span class="pre">for</span> <span class="pre">sample-see</span> <span class="pre">documentation&quot;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">STRATA</span> <span class="pre">&quot;Cell</span> <span class="pre">aggregates</span> <span class="pre">for</span> <span class="pre">sample”</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">.</span></tt></div>
+</div>
+<p><strong>3. NUMBER OF CASES (optional)</strong></p>
+<p>The number of cases may be explicitly specified. For example:</p>
+<p><tt class="docutils literal"><span class="pre">num</span> <span class="pre">of</span> <span class="pre">cases</span> <span class="pre">1000</span></tt></p>
+<p>When the number of cases is specified, it will be checked against the
+number of observations actually found in the CSV file, and a mismatch
+will result in an ingest error.</p>
+<p><strong>4. VALUE LABELS</strong></p>
+<p>Each value label section is a variable name followed by a list of
+value/label pairs, terminated by a single &#8220;/&#8221; character. The list of
+value label sections is terminated by a single dot.</p>
+<p>For example,</p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">VALUE</span> <span class="pre">labels</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">FOO</span> <span class="pre">0</span> <span class="pre">&quot;NADA&quot;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">1</span> <span class="pre">&quot;NOT</span> <span class="pre">MUCH&quot;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">99999999</span> <span class="pre">&quot;A</span> <span class="pre">LOT&quot;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">/</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">BAR</span> <span class="pre">97</span> <span class="pre">&quot;REFUSAL&quot;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">98</span> <span class="pre">&quot;DONT</span> <span class="pre">KNOW&quot;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">99</span> <span class="pre">&quot;MISSING&quot;</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">/</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">.</span></tt></div>
+</div>
+<p><strong>5. FORMATS</strong></p>
+<p>This command is actually redundant if you explicitly supply the variable
+formats in the <tt class="docutils literal"><span class="pre">DATA</span> <span class="pre">LIST</span></tt> section above.</p>
+<p><strong>NOTE:</strong> It appears that the only reason the <tt class="docutils literal"><span class="pre">FORMATS</span></tt> command exists is
+that <tt class="docutils literal"><span class="pre">DATA</span> <span class="pre">LIST</span></tt> syntax does not support explicit Fortran-style format
+definitions when fixed-field data is defined. So it is in fact redundant
+when we&#8217;re dealing with delimited files only.</p>
+<p>Please supply valid, Fortran-style variable formats in the <tt class="docutils literal"><span class="pre">DATA</span>
+<span class="pre">LIST</span></tt> section, as described above.</p>
+<p><strong>6. MISSING VALUES</strong></p>
+<p>This is a space/newline-separated list of variable names, each followed
+by a comma-separated list of missing value definitions, in parentheses.
+For example:</p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">INTVU4</span> <span class="pre">(97,</span> <span class="pre">98,</span> <span class="pre">99)</span></tt></div>
+</div>
+<p>The list is terminated with a single dot.</p>
+<p>An example of a valid <tt class="docutils literal"><span class="pre">MISSING</span> <span class="pre">VALUES</span></tt> control card section:</p>
+<div class="line-block">
+<div class="line"><tt class="docutils literal"><span class="pre">MISSING</span> <span class="pre">VALUES</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">INTVU4</span> <span class="pre">(97,</span> <span class="pre">98,</span> <span class="pre">99)</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">INTVU4A</span> <span class="pre">('97',</span> <span class="pre">'98',</span> <span class="pre">'99')</span></tt></div>
+<div class="line"><tt class="docutils literal"><span class="pre">.</span></tt></div>
+</div>
+<div class="line-block">
+<div class="line"><strong>An example of a control card ready for ingest:</strong></div>
+</div>
+<div class="highlight-guess"><div class="highlight"><pre><span class="n">data</span> <span class="n">list</span> <span class="n">list</span><span class="p">(</span><span class="sc">&#39;,&#39;</span><span class="p">)</span> <span class="o">/</span>
+  <span class="n">CELLS</span> <span class="p">(</span><span class="mi">2</span><span class="p">)</span>  <span class="n">STRATA</span> <span class="p">(</span><span class="mi">2</span><span class="p">)</span>  <span class="n">WT2517</span> <span class="p">(</span><span class="mi">2</span><span class="p">)</span>
+  <span class="n">SCRNRID</span> <span class="p">(</span><span class="n">f</span><span class="p">)</span> <span class="n">CASEID</span> <span class="p">(</span><span class="n">f</span><span class="p">)</span>  <span class="n">INTVU1</span> <span class="p">(</span><span class="n">f</span><span class="p">)</span>
+  <span class="n">INTVU2</span> <span class="p">(</span><span class="n">f</span><span class="p">)</span>  <span class="n">INTVU3</span> <span class="p">(</span><span class="n">f</span><span class="p">)</span>  <span class="n">INTVU4</span> <span class="p">(</span><span class="n">f</span><span class="p">)</span>
+  <span class="n">INTVU4A</span> <span class="p">(</span><span class="n">A</span><span class="p">)</span>
+  <span class="p">.</span>
+<span class="n">VARIABLE</span> <span class="n">LABELS</span>
+  <span class="n">CELLS</span> <span class="s">&quot;Subgroups for sample-see documentation&quot;</span>
+  <span class="n">STRATA</span> <span class="s">&quot;Cell aggregates for sample-see documenta&quot;</span>
+  <span class="n">WT2517</span> <span class="s">&quot;weight for rep. sample-see documentation&quot;</span>
+  <span class="n">SCRNRID</span> <span class="s">&quot;SCREENER-ID&quot;</span>
+  <span class="n">CASEID</span> <span class="s">&quot;RESPONDENT&#39;S CASE ID NUMBER&quot;</span>
+  <span class="n">INTVU1</span> <span class="s">&quot;MONTH RESPONDENT BEGAN INTERVIEW&quot;</span>
+  <span class="n">INTVU2</span> <span class="s">&quot;DAY RESPONDENT BEGAN INTERVIEW&quot;</span>
+  <span class="n">INTVU3</span> <span class="s">&quot;HOUR RESPONDENT BEGAN INTERVIEW&quot;</span>
+  <span class="n">INTVU4</span> <span class="s">&quot;MINUTE RESPONDENT BEGAN INTERVIEW&quot;</span>
+  <span class="n">INTVU4A</span> <span class="s">&quot;RESPONDENT INTERVIEW BEGAN AM OR PM&quot;</span>
+  <span class="p">.</span>
+<span class="n">VALUE</span> <span class="n">labels</span>
+  <span class="n">CASEID</span>   <span class="mi">99999997</span> <span class="s">&quot;REFUSAL&quot;</span>
+                                  <span class="mi">99999998</span> <span class="s">&quot;DONT KNOW&quot;</span>
+                                  <span class="mi">99999999</span> <span class="s">&quot;MISSING&quot;</span>
+                                  <span class="o">/</span>
+  <span class="n">INTVU1</span>   <span class="mi">97</span> <span class="s">&quot;REFUSAL&quot;</span>
+                                  <span class="mi">98</span> <span class="s">&quot;DONT KNOW&quot;</span>
+                                  <span class="mi">99</span> <span class="s">&quot;MISSING&quot;</span>
+                                  <span class="o">/</span>
+  <span class="n">INTVU2</span>   <span class="mi">97</span> <span class="s">&quot;REFUSAL&quot;</span>
+                                  <span class="mi">98</span> <span class="s">&quot;DONT KNOW&quot;</span>
+                                  <span class="mi">99</span> <span class="s">&quot;MISSING&quot;</span>
+                                  <span class="o">/</span>
+  <span class="n">INTVU3</span>   <span class="mi">97</span> <span class="s">&quot;REFUSAL&quot;</span>
+                                  <span class="mi">98</span> <span class="s">&quot;DONT KNOW&quot;</span>
+                                  <span class="mi">99</span> <span class="s">&quot;MISSING&quot;</span>
+                                  <span class="o">/</span>
+  <span class="n">INTVU4</span>   <span class="mi">97</span> <span class="s">&quot;REFUSAL&quot;</span>
+                                  <span class="mi">98</span> <span class="s">&quot;DONT KNOW&quot;</span>
+                                  <span class="mi">99</span> <span class="s">&quot;MISSING&quot;</span>
+                                  <span class="o">/</span>
+  <span class="n">INTVU4A</span> <span class="s">&quot;97&quot;</span> <span class="s">&quot;REFUSAL&quot;</span>
+                                  <span class="s">&quot;98&quot;</span> <span class="s">&quot;DONT KNOW&quot;</span>
+                                  <span class="s">&quot;99&quot;</span> <span class="s">&quot;MISSING&quot;</span>
+                                  <span class="s">&quot;AM&quot;</span> <span class="s">&quot;MORNING&quot;</span>
+                                  <span class="s">&quot;PM&quot;</span> <span class="s">&quot;EVENING&quot;</span>
+  <span class="p">.</span>
+<span class="n">MISSING</span> <span class="n">VALUES</span>
+  <span class="n">CASEID</span> <span class="p">(</span><span class="mi">99999997</span><span class="p">,</span> <span class="mi">99999998</span><span class="p">,</span> <span class="mi">99999999</span><span class="p">)</span>
+  <span class="n">INTVU1</span> <span class="p">(</span><span class="mi">97</span><span class="p">,</span> <span class="mi">98</span><span class="p">,</span> <span class="mi">99</span><span class="p">)</span>
+  <span class="n">INTVU2</span> <span class="p">(</span><span class="mi">97</span><span class="p">,</span> <span class="mi">98</span><span class="p">,</span> <span class="mi">99</span><span class="p">)</span>
+  <span class="n">INTVU3</span> <span class="p">(</span><span class="mi">97</span><span class="p">,</span> <span class="mi">98</span><span class="p">,</span> <span class="mi">99</span><span class="p">)</span>
+  <span class="n">INTVU4</span> <span class="p">(</span><span class="mi">97</span><span class="p">,</span> <span class="mi">98</span><span class="p">,</span> <span class="mi">99</span><span class="p">)</span>
+  <span class="n">INTVU4A</span> <span class="p">(</span><span class="err">&#39;</span><span class="mi">97</span><span class="err">&#39;</span><span class="p">,</span> <span class="err">&#39;</span><span class="mi">98</span><span class="err">&#39;</span><span class="p">,</span> <span class="err">&#39;</span><span class="mi">99</span><span class="err">&#39;</span><span class="p">)</span>
+  <span class="p">.</span>
+<span class="n">NUMBER</span> <span class="n">of</span> <span class="n">CASES</span> <span class="mi">2517</span>
+</pre></div>
+</div>
+<p><strong>DATA FILE.</strong></p>
+<p>Data must be stored in a text file, one observation per line. Both DOS
+and Unix new line characters are supported as line separators. On each
+line, individual values must be separated by the delimiter character
+defined in the <tt class="docutils literal"><span class="pre">DATA</span> <span class="pre">LIST</span></tt> section. There must be exactly&nbsp;(<tt class="docutils literal"><span class="pre">NUMBER</span> <span class="pre">OF</span>
+<span class="pre">VARIABLES</span> <span class="pre">-</span> <span class="pre">1</span></tt>)&nbsp;delimiter characters per line; i.e., character values must
+not contain the delimiter character.</p>
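+<p>As a sketch, the first two lines of a data file matching the example
+control card above might look like this (the values are hypothetical):</p>
+<div class="highlight-guess"><div class="highlight"><pre>1.00,2.00,0.50,101,5001,3,19,12,57,AM
+1.00,3.00,0.75,102,5002,3,19,14,05,PM
+</pre></div></div>
+<p>Each line contains exactly 9 delimiter characters, one fewer than the 10
+variables defined in the control card.</p>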
+<p><strong>QUESTIONS, TODOS:</strong></p>
+<p>Is there any reason we may want to support the <tt class="docutils literal"><span class="pre">RECODE</span></tt> command as well?</p>
+<p>&#8212; comments, suggestions are welcome! &#8212;</p>
+</div>
+<div class="section" id="tab-data-with-ddi-metadata">
+<span id="ddixml-datafile-ingest"></span><h4>Tab Data, with DDI Metadata<a class="headerlink" href="#tab-data-with-ddi-metadata" title="Permalink to this headline">¶</a></h4>
+<p>As of version 2.2, another method of ingesting raw TAB-delimited data
+files has been added to the Dataverse Network. Similarly to the SPSS control
+card-based ingest (also added in this release), this ingest mechanism
+requires 2 files: the TAB raw data file itself and the data set metadata
+in the DDI/XML format.</p>
+<p><strong>Intended use case:</strong></p>
+<p>Similarly to the SPSS syntax-based ingest, the goal is to provide
+another method of ingesting raw quantitative data into the DVN, without
+having to first convert it into one of the proprietary, commercial
+formats, such as SPSS or STATA. Please note that, in our design
+scenario, the DDI files supplying the ingest metadata are expected to be
+machine-generated by some software tool, script, etc. In other words,
+this method is targeted more toward an institutional user,
+perhaps another data archive with large quantities of data, some
+institutional knowledge of its structure, and the resources to
+invest in developing an automated tool to generate the metadata
+describing the datasets, with the final goal of ingesting all the data
+into a DVN by another automated, batch process. The DVN project is also
+considering developing a standalone tool of our own that would guide
+users through the process of gathering the information describing their
+data sets and producing properly formatted DDIs ready to be ingested.</p>
+<p>For now, if you are merely looking for a way to ingest a single
+“subsettable” data set, you should definitely be able to create a
+working DDI by hand to achieve this goal. However, we strongly recommend
+that you instead consider the CSV/SPSS control card method, which was
+designed with this use case in mind. If anything, it will take
+considerably fewer keystrokes to create an SPSS-syntax control card than
+a DDI encoding the same amount of information.</p>
+<p><strong>The supported DDI syntax:</strong></p>
+<p>You can consult the DDI project for complete information on the DDI
+metadata (<a class="reference external" href="http://icpsr.umich.edu/DDI">http://icpsr.umich.edu/DDI</a>).
+However, only a small subset of the published format syntax is used for
+ingesting individual data sets. Of the 7 main DDI sections, only 2,
+fileDscr and dataDscr, are used. Inside these sections, only a select set
+of fields, those that have direct equivalents in the DVN data set
+structure, are supported.</p>
+<p>These fields are outlined below. All the fields are mandatory, unless
+specified otherwise. An XSD schema of the format subset is also
+provided, for automated validation of machine-generated XML.</p>
+<div class="highlight-guess"><div class="highlight"><pre><span class="cp">&lt;?xml version=&quot;1.0&quot; encoding=&quot;UTF-8&quot;?&gt;</span>
+<span class="nt">&lt;codeBook</span> <span class="na">xmlns=</span><span class="s">&quot;http://www.icpsr.umich.edu/DDI&quot;</span><span class="err">\</span><span class="nt">&gt;</span>
+<span class="nt">&lt;fileDscr&gt;</span>
+        <span class="nt">&lt;fileTxt</span> <span class="na">ID=</span><span class="s">&quot;file1&quot;</span><span class="nt">&gt;</span>
+                        <span class="nt">&lt;dimensns&gt;</span>
+                                        <span class="nt">&lt;caseQnty&gt;</span>NUMBER OF OBSERVATIONS<span class="nt">&lt;/caseQnty&gt;</span>
+                                        <span class="nt">&lt;varQnty&gt;</span>NUMBER OF VARIABLES<span class="nt">&lt;/varQnty&gt;</span>
+                        <span class="nt">&lt;/dimensns&gt;</span>
+        <span class="nt">&lt;/fileTxt&gt;</span>
+<span class="nt">&lt;/fileDscr&gt;</span>
+<span class="nt">&lt;dataDscr&gt;</span>
+        <span class="c">&lt;!-- var section for a discrete numeric variable: --&gt;</span>
+        <span class="nt">&lt;var</span> <span class="na">ID=</span><span class="s">&quot;v1.1&quot;</span> <span class="na">name=</span><span class="s">&quot;VARIABLE NAME&quot;</span> <span class="na">intrvl=</span><span class="s">&quot;discrete&quot;</span> <span class="nt">&gt;</span>
+                        <span class="nt">&lt;location</span> <span class="na">fileid=</span><span class="s">&quot;file1&quot;</span><span class="nt">/&gt;</span>
+                        <span class="nt">&lt;labl</span> <span class="na">level=</span><span class="s">&quot;variable&quot;</span><span class="nt">&gt;</span>VARIABLE LABEL<span class="nt">&lt;/labl&gt;</span>
+                        <span class="nt">&lt;catgry&gt;</span>
+                                        <span class="nt">&lt;catValu&gt;</span>CATEGORY VALUE<span class="nt">&lt;/catValu&gt;</span>
+                        <span class="nt">&lt;/catgry&gt;</span>
+                …
+                <span class="c">&lt;!-- 1 or more category sections are allowed for discrete variables --&gt;</span>
+                        <span class="nt">&lt;varFormat</span> <span class="na">type=</span><span class="s">&quot;numeric&quot;</span> <span class="nt">/&gt;</span>
+        <span class="nt">&lt;/var&gt;</span>
+   <span class="c">&lt;!-- var section for a continuous numeric variable: --&gt;</span>
+        <span class="nt">&lt;var</span> <span class="na">ID=</span><span class="s">&quot;v1.2&quot;</span> <span class="na">name=</span><span class="s">&quot;VARIABLE NAME&quot;</span> <span class="na">intrvl=</span><span class="s">&quot;contin&quot;</span> <span class="nt">&gt;</span>
+                        <span class="nt">&lt;location</span> <span class="na">fileid=</span><span class="s">&quot;file1&quot;</span><span class="nt">/&gt;</span>
+                        <span class="nt">&lt;labl</span> <span class="na">level=</span><span class="s">&quot;variable&quot;</span><span class="nt">&gt;</span>VARIABLE LABEL<span class="nt">&lt;/labl&gt;</span>
+                        <span class="nt">&lt;varFormat</span> <span class="na">type=</span><span class="s">&quot;numeric&quot;</span> <span class="nt">/&gt;</span>
+        <span class="nt">&lt;/var&gt;</span>
+   <span class="c">&lt;!-- var section for a character (string) variable: --&gt;</span>
+        <span class="nt">&lt;var</span> <span class="na">ID=</span><span class="s">&quot;v1.10&quot;</span> <span class="na">name=</span><span class="s">&quot;VARIABLE NAME&quot;</span> <span class="na">intrvl=</span><span class="s">&quot;discrete&quot;</span> <span class="nt">&gt;</span>
+                        <span class="nt">&lt;location</span> <span class="na">fileid=</span><span class="s">&quot;file1&quot;</span><span class="nt">/&gt;</span>
+                        <span class="nt">&lt;labl</span> <span class="na">level=</span><span class="s">&quot;variable&quot;</span><span class="nt">&gt;</span>VARIABLE LABEL<span class="nt">&lt;/labl&gt;</span>
+                        <span class="nt">&lt;varFormat</span> <span class="na">type=</span><span class="s">&quot;character&quot;</span> <span class="nt">/&gt;</span>
+        <span class="nt">&lt;/var&gt;</span>
+        <span class="c">&lt;!-- a discrete variable with missing values defined: --&gt;</span>
+<span class="nt">&lt;/dataDscr&gt;</span>
+<span class="nt">&lt;/codeBook&gt;</span>
+</pre></div>
+</div>
+<p>&#8212; comments, suggestions are welcome! &#8212;</p>
+</div>
+</div>
+<div class="section" id="spss-data-file-ingest">
+<span id="spss-datafile-ingest"></span><h3>SPSS Data File Ingest<a class="headerlink" href="#spss-data-file-ingest" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="ingesting-spss-por-files-with-extended-labels">
+<h4>Ingesting SPSS (.por) files with extended labels<a class="headerlink" href="#ingesting-spss-por-files-with-extended-labels" title="Permalink to this headline">¶</a></h4>
+<p>This feature has been added to work around the limit on the length of
+variable labels in SPSS Portable (.por) files. To use this
+feature, select &#8220;SPSS/POR,(w/labels)&#8221; from the list of file types on
+the Add Files page. You will be prompted to first upload a text file
+containing the extended, &#8220;long&#8221; versions of the labels, and then
+upload the .por file. The label text file should contain one
+TAB-separated variable name/variable label pair per line.</p>
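+<p>As a sketch, a label text file for two of the variables from the earlier
+examples might look like this, where <tt class="docutils literal"><span class="pre">&lt;TAB&gt;</span></tt> stands for a single TAB
+character and the extended labels are hypothetical:</p>
+<div class="highlight-guess"><div class="highlight"><pre>INTVU4&lt;TAB&gt;MINUTE RESPONDENT BEGAN INTERVIEW, AS RECORDED BY THE FIELD STAFF
+INTVU4A&lt;TAB&gt;WHETHER THE RESPONDENT INTERVIEW BEGAN IN THE MORNING OR THE EVENING
+</pre></div></div>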
+</div>
+</div>
+<div class="section" id="ingest-of-r-rdata-files">
+<span id="r-datafile-ingest"></span><h3>Ingest of R (.RData) files<a class="headerlink" href="#ingest-of-r-rdata-files" title="Permalink to this headline">¶</a></h3>
+<div class="section" id="overview">
+<h4>Overview.<a class="headerlink" href="#overview" title="Permalink to this headline">¶</a></h4>
+<p>Support for ingesting R data files has been added in version 3.5. R
+has become increasingly popular in the research/academic community,
+owing to the fact that it is free and open-source (unlike SPSS and
+STATA). Consequently, more and more data is becoming available
+exclusively in RData format. This long-awaited feature makes it
+possible to ingest such data into DVN as &#8220;subsettable&#8221; files.</p>
+</div>
+<div class="section" id="requirements">
+<h4>Requirements.<a class="headerlink" href="#requirements" title="Permalink to this headline">¶</a></h4>
+<p>R ingest relies on R having been installed, configured, and made
+available to the DVN application via RServe (see the Installers
+Guide). This is in contrast to the SPSS and Stata ingest, which can
+be performed without R present (though R is still needed to perform
+most subsetting/analysis tasks on the resulting data files).</p>
+<p>The data must be formatted as an R dataframe (using data.frame() in
+R). If an .RData file contains multiple dataframes, only the first one
+will be ingested.</p>
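+<p>A minimal sketch in R of preparing such a file (the data frame and file
+names are hypothetical):</p>
+<div class="highlight-guess"><div class="highlight"><pre># build a data frame and save it as an .RData file for DVN ingest
+mydata &lt;- data.frame(caseid = c(1, 2, 3),
+                     name   = c("a", "b", "c"),
+                     ratio  = c(0.5, 1.25, 2.0))
+save(mydata, file = "mydata.RData")
+</pre></div></div>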
+</div>
+<div class="section" id="data-types-compared-to-other-supported-formats-stat-spss">
+<h4>Data Types, compared to other supported formats (Stata, SPSS)<a class="headerlink" href="#data-types-compared-to-other-supported-formats-stat-spss" title="Permalink to this headline">¶</a></h4>
+<div class="section" id="integers-doubles-character-strings">
+<h5>Integers, Doubles, Character strings<a class="headerlink" href="#integers-doubles-character-strings" title="Permalink to this headline">¶</a></h5>
+<p>The handling of these types is intuitive and straightforward. The
+resulting tab file columns, summary statistics and UNF signatures
+should be identical to those produced by ingesting the same vectors
+from SPSS and Stata.</p>
+<p><strong>A couple of features that are unique to R/new in DVN:</strong></p>
+<p>R explicitly supports Missing Values for all of the types above;
+Missing Values encoded in R vectors will be recognized and preserved
+in TAB files (as &#8216;NA&#8217;), counted in the generated summary statistics
+and data analysis.</p>
+<p>In addition to Missing Values, R recognizes &#8220;Not a Number&#8221; (NaN) and
+positive and negative infinity for floating point values. These
+are now properly supported by the DVN.</p>
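+<p>For example, a double vector such as the following (a hypothetical
+sketch) will have its missing and non-finite values preserved on ingest:</p>
+<div class="highlight-guess"><div class="highlight"><pre># NA is written to the TAB file as 'NA'; NaN, Inf and -Inf are
+# likewise carried through rather than dropped
+x &lt;- c(1.5, NA, NaN, Inf, -Inf)
+</pre></div></div>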
+<p>Also note that, unlike Stata, where &#8220;float&#8221; and &#8220;double&#8221; are supported
+as distinct data types, all floating point values in R are double
+precision.</p>
+</div>
+<div class="section" id="r-factors">
+<h5>R Factors<a class="headerlink" href="#r-factors" title="Permalink to this headline">¶</a></h5>
+<p>These are ingested as &#8220;Categorical Values&#8221; in the DVN.</p>
+<p>One thing to keep in mind: in both Stata and SPSS, the actual value of
+a categorical variable can be both character and numeric. In R, all
+factor values are strings, even if they are string representations of
+numbers. So the values of the resulting categoricals in the DVN will
+always be of string type too.</p>
+<div class="line-block">
+<div class="line"><strong>New:</strong> To properly handle <em>ordered factors</em> in R, the DVN now supports the concept of an &#8220;Ordered Categorical&#8221; - a categorical value where an explicit order is assigned to the list of value labels.</div>
+</div>
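+<p>A brief sketch of creating an ordered factor in R (the variable and its
+levels are hypothetical):</p>
+<div class="highlight-guess"><div class="highlight"><pre># an ordered factor; the explicit level order is preserved as an
+# "Ordered Categorical" when ingested into the DVN
+education &lt;- factor(c("high school", "college", "graduate"),
+                    levels  = c("high school", "college", "graduate"),
+                    ordered = TRUE)
+</pre></div></div>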
+</div>
+<div class="section" id="new-boolean-values">
+<h5>(New!) Boolean values<a class="headerlink" href="#new-boolean-values" title="Permalink to this headline">¶</a></h5>
+<p>R Boolean (logical) values are supported.</p>
+</div>
+<div class="section" id="limitations-of-r-data-format-as-compared-to-spss-and-stata">
+<h5>Limitations of R data format, as compared to SPSS and STATA.<a class="headerlink" href="#limitations-of-r-data-format-as-compared-to-spss-and-stata" title="Permalink to this headline">¶</a></h5>
+<p>Most noticeably, R lacks a standard mechanism for defining descriptive
+labels for the data frame variables.  In the DVN, similarly to
+both Stata and SPSS, variables have distinct names and labels; with
+the latter reserved for longer, descriptive text.
+With variables ingested from R data frames, the variable name will be
+used for both the &#8220;name&#8221; and the &#8220;label&#8221;.</p>
+<div class="line-block">
+<div class="line"><em>Optional R packages exist for providing descriptive variable labels;
+in one of the future versions support may be added for such a
+mechanism. It would of course work only for R files that were
+created with such optional packages</em>.</div>
+</div>
+<p>Similarly, R categorical values (factors) lack descriptive labels too.
+<strong>Note:</strong> This is potentially confusing, since R factors do
+actually have &#8220;labels&#8221;.  This is a matter of terminology - an R
+factor&#8217;s label is in fact the same thing as the &#8220;value&#8221; of a
+categorical variable in SPSS or Stata and DVN; it contains the actual
+meaningful data for the given observation. It is NOT a field reserved
+for explanatory, human-readable text, as is the case with the
+SPSS/Stata &#8220;label&#8221;.</p>
+<p>Ingesting an R factor with the level labels &#8220;MALE&#8221; and &#8220;FEMALE&#8221; will
+produce a categorical variable with &#8220;MALE&#8221; and &#8220;FEMALE&#8221; in the
+values and labels both.</p>
+</div>
+</div>
+<div class="section" id="time-values-in-r">
+<h4>Time values in R<a class="headerlink" href="#time-values-in-r" title="Permalink to this headline">¶</a></h4>
+<p>This warrants a dedicated section of its own, because of some unique
+ways in which time values are handled in R.</p>
+<p>R makes an effort to treat a time value as a real time instance. This
+is in contrast with either SPSS or Stata, where time value
+representations such as &#8220;Sep-23-2013 14:57:21&#8221; are allowed; note that
+in the absence of an explicitly defined time zone, this value cannot
+be mapped to an exact point in real time.  R handles times in the
+&#8220;Unix-style&#8221; way: the value is converted to the
+&#8220;seconds-since-the-Epoch&#8221; Greenwich time (GMT or UTC) and the
+resulting numeric value is stored in the data file; time zone
+adjustments are made in real time as needed.</p>
+<p>Things get ambiguous and confusing when R <strong>displays</strong> this time
+value: unless the time zone was explicitly defined, R will adjust the
+value to the current time zone. The resulting behavior is often
+counter-intuitive: if you create a time value, for example:</p>
+<blockquote>
+<div>timevalue&lt;-as.POSIXct(&quot;03/19/2013 12:57:00&quot;, format = &quot;%m/%d/%Y %H:%M:%OS&quot;);</div></blockquote>
+<p>on a computer configured for the San Francisco time zone, the value
+will be displayed differently on computers in different time zones;
+for example, as &#8220;12:57 PST&#8221; while still on the West Coast, but as
+&#8220;15:57 EST&#8221; in Boston.</p>
+<p>If it is important that the values are always displayed the same way,
+regardless of the current time zone, it is recommended that the time
+zone be explicitly defined. For example:</p>
+<blockquote>
+<div>attr(timevalue,"tzone")&lt;-"PST"</div></blockquote>
+<dl class="docutils">
+<dt>or</dt>
+<dd>timevalue&lt;-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS", tz="PST");</dd>
+</dl>
+<p>Now the value will always be displayed as &#8220;12:57 PST&#8221;, regardless of
+the time zone that is current for the OS ... <strong>BUT ONLY</strong> if the OS
+where R is installed actually understands the time zone &#8220;PST&#8221;, which
+is by no means guaranteed! Otherwise, R will <strong>quietly adjust</strong>
+the stored GMT value to <strong>the current time zone</strong>, yet still
+display it with the &#8220;PST&#8221; tag attached! In other words, R does a
+fairly decent job of <strong>storing</strong> time values in a
+non-ambiguous, platform-independent manner, but gives no guarantee that
+the values will be displayed in any way that is predictable or intuitive.</p>
+<p>In practical terms, it is recommended to use the long/descriptive
+forms of time zones, as they are more likely to be properly recognized
+on most computers: for example, &#8220;Japan&#8221; instead of &#8220;JST&#8221;.  Another possible
+solution is to explicitly use GMT or UTC (since these are very likely to be
+properly recognized on any system), or the &#8220;UTC+&lt;OFFSET&gt;&#8221; notation. Still, none of the above
+<strong>guarantees</strong> proper, non-ambiguous handling of time values in R data
+sets. The fact that R <strong>quietly</strong> modifies time values when it doesn&#8217;t
+recognize the supplied timezone attribute, yet still appends that attribute to the
+<strong>changed</strong> time value, makes this quite difficult to work with. (These issues are
+discussed in depth on R-related forums; no attempt is made to
+summarize them here. This is just to make you aware that this can be
+a complex issue!)</p>
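+<p>For example, a sketch using a long/descriptive zone name instead of an
+abbreviation:</p>
+<pre class="literal-block">
+# "America/Los_Angeles" (an Olson/IANA name) is more portable than "PST";
+# "UTC" is the safest choice of all
+timevalue &lt;- as.POSIXct("03/19/2013 12:57:00",
+                        format = "%m/%d/%Y %H:%M:%OS",
+                        tz = "America/Los_Angeles")
+</pre>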
+<p>An important thing to keep in mind, in connection with the DVN ingest
+of R files, is that the DVN will <strong>reject</strong> an R data file containing time
+values with time zones that it cannot recognize. This is done in
+order to avoid some of the potential issues outlined above.</p>
+<p>It is also recommended that any vectors containing time values
+ingested into the DVN are reviewed, and the resulting entries in the
+TAB files are compared against the original values in the R data
+frame, to make sure they have been ingested as expected.</p>
+<p>Another <strong>potential issue</strong> here is the <strong>UNF</strong>. The way the UNF
+algorithm works, the same date/time values with and without the
+timezone (e.g. &#8220;12:45&#8221; vs. &#8220;12:45 EST&#8221;) <strong>produce different
+UNFs</strong>. Considering that time values in Stata/SPSS do not have time
+zones, but ALL time values in R do (yes, they all do: if the timezone
+wasn&#8217;t defined explicitly, the value implicitly becomes a time value in the
+&#8220;UTC&#8221; zone!), this means that it is <strong>impossible</strong> to have two time
+value vectors, in Stata/SPSS and R, that produce the same UNF.</p>
+<p><strong>A pro tip:</strong> if it is important to produce SPSS/Stata and R versions of
+the same data set that result in the same UNF when ingested, you may
+define the time variables as <strong>strings</strong> in the R data frame, using
+the &#8220;YYYY-MM-DD HH:mm:ss&#8221; formatting notation. This is the format used by the UNF
+algorithm to normalize time values, so doing the above will result in
+the same UNF as the vector of the same time values in Stata.</p>
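+<p>A minimal sketch of this approach (the variable names are hypothetical):</p>
+<pre class="literal-block">
+# store the time variable as character strings in the "YYYY-MM-DD HH:mm:ss"
+# form, matching the normalization the UNF algorithm applies to time values
+timestring &lt;- format(timevalue, "%Y-%m-%d %H:%M:%S")
+class(timestring)   # "character"
+</pre>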
+<p>Note: date values (dates only, without time) should be handled the
+exact same way as those in SPSS and Stata, and should produce the same
+UNFs.</p>
+</div>
+</div>
+<div class="section" id="fits-file-format-ingest">
+<span id="fits-datafile-ingest"></span><h3>FITS File format Ingest<a class="headerlink" href="#fits-file-format-ingest" title="Permalink to this headline">¶</a></h3>
+<p>This custom ingest is an experiment in branching out into a discipline
+outside of the Social Sciences. It was added in v.3.4 as part of the
+collaboration between IQSS and the Harvard-Smithsonian Center for
+Astrophysics. FITS is a multi-part file format for storing
+astronomical data (<a class="reference external" href="http://fits.gsfc.nasa.gov/fits_standard.html">http://fits.gsfc.nasa.gov/fits_standard.html</a>). The DVN
+now offers an ingest plugin that parses FITS file headers for
+key-value metadata, which are extracted and made searchable.</p>
+<p>FITS is now listed on the DVN AddFiles page as a recognized file
+format. The same asynchronous process is used as for &#8220;subsettable&#8221;
+files: the processing is done in the background, and an email
+notification is sent once it completes.</p>
+<p>Unlike with the &#8220;subsettable&#8221; file ingest, no format conversion takes
+place and the FITS file is ingested as is, similarly to &#8220;other
+materials&#8221; files. The process is limited to the extraction of the
+searchable metadata.  Once the file is ingested and the study is
+re-indexed, these file-level FITS metadata fields can be searched on
+from the Advanced Search page, at either the Dataverse or Network
+level. Choose one of the FITS File Information fields listed in the
+drop-down, and enter the relevant search term. Search results that match
+the query will show individual files as well as studies.</p>
+<p>The ingest also generates a short summary of the file contents (number
+and type of Header-Data Units) and adds it to the file description.</p>
+</div>
+<div class="section" id="metadata-references">
+<span id="id27"></span><h3>Metadata References<a class="headerlink" href="#metadata-references" title="Permalink to this headline">¶</a></h3>
+<p>The Dataverse Network metadata is compliant with the <a class="reference external" href="http://www.icpsr.umich.edu/DDI/">DDI schema
+version 2</a>. The Cataloging
+Information fields associated with each study contain most of the fields
+in the study description section of the DDI. That way the Dataverse
+Network metadata can be mapped easily to a DDI, and be exported into XML
+format for preservation and interoperability.</p>
+<p>Dataverse Network data is also compliant with <a class="reference external" href="http://www.dublincore.org/">Simple Dublin
+Core</a>&nbsp;(DC) requirements. For imports
+only, Dataverse Network data is compliant with the <a class="reference external" href="http://www.fgdc.gov/metadata">Content Standard
+for Digital Geospatial Metadata (CSDGM), Vers. 2 (FGDC-STD-001-1998)</a>&nbsp;(FGDC).</p>
+<p>Attached is a PDF file that defines and maps all Dataverse Network
+Cataloging Information fields. Information provided in the file includes
+the following:</p>
+<ul class="simple">
+<li>Field label - For each Cataloging Information field, the field label
+appears first in the mapping matrix.</li>
+<li>Description - A description of each field follows the field label.</li>
+<li>Query term - If a field is available for use in building a query, the
+term to use for that field is listed.</li>
+<li>Dataverse Network database element name - The Dataverse Network
+database element name for the field is provided.</li>
+<li>Advanced search - If a field is available for use in an advanced
+search, that is indicated.</li>
+<li>DDI element mapping for imports - For harvested or imported studies,
+the imported DDI elements are mapped to Dataverse Network fields.</li>
+<li>DDI element mapping for exports - When a study or dataverse is
+harvested or exported in DDI format, the Dataverse Network fields are
+mapped to DDI elements.</li>
+<li>DC element mapping for imports - For harvested or imported studies,
+the imported DC elements are mapped to specific Dataverse Network
+fields.</li>
+<li>DC element mapping for exports - When a study or dataverse is
+harvested or exported in DC format, specific Dataverse Network fields
+are mapped to the DC elements.</li>
+<li>FGDC element mapping for imports - For harvested or imported studies,
+the imported FGDC elements are mapped to specific Dataverse Network fields.</li>
+</ul>
+<p>Also attached is an example of a DDI for a simple study containing
+title, author, description, keyword, and topic classification cataloging
+information fields suitable for use with batch import.</p>
+<p><img alt="image9" src="_images/application-pdf.png" />
+<a class="reference external" href="https://github.com/IQSS/dvn/blob/develop/doc/sphinx/source/datausers-guides_files/catalogingfields11apr08.pdf">catalogingfields11apr08.pdf</a></p>
+<p><img alt="image10" src="_images/application-octet-stream.png" />
+<a class="reference external" href="https://github.com/IQSS/dvn/blob/develop/doc/sphinx/source/datausers-guides_files/simple_study_1.xml">simple_study.xml</a></p>
+</div>
+<div class="section" id="zelig-interface">
+<h3>Zelig Interface<a class="headerlink" href="#zelig-interface" title="Permalink to this headline">¶</a></h3>
+<p>Zelig is statistical software for everyone: researchers, instructors,
+and students. It is a front-end and back-end for R (Zelig is written in
+R; a minimal usage sketch follows the list below). The Zelig software:</p>
+<ul class="simple">
+<li>Unifies diverse theories of inference</li>
+<li>Unifies different statistical models and notation</li>
+<li>Unifies R packages in a common syntax</li>
+</ul>
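+<p>For orientation, a minimal Zelig call might look like the following
+(this is a sketch: it assumes the Zelig package is installed, and that
+&#8220;dat&#8221; is a data frame with the hypothetical variables y, x1, and x2):</p>
+<pre class="literal-block">
+library(Zelig)
+# a least-squares regression expressed in Zelig's common model syntax
+z.out &lt;- zelig(y ~ x1 + x2, model = "ls", data = dat)
+summary(z.out)
+</pre>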
+<p>Zelig is distributed under the GNU General Public License, Version 2.
+After installation, the source code is located in your R library
+directory. You can download a tarball of the latest Zelig source code
+from&nbsp;<a class="reference external" href="http://projects.iq.harvard.edu/zelig">http://projects.iq.harvard.edu/zelig</a>.</p>
+<p>The Dataverse Network software uses Zelig to perform advanced
+statistical analysis functions. The current interface schema used by the
+Dataverse Network for Zelig processes is provided in the
+configzeliggui.xml file attached at the end of this section.</p>
+<p><strong>Criteria for Model Availability</strong></p>
+<p>Three factors determine which Zelig models are available for analysis in
+the Dataverse Network:</p>
+<ul class="simple">
+<li>Some new models require data structures and modeling parameters that
+are not compatible with the current framework of the Dataverse Network
+and other web-driven applications. These types of models are not
+available in the Dataverse Network.</li>
+<li>Models must be explicitly listed in the Zelig packages to be used in
+the Dataverse Network, and all models must be disclosed fully, including
+runtime errors. Zelig models that do not meet these specifications are
+excluded from the Dataverse Network until they are disclosed with a
+complete set of information.</li>
+<li>An installation-based factor can also limit the Zelig models available
+in the Dataverse Network. A minimum version of the core software package
+GCC (4.0) must be installed on any Linux-based R machine used with the
+Dataverse Network, in order to install and run a key Zelig package, MCMCpack. If
+a Linux machine that is designated for R is used for DSB services and
+does not have the minimum version of the GCC package installed, the
+Dataverse Network loses at least eight of the available
+advanced analysis models.</li>
+</ul>
+<p><img alt="image11" src="_images/application-octet-stream.png" />
+<a class="reference external" href="https://github.com/IQSS/dvn/blob/develop/doc/sphinx/source/datausers-guides_files/configzeliggui_0.xml">configzeliggui.xml</a></p>
+</div>
+</div>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+        </div>
+        <div class="sidebar">
+          <h3>Table Of Contents</h3>
+          <ul class="current">
+<li class="toctree-l1 current"><a class="current reference internal" href="">User Guide</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="#common-tasks">Common Tasks</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="#finding-data">Finding Data</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#using-data">Using Data</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#publishing-data">Publishing Data</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#things-to-consider-next-steps">Things to Consider, Next Steps</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#how-the-guides-are-organized">How the Guides Are Organized</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#other-resources">Other Resources</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#contact-us">Contact Us</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="#finding-and-using-data">Finding and Using Data</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="#search">Search</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#view-studies-download-data">View Studies / Download Data</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#subset-and-analysis">Subset and Analysis</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#tabular-data">Tabular Data</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#network-data">Network Data</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#data-visualization">Data Visualization</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#explore-data">Explore Data</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#set-up">Set Up</a></li>
+</ul>
+</li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="#dataverse-administration">Dataverse Administration</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="#create-a-dataverse">Create a Dataverse</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#edit-general-settings">Edit General Settings</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#edit-layout-branding">Edit Layout Branding</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#edit-description">Edit Description</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#edit-study-comments-settings">Edit Study Comments Settings</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#manage-e-mail-notifications">Manage E-Mail Notifications</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#add-fields-to-search-results">Add Fields to Search Results</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#set-default-study-listing-sort-order">Set Default Study Listing Sort Order</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#enable-twitter">Enable Twitter</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#get-code-for-dataverse-link-or-search-box">Get Code for Dataverse Link or Search Box</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#edit-terms-for-study-creation">Edit Terms for Study Creation</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#edit-terms-for-file-download">Edit Terms for File Download</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#manage-permissions">Manage Permissions</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#create-user-account">Create User Account</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#download-tracking-data">Download Tracking Data</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#edit-file-download-guestbook">Edit File Download Guestbook</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#openscholar">OpenScholar</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#enabling-lockss-access-to-the-dataverse">Enabling LOCKSS access to the Dataverse</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="#study-and-data-administration">Study and Data Administration</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="#create-new-study">Create New Study</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#manage-studies">Manage Studies</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#manage-study-templates">Manage Study Templates</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#data-uploads">Data Uploads</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#manage-collections">Manage Collections</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#managing-user-file-access">Managing User File Access</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="#network-administration">Network Administration</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="#dataverses-section">Dataverses Section</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#create-a-new-dataverse">Create a New Dataverse</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#manage-dataverses">Manage Dataverses</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#subnetwork-section">Subnetwork Section</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#create-a-new-subnetwork">Create a New Subnetwork</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#manage-subnetworks">Manage Subnetworks</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#manage-classifications">Manage Classifications</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#manage-study-comments-notifications">Manage Study Comments Notifications</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#manage-controlled-vocabulary">Manage Controlled Vocabulary</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#manage-network-study-templates">Manage Network Study Templates</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#harvesting-section">Harvesting Section</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#create-a-new-harvesting-dataverse">Create a New Harvesting Dataverse</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#manage-harvesting">Manage Harvesting</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#schedule-study-exports">Schedule Study Exports</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#manage-oai-harvesting-sets">Manage OAI Harvesting Sets</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#edit-lockss-harvest-settings">Edit LOCKSS Harvest Settings</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#settings-section">Settings Section</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#edit-name">Edit Name</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#id10">Edit Layout Branding</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#id11">Edit Description</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#edit-dataverse-requirements">Edit Dataverse Requirements</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#id12">Manage E-Mail Notifications</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#id13">Enable Twitter</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#terms-section">Terms Section</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#edit-terms-for-account-creation">Edit Terms for Account Creation</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#id14">Edit Terms for Study Creation</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#id15">Edit Terms for File Download</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#id16">Download Tracking Data</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#permissions-and-users-section">Permissions and Users Section</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#manage-network-permissions">Manage Network Permissions</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#roles-by-version-state-table">Roles by Version State Table</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#authorization-to-access-terms-protected-files-via-the-api">Authorization to access Terms-protected files via the API</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#create-account">Create Account</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#manage-users">Manage Users</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#manage-groups">Manage Groups</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#utilities">Utilities</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#web-statistics">Web Statistics</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="#appendix">Appendix</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="#control-card-based-data-ingest">Control Card-Based Data Ingest</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#csv-data-spss-style-control-card">CSV Data, SPSS-style Control Card</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#tab-data-with-ddi-metadata">Tab Data, with DDI Metadata</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#spss-data-file-ingest">SPSS Data File Ingest</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#ingesting-spss-por-files-with-extended-labels">Ingesting SPSS (.por) files with extended labels</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#ingest-of-r-rdata-files">Ingest of R (.RData) files</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="#overview">Overview.</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#requirements">Requirements.</a></li>
+<li class="toctree-l4"><a class="reference internal" href="#data-types-compared-to-other-supported-formats-stat-spss">Data Types, compared to other supported formats (Stat, SPSS)</a><ul>
+<li class="toctree-l5"><a class="reference internal" href="#integers-doubles-character-strings">Integers, Doubles, Character strings</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#r-factors">R Factors</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#new-boolean-values">(New!) Boolean values</a></li>
+<li class="toctree-l5"><a class="reference internal" href="#limitations-of-r-data-format-as-compared-to-spss-and-stata">Limitations of R data format, as compared to SPSS and STATA.</a></li>
+</ul>
+</li>
+<li class="toctree-l4"><a class="reference internal" href="#time-values-in-r">Time values in R</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="#fits-file-format-ingest">FITS File format Ingest</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#metadata-references">Metadata References</a></li>
+<li class="toctree-l3"><a class="reference internal" href="#zelig-interface">Zelig Interface</a></li>
+</ul>
+</li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-installer-main.html">Installers Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-developer-main.html">DVN Developers Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-api-main.html">APIs Guide</a></li>
+</ul>
+
+          <h3 style="margin-top: 1.5em;">Search</h3>
+          <form class="search" action="search.html" method="get">
+            <input type="text" name="q" />
+            <input type="submit" value="Go" />
+            <input type="hidden" name="check_keywords" value="yes" />
+            <input type="hidden" name="area" value="default" />
+          </form>
+          <p class="searchtip" style="font-size: 90%">
+            Enter search terms.
+          </p>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+    <div class="footer-wrapper">
+      <div class="footer">
+        <div class="left">
+          <a href="index.html" title="Dataverse Network Guides"
+             >previous</a> |
+          <a href="dataverse-installer-main.html" title="Installers Guide"
+             >next</a> |
+          <a href="genindex.html" title="General Index"
+             >index</a>
+            <br/>
+            <a href="_sources/dataverse-user-main.txt"
+               rel="nofollow">Show Source</a>
+        </div>
+
+        <div class="right">
+          
+    <div class="footer">
+        &copy; Copyright 1997-2013, President &amp; Fellows of Harvard University.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2b1.
+    </div>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+  </body>
+</html>
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/genindex.html	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,256 @@
+
+
+
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+    
+    <title>Index &mdash; The Harvard Dataverse Network 3.6.1 documentation</title>
+    
+    <link rel="stylesheet" href="_static/agogo.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    
+    <script type="text/javascript">
+      var DOCUMENTATION_OPTIONS = {
+        URL_ROOT:    './',
+        VERSION:     '3.6.1',
+        COLLAPSE_INDEX: false,
+        FILE_SUFFIX: '.html',
+        HAS_SOURCE:  true
+      };
+    </script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
+    <link rel="top" title="The Harvard Dataverse Network 3.6.1 documentation" href="index.html" /> 
+  </head>
+  <body>
+    <div class="header-wrapper">
+      <div class="header">
+        <div class="headertitle"><a
+          href="index.html">The Harvard Dataverse Network 3.6.1 documentation</a></div>
+        <div class="rel">
+          <a href="#" title="General Index"
+             accesskey="I">index</a>
+        </div>
+       </div>
+    </div>
+
+    <div class="content-wrapper">
+      <div class="content">
+        <div class="document">
+            
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body">
+            
+
+<h1 id="index">Index</h1>
+
+<div class="genindex-jumpbox">
+ <a href="#_"><strong>_</strong></a>
+ | <a href="#B"><strong>B</strong></a>
+ | <a href="#E"><strong>E</strong></a>
+ | <a href="#G"><strong>G</strong></a>
+ | <a href="#M"><strong>M</strong></a>
+ | <a href="#N"><strong>N</strong></a>
+ | <a href="#P"><strong>P</strong></a>
+ | <a href="#S"><strong>S</strong></a>
+ 
+</div>
+<h2 id="_">_</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%" valign="top"><dl>
+      
+  <dt>
+    __main__
+  </dt>
+
+      <dd><dl>
+        
+  <dt><a href="index.html#index-0">module</a>
+  </dt>
+
+      </dl></dd>
+  </dl></td>
+</tr></table>
+
+<h2 id="B">B</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%" valign="top"><dl>
+      
+  <dt><a href="index.html#index-1">BNF</a>
+  </dt>
+
+  </dl></td>
+</tr></table>
+
+<h2 id="E">E</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%" valign="top"><dl>
+      
+  <dt>
+    execution
+  </dt>
+
+      <dd><dl>
+        
+  <dt><a href="index.html#index-0">context</a>
+  </dt>
+
+      </dl></dd>
+  </dl></td>
+</tr></table>
+
+<h2 id="G">G</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%" valign="top"><dl>
+      
+  <dt><a href="index.html#index-1">grammar</a>
+  </dt>
+
+  </dl></td>
+</tr></table>
+
+<h2 id="M">M</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%" valign="top"><dl>
+      
+  <dt>
+    module
+  </dt>
+
+      <dd><dl>
+        
+  <dt><a href="index.html#index-0">__main__</a>
+  </dt>
+
+        
+  <dt><a href="index.html#index-0">search path</a>
+  </dt>
+
+        
+  <dt><a href="index.html#index-0">sys</a>
+  </dt>
+
+      </dl></dd>
+  </dl></td>
+</tr></table>
+
+<h2 id="N">N</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%" valign="top"><dl>
+      
+  <dt><a href="index.html#index-1">notation</a>
+  </dt>
+
+  </dl></td>
+</tr></table>
+
+<h2 id="P">P</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%" valign="top"><dl>
+      
+  <dt>
+    path
+  </dt>
+
+      <dd><dl>
+        
+  <dt><a href="index.html#index-0">module search</a>
+  </dt>
+
+      </dl></dd>
+  </dl></td>
+</tr></table>
+
+<h2 id="S">S</h2>
+<table style="width: 100%" class="indextable genindextable"><tr>
+  <td style="width: 33%" valign="top"><dl>
+      
+  <dt>
+    search
+  </dt>
+
+      <dd><dl>
+        
+  <dt><a href="index.html#index-0">path, module</a>
+  </dt>
+
+      </dl></dd>
+      
+  <dt><a href="index.html#index-1">syntax</a>
+  </dt>
+
+  </dl></td>
+  <td style="width: 33%" valign="top"><dl>
+      
+  <dt>
+    sys
+  </dt>
+
+      <dd><dl>
+        
+  <dt><a href="index.html#index-0">module</a>
+  </dt>
+
+      </dl></dd>
+  </dl></td>
+</tr></table>
+
+
+
+          </div>
+        </div>
+      </div>
+        </div>
+        <div class="sidebar">
+          <h3>Table Of Contents</h3>
+          <ul>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-user-main.html">User Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-installer-main.html">Installers Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-developer-main.html">DVN Developers Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-api-main.html">APIs Guide</a></li>
+</ul>
+
+          <h3 style="margin-top: 1.5em;">Search</h3>
+          <form class="search" action="search.html" method="get">
+            <input type="text" name="q" />
+            <input type="submit" value="Go" />
+            <input type="hidden" name="check_keywords" value="yes" />
+            <input type="hidden" name="area" value="default" />
+          </form>
+          <p class="searchtip" style="font-size: 90%">
+            Enter search terms.
+          </p>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+    <div class="footer-wrapper">
+      <div class="footer">
+        <div class="left">
+          <a href="#" title="General Index"
+             >index</a>
+        </div>
+
+        <div class="right">
+          
+    <div class="footer">
+        &copy; Copyright 1997-2013, President &amp; Fellows of Harvard University.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2b1.
+    </div>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+  </body>
+</html>
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/index.html	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,413 @@
+
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+    
+    <title>Dataverse Network Guides &mdash; The Harvard Dataverse Network 3.6.1 documentation</title>
+    
+    <link rel="stylesheet" href="_static/agogo.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    
+    <script type="text/javascript">
+      var DOCUMENTATION_OPTIONS = {
+        URL_ROOT:    './',
+        VERSION:     '3.6.1',
+        COLLAPSE_INDEX: false,
+        FILE_SUFFIX: '.html',
+        HAS_SOURCE:  true
+      };
+    </script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
+    <link rel="top" title="The Harvard Dataverse Network 3.6.1 documentation" href="#" />
+    <link rel="next" title="User Guide" href="dataverse-user-main.html" /> 
+  </head>
+  <body>
+    <div class="header-wrapper">
+      <div class="header">
+        <div class="headertitle"><a
+          href="#">The Harvard Dataverse Network 3.6.1 documentation</a></div>
+        <div class="rel">
+          <a href="dataverse-user-main.html" title="User Guide"
+             accesskey="N">next</a> |
+          <a href="genindex.html" title="General Index"
+             accesskey="I">index</a>
+        </div>
+       </div>
+    </div>
+
+    <div class="content-wrapper">
+      <div class="content">
+        <div class="document">
+            
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body">
+            
+  <div class="section" id="dataverse-network-guides">
+<h1>Dataverse Network Guides<a class="headerlink" href="#dataverse-network-guides" title="Permalink to this headline">¶</a></h1>
+<p>Contents:</p>
+<div class="toctree-wrapper compound">
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-user-main.html">User Guide</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-user-main.html#common-tasks">Common Tasks</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#finding-data">Finding Data</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#using-data">Using Data</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#publishing-data">Publishing Data</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#things-to-consider-next-steps">Things to Consider, Next Steps</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#how-the-guides-are-organized">How the Guides Are Organized</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#other-resources">Other Resources</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#contact-us">Contact Us</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-user-main.html#finding-and-using-data">Finding and Using Data</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#search">Search</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#view-studies-download-data">View Studies / Download Data</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#subset-and-analysis">Subset and Analysis</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#tabular-data">Tabular Data</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#network-data">Network Data</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#data-visualization">Data Visualization</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#explore-data">Explore Data</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#set-up">Set Up</a></li>
+</ul>
+</li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-user-main.html#dataverse-administration">Dataverse Administration</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#create-a-dataverse">Create a Dataverse</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#edit-general-settings">Edit General Settings</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#edit-layout-branding">Edit Layout Branding</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#edit-description">Edit Description</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#edit-study-comments-settings">Edit Study Comments Settings</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#manage-e-mail-notifications">Manage E-Mail Notifications</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#add-fields-to-search-results">Add Fields to Search Results</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#set-default-study-listing-sort-order">Set Default Study Listing Sort Order</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#enable-twitter">Enable Twitter</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#get-code-for-dataverse-link-or-search-box">Get Code for Dataverse Link or Search Box</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#edit-terms-for-study-creation">Edit Terms for Study Creation</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#edit-terms-for-file-download">Edit Terms for File Download</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#manage-permissions">Manage Permissions</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#create-user-account">Create User Account</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#download-tracking-data">Download Tracking Data</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#edit-file-download-guestbook">Edit File Download Guestbook</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#openscholar">OpenScholar</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#enabling-lockss-access-to-the-dataverse">Enabling LOCKSS access to the Dataverse</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-user-main.html#study-and-data-administration">Study and Data Administration</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#create-new-study">Create New Study</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#manage-studies">Manage Studies</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#manage-study-templates">Manage Study Templates</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#data-uploads">Data Uploads</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#manage-collections">Manage Collections</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#managing-user-file-access">Managing User File Access</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-user-main.html#network-administration">Network Administration</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#dataverses-section">Dataverses Section</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#create-a-new-dataverse">Create a New Dataverse</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#manage-dataverses">Manage Dataverses</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#subnetwork-section">Subnetwork Section</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#create-a-new-subnetwork">Create a New Subnetwork</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#manage-subnetworks">Manage Subnetworks</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#manage-classifications">Manage Classifications</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#manage-study-comments-notifications">Manage Study Comments Notifications</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#manage-controlled-vocabulary">Manage Controlled Vocabulary</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#manage-network-study-templates">Manage Network Study Templates</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#harvesting-section">Harvesting Section</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#create-a-new-harvesting-dataverse">Create a New Harvesting Dataverse</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#manage-harvesting">Manage Harvesting</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#schedule-study-exports">Schedule Study Exports</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#manage-oai-harvesting-sets">Manage OAI Harvesting Sets</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#edit-lockss-harvest-settings">Edit LOCKSS Harvest Settings</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#settings-section">Settings Section</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#edit-name">Edit Name</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#id10">Edit Layout Branding</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#id11">Edit Description</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#edit-dataverse-requirements">Edit Dataverse Requirements</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#id12">Manage E-Mail Notifications</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#id13">Enable Twitter</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#terms-section">Terms Section</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#edit-terms-for-account-creation">Edit Terms for Account Creation</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#id14">Edit Terms for Study Creation</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#id15">Edit Terms for File Download</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#id16">Download Tracking Data</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#permissions-and-users-section">Permissions and Users Section</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#manage-network-permissions">Manage Network Permissions</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#roles-by-version-state-table">Roles by Version State Table</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#authorization-to-access-terms-protected-files-via-the-api">Authorization to access Terms-protected files via the API</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#create-account">Create Account</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#manage-users">Manage Users</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#manage-groups">Manage Groups</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#utilities">Utilities</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#web-statistics">Web Statistics</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-user-main.html#appendix">Appendix</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#control-card-based-data-ingest">Control Card-Based Data Ingest</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#csv-data-spss-style-control-card">CSV Data, SPSS-style Control Card</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#tab-data-with-ddi-metadata">Tab Data, with DDI Metadata</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#spss-data-file-ingest">SPSS Data File Ingest</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#ingesting-spss-por-files-with-extended-labels">Ingesting SPSS (.por) files with extended labels</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#ingest-of-r-rdata-files">Ingest of R (.RData) files</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#overview">Overview.</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#requirements">Requirements.</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#data-types-compared-to-other-supported-formats-stat-spss">Data Types, compared to other supported formats (Stat, SPSS)</a><ul>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-user-main.html#integers-doubles-character-strings">Integers, Doubles, Character strings</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-user-main.html#r-factors">R Factors</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-user-main.html#new-boolean-values">(New!) Boolean values</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-user-main.html#limitations-of-r-data-format-as-compared-to-spss-and-stata">Limitations of R data format, as compared to SPSS and STATA.</a></li>
+</ul>
+</li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-user-main.html#time-values-in-r">Time values in R</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#fits-file-format-ingest">FITS File format Ingest</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#metadata-references">Metadata References</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-user-main.html#zelig-interface">Zelig Interface</a></li>
+</ul>
+</li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-installer-main.html">Installers Guide</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-installer-main.html#quick-install">Quick Install</a></li>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-installer-main.html#system-requirements">SYSTEM REQUIREMENTS</a></li>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-installer-main.html#prerequisites">PREREQUISITES</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#glassfish">Glassfish</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#postgresql">PostgreSQL</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#r-and-rserve">R and RServe</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#system-configuration">System Configuration</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-installer-main.html#running-the-installer">RUNNING THE INSTALLER</a></li>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-installer-main.html#optional-components">Optional Components</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#recaptcha-bot-blocker">reCAPTCHA bot blocker</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#google-analytics">Google Analytics</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#imagemagick">ImageMagick</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#handle-system">Handle System</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#twitter-setup">Twitter setup</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#digital-object-identifiers">Digital Object Identifiers</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-installer-main.html#appendix">Appendix</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#do-you-need-r">Do you need R?</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#what-does-the-installer-do">What does the Installer do?</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#glassfish-configuration-template">Glassfish configuration template</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#glassfish-configuration-individual-settings">Glassfish Configuration, individual settings</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-installer-main.html#jvm-options">JVM options</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-installer-main.html#ejb-container">EJB Container</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-installer-main.html#http-service">HTTP Service</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-installer-main.html#javamail-session">JavaMail Session</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-installer-main.html#jdbc-resources">JDBC Resources</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-installer-main.html#jms-resources">JMS Resources</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#postgresql-setup">PostgreSQL setup</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#redhat-startup-file-for-glassfish-example">RedHat startup file for glassfish, example</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#enabling-secure-remote-access-to-asadmin">Enabling secure remote access to Asadmin</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#using-lockss-with-dvn">Using LOCKSS with DVN</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#read-only-mode">Read Only Mode</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-installer-main.html#backup-and-restore">Backup and Restore</a></li>
+</ul>
+</li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-developer-main.html">DVN Developers Guide</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-developer-main.html#build-environment-configuring-netbeans">Build Environment (Configuring NetBeans)</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-developer-main.html#install-netbeans-and-glassfish">Install NetBeans and GlassFish</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-developer-main.html#install-netbeans-bundle">Install NetBeans bundle</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-developer-main.html#if-you-have-to-install-glassfish-3-1-2-2">[If you have to] Install GlassFish 3.1.2.2</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-developer-main.html#install-junit-if-you-haven-t-already">Install JUnit (if you haven&#8217;t already)</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-developer-main.html#check-out-a-new-copy-of-the-dvn-source-tree">Check out a new copy of the DVN source tree</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-developer-main.html#create-a-github-account-if-you-don-t-have-one-already">Create a GitHub account [if you don&#8217;t have one already]</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-developer-main.html#set-up-an-ssh-keypair-if-you-haven-t-already">Set up an ssh keypair (if you haven&#8217;t already)</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-developer-main.html#clone-the-repo">Clone the repo</a><ul>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-developer-main.html#remote-repository">Remote Repository</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-developer-main.html#remote-branches">Remote Branches</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-developer-main.html#destination-directory">Destination Directory</a></li>
+</ul>
+</li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-developer-main.html#open-projects">Open Projects</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-developer-main.html#build-for-the-first-time">Build for the first time</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-developer-main.html#application-environment-configuring-glassfish-and-postgresql">Application Environment (Configuring GlassFish and PostgreSQL)</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-developer-main.html#install-postgresql-database-server">Install PostgreSQL database server</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-developer-main.html#run-the-install-dev-script">Run the install-dev script</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-developer-main.html#testing-login">Testing login</a></li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-developer-main.html#developing-with-git">Developing with Git</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-developer-main.html#commit">Commit</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-developer-main.html#push">Push</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-developer-main.html#release">Release</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-developer-main.html#merge-develop-into-master">Merge develop into master</a><ul>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-developer-main.html#tag-the-release">Tag the release</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-developer-main.html#make-release-available-for-download">Make release available for download</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-developer-main.html#increment-the-version-number">Increment the version number</a></li>
+</ul>
+</li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-developer-main.html#branches">Branches</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-developer-main.html#current-list-of-branches">Current list of branches</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-developer-main.html#new-branching-model-develop-vs-master">New branching model: develop vs. master</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-developer-main.html#feature-branches">Feature branches</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-developer-main.html#example-feature-branch-2656-lucene">Example feature branch: 2656-lucene</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-developer-main.html#switching-to-the-master-branch-to-merge-commits-from-the-develop-branch">Switching to the master branch to merge commits from the develop branch</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-developer-main.html#tips">Tips</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-developer-main.html#previewing-changes-before-a-pull">Previewing changes before a pull</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-developer-main.html#errors">Errors</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-developer-main.html#duplicate-class">Duplicate class</a></li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+</li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-api-main.html">APIs Guide</a><ul>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-api-main.html#data-sharing-api">Data Sharing API</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-api-main.html#api-urls">API URLs</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-api-main.html#metadata-api">Metadata API</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-api-main.html#metadatasearchfields">metadataSearchFields</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-api-main.html#metadatasearch">metadataSearch</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-api-main.html#metadataformatsavailable">metadataFormatsAvailable</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-api-main.html#metadata">metadata</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-api-main.html#file-access-api">File Access API</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-api-main.html#downloadinfo">downloadInfo</a></li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-api-main.html#download">download</a></li>
+</ul>
+</li>
+</ul>
+</li>
+<li class="toctree-l2"><a class="reference internal" href="dataverse-api-main.html#data-deposit-api">Data Deposit API</a><ul>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-api-main.html#overview-of-data-deposit-api">Overview of Data Deposit API</a><ul>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-api-main.html#data-deposit-api-v1-curl-examples">Data Deposit API v1 <cite>curl</cite> examples</a><ul>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-api-main.html#retrieve-sword-service-document">Retrieve SWORD service document</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-api-main.html#create-a-study-with-an-atom-entry-xml-file">Create a study with an Atom entry (XML file)</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-api-main.html#dublin-core-dc-qualified-mapping-ddi-dataverse-network-db-element-crosswalk">Dublin Core (DC) Qualified Mapping - DDI - Dataverse Network DB Element Crosswalk</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-api-main.html#add-files-to-a-study-with-a-zip-file">Add files to a study with a zip file</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-api-main.html#display-a-study-atom-entry">Display a study atom entry</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-api-main.html#display-a-study-statement">Display a study statement</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-api-main.html#delete-a-file-by-database-id">Delete a file by database id</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-api-main.html#replacing-cataloging-information-title-author-etc-for-a-study">Replacing cataloging information (title, author, etc.) for a study</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-api-main.html#list-studies-in-a-dataverse">List studies in a dataverse</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-api-main.html#delete-a-study-non-released-studies-only">Delete a study (non-released studies only)</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-api-main.html#deaccession-a-study-released-studies-only">Deaccession a study (released studies only)</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-api-main.html#release-a-study">Release a study</a></li>
+<li class="toctree-l5"><a class="reference internal" href="dataverse-api-main.html#determine-if-a-dataverse-has-been-released">Determine if a dataverse has been released</a></li>
+</ul>
+</li>
+<li class="toctree-l4"><a class="reference internal" href="dataverse-api-main.html#curl-reference"><cite>curl</cite> reference</a></li>
+</ul>
+</li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-api-main.html#dvn-data-deposit-api-v1-client-sample-code-python">DVN Data Deposit API v1 client sample code (Python)</a></li>
+<li class="toctree-l3"><a class="reference internal" href="dataverse-api-main.html#swordv2-client-libraries">SWORDv2 client libraries</a></li>
+</ul>
+</li>
+</ul>
+</li>
+</ul>
+</div>
+<div class="section" id="the-execution-context">
+<span id="index-0"></span><h2>The execution context<a class="headerlink" href="#the-execution-context" title="Permalink to this headline">¶</a></h2>
+</div>
+</div>
+<div class="section" id="indices-and-tables">
+<span id="index-1"></span><h1>Indices and tables<a class="headerlink" href="#indices-and-tables" title="Permalink to this headline">¶</a></h1>
+<ul class="simple">
+<li><a class="reference internal" href="genindex.html"><em>Index</em></a></li>
+<li><a class="reference internal" href="py-modindex.html"><em>Module Index</em></a></li>
+<li><a class="reference internal" href="search.html"><em>Search Page</em></a></li>
+</ul>
+</div>
+
+
+          </div>
+        </div>
+      </div>
+        </div>
+        <div class="sidebar">
+          <h3>Table Of Contents</h3>
+          <ul>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-user-main.html">User Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-installer-main.html">Installers Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-developer-main.html">DVN Developers Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-api-main.html">APIs Guide</a></li>
+</ul>
+
+          <h3 style="margin-top: 1.5em;">Search</h3>
+          <form class="search" action="search.html" method="get">
+            <input type="text" name="q" />
+            <input type="submit" value="Go" />
+            <input type="hidden" name="check_keywords" value="yes" />
+            <input type="hidden" name="area" value="default" />
+          </form>
+          <p class="searchtip" style="font-size: 90%">
+            Enter search terms.
+          </p>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+    <div class="footer-wrapper">
+      <div class="footer">
+        <div class="left">
+          <a href="dataverse-user-main.html" title="User Guide"
+             >next</a> |
+          <a href="genindex.html" title="General Index"
+             >index</a>
+            <br/>
+            <a href="_sources/index.txt"
+               rel="nofollow">Show Source</a>
+        </div>
+
+        <div class="right">
+          
+    <div class="footer">
+        &copy; Copyright 1997-2013, President &amp; Fellows of Harvard University.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2b1.
+    </div>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+  </body>
+</html>
\ No newline at end of file
Binary file DVN-web/installer/dvninstall/doc/guides/objects.inv has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/search.html	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,128 @@
+
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+    
+    <title>Search &mdash; The Harvard Dataverse Network 3.6.1 documentation</title>
+    
+    <link rel="stylesheet" href="_static/agogo.css" type="text/css" />
+    <link rel="stylesheet" href="_static/pygments.css" type="text/css" />
+    
+    <script type="text/javascript">
+      var DOCUMENTATION_OPTIONS = {
+        URL_ROOT:    './',
+        VERSION:     '3.6.1',
+        COLLAPSE_INDEX: false,
+        FILE_SUFFIX: '.html',
+        HAS_SOURCE:  true
+      };
+    </script>
+    <script type="text/javascript" src="_static/jquery.js"></script>
+    <script type="text/javascript" src="_static/underscore.js"></script>
+    <script type="text/javascript" src="_static/doctools.js"></script>
+    <script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
+    <script type="text/javascript" src="_static/searchtools.js"></script>
+    <link rel="top" title="The Harvard Dataverse Network 3.6.1 documentation" href="index.html" />
+  <script type="text/javascript">
+    jQuery(function() { Search.loadIndex("searchindex.js"); });
+  </script>
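+  <!-- Sphinx search runs entirely in the browser: searchtools.js loads the
+       generated searchindex.js and evaluates the query client-side, so no
+       server-side search backend is needed for these guides. -->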
+   
+
+  </head>
+  <body>
+    <div class="header-wrapper">
+      <div class="header">
+        <div class="headertitle"><a
+          href="index.html">The Harvard Dataverse Network 3.6.1 documentation</a></div>
+        <div class="rel">
+          <a href="genindex.html" title="General Index"
+             accesskey="I">index</a>
+        </div>
+       </div>
+    </div>
+
+    <div class="content-wrapper">
+      <div class="content">
+        <div class="document">
+            
+      <div class="documentwrapper">
+        <div class="bodywrapper">
+          <div class="body">
+            
+  <h1 id="search-documentation">Search</h1>
+  <div id="fallback" class="admonition warning">
+  <script type="text/javascript">$('#fallback').hide();</script>
+  <p>
+    Please activate JavaScript to enable the search
+    functionality.
+  </p>
+  </div>
+  <p>
+    From here you can search these documents. Enter your search
+    words into the box below and click "search". Note that the search
+    function will automatically search for all of the words. Pages
+    containing fewer words won't appear in the result list.
+  </p>
+  <form action="" method="get">
+    <input type="text" name="q" value="" />
+    <input type="submit" value="search" />
+    <span id="search-progress" style="padding-left: 10px"></span>
+  </form>
+  
+  <div id="search-results">
+  
+  </div>
+
+          </div>
+        </div>
+      </div>
+        </div>
+        <div class="sidebar">
+          <h3>Table Of Contents</h3>
+          <ul>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-user-main.html">User Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-installer-main.html">Installers Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-developer-main.html">DVN Developers Guide</a></li>
+<li class="toctree-l1"><a class="reference internal" href="dataverse-api-main.html">APIs Guide</a></li>
+</ul>
+
+          <h3 style="margin-top: 1.5em;">Search</h3>
+          <form class="search" action="#" method="get">
+            <input type="text" name="q" />
+            <input type="submit" value="Go" />
+            <input type="hidden" name="check_keywords" value="yes" />
+            <input type="hidden" name="area" value="default" />
+          </form>
+          <p class="searchtip" style="font-size: 90%">
+            Enter search terms.
+          </p>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+    <div class="footer-wrapper">
+      <div class="footer">
+        <div class="left">
+          <a href="genindex.html" title="General Index"
+             >index</a>
+        </div>
+
+        <div class="right">
+          
+    <div class="footer">
+        &copy; Copyright 1997-2013, President &amp; Fellows of Harvard University.
+      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2b1.
+    </div>
+        </div>
+        <div class="clearer"></div>
+      </div>
+    </div>
+
+  </body>
+</html>
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/doc/guides/searchindex.js	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,1 @@
+Search.setIndex({envversion:42,terms:{prefil:2,show_bug:5,prefix:4,whose:2,educ:2,frequencyofdatacollect:2,networkdata:4,tweet:[2,4],editnetworkprivilegesservicebean:5,authus:0,under:[5,2,4],xmx512m:4,formatschema:0,digit:[],everi:[5,2,4],risk:[5,4],caffein:0,govern:2,affect:2,servlet:[],viewabl:2,four:[2,4],school:2,scholar:2,correct:[5,2,4],vector:[1,2],ecmerg:5,ifac:[],direct:[5,2,4],commerci:2,consequ:[1,2],second:[1,2],aggreg:[0,2],junit_4:[],even:[1,2,4],postgres_databas:4,neg:[1,2],asid:[5,2],listidentifi:2,conduct:2,"new":[],databank:[],ever:[2,4],fdbfe57:5,distributorcontactaffili:2,abov:[0,1,2,5,4],createdb:4,never:2,here:[0,1,2,5,4],met:2,"1tawps4rouqaiw":2,path:5,pleasur:[],metadatasearchresult:0,acceptor:4,reenabl:2,everyon:2,precis:[1,2],datetim:[2,4],permit:[2,4],parlanc:4,portabl:2,collectionmod:2,brought:2,unix:[5,1,2,4],txt:[0,2,4],seriesinform:2,unit:[0,2,4],describ:[0,5,2,4],would:[1,2,4],call:[2,4],recommend:[5,1,2,4],decentr:2,timeperiodcoveredend:2,type:[],tell:4,notif:[],aggressiveheap:4,geographicunit:2,notic:[1,2,4],warn:[5,2,4],hold:[0,5,2,4],unpack:[2,4],must:[5,1,2,4],join:2,work:[],introduc:4,topicclassvalu:2,root:[5,2,4],undirect:2,searchfieldbundl:5,overrid:[2,4],workbench_help:[],give:[1,2,4],indic:[],cautiou:5,captcha:4,dsun:4,unavail:[0,2],want:[5,2,4],hog:4,end:[2,4],prepopul:2,ordinari:2,vagrant:4,dsbingest:4,how:[],rhistoryfil:2,answer:[5,2,4],disappoint:5,config:4,catgri:2,updat:[5,2,4],partialexclud:0,recogn:[1,2],after:[5,2,4],wrong:[2,4],os_files_xxlarg:[],studyfil:0,parallel:5,demonstr:0,attempt:[0,1,2,5,4],fyxlawzria:2,credenti:[0,5],exclud:[0,2],receiv:[5,2],swordprofil:0,pg_dump:4,environ:[],reloc:2,enter:[2,4],exclus:[1,2],lambda:2,order:[],indeped:[],oper:[0,2,4],feedback:2,araxi:5,diagnos:4,offici:[],orang:2,becaus:[5,1,2,4],jpeg:0,privileg:2,japan:[1,2],flexibl:[2,4],vari:2,murrai:2,fit:[],mydv:2,fix:[5,2],quantit:[2,4],persist:[0,5,2,4],comprehens:2,directaccess:0,hidden:2,coffeemil:0,easier:[2,4],them:[5,2,4],thei:[5,1,2,4],proce:4,safe:[2,4],opendiff:5,"break":2,db_name:4,promis:[],interrupt:2,choic:[2,4],geographiccoverag:[0,2],dvn333:0,roast:0,each:[0,2,4],side:[2,4],bone:4,mean:[5,1,2,4],sample_onli:2,logo:2,extract:2,pgsq1:4,network:[],newli:[5,2,4],content:[0,3,2,5,4],rewrit:[],eprint:[],"6ceb24f":5,gov:2,dsb:[2,4],linear:2,navig:2,situat:2,strata:2,filemimetyp:0,standard:[0,1,2,4],nth:2,fixm:[],md5:4,filter:[2,4],regress:2,portal:[],confus:[1,2,4],licens:[0,2],indexserviceloc:5,rang:[2,4],independ:[1,2],rank:2,necess:4,restrict:[0,2],sword2rubi:0,unlik:[1,2],wasn:[1,2],authenti:0,agre:2,primari:[5,2],rewritten:4,meatadatasearchfield:0,top:[0,2,4],sometim:2,master:[],too:[1,2,4],similarli:[1,2,4],john:0,listen:4,consol:[5,2,4],circl:2,namespac:[0,4],tool:[0,5,2,4],thisfeatur:0,took:2,varformat:2,thedata:[2,4],crawl:[2,4],technic:[2,4],target:[2,4],keyword:[0,2],disableexplicitgc:4,provid:[0,1,2,5,4],tree:[],zero:[0,2],googleanalyt:[2,4],matter:[5,1,2],minut:[5,2,4],isreferencedbi:0,boston:[1,2],modern:4,mind:[1,2],udp:4,raw:2,inherit:2,keywordvocabulari:2,seem:2,incompat:2,minu:[],"3ccahxgwy7kwptqg6m9m4":[],recreat:2,icpsr:[0,2],inexpens:2,latter:[1,2],transmit:0,simplifi:[2,4],perl_lib:[],though:[1,2],usernam:[0,5,2,4],object:[],regular:[5,4],specifi:[0,2,4],letter:2,filedownloadinfo:0,keystrok:2,simplic:2,doi:[0,2,4],don:[],doc:[0,4],pgadmin:4,doe:[],bracket:2,tech:[],wildcard:2,unchang:[5,4],section:[],came:[],pkp:[],visitor:2,containth:2,syntax:2,radio:2,protocol:[0,2,4],involv:2,absolut:4,exactli:[2,4],acquir:[5,4],menu:[2,4],explain:[2,4],apach:[],theme:2,busi:[2,4],folder:[],axiom:[],stop:4,compli:2,coast:[1,2],report:[2,4],recalcul:2,net:[0,5,2,4],devguid:[],bar:2,emb:2,method:[0,2],cleanli:5,septemb:[],fieldnam:0,studyui:5,elimin:2,roadmap:[],num:2,mandatori:[0,2],result:[],respons:[0,2,4],fail:[5,2,4],relatedpubl:2,best:[2,4],subject:[0,2],awar:[1,2],hopefulli:4,wikipedia:[],hmdc:[5,2,4],simplest:2,awai:[2,4],approach:2,xvf:4,accord:5,datacit:0,manpag:0,extens:2,harvard:[5,2,4],protect:[],biblcit:0,howev:[5,2,4],against:[1,2],facet:[5,2],unc:[],unf:[1,2],logic:[1,2],countri:2,login:[],seri:2,com:[0,5,2,4],fileformat:0,dataversehasbeenreleas:0,imagethumb:0,foobar:0,ulimit:4,height:2,googleanalyticstrackingcod:4,diff:5,trust:[5,4],assum:[5,2],summar:[1,2],speak:[2,4],haxx:0,requestor:2,chrome:2,three:[5,2],github:[],listsect:2,much:[5,2,4],interest:[0,2,4],basic:[0,2,4],deviationsfromsampledesign:2,quickli:2,life:0,vimdiff:5,mod_mbox:[],adminadmin:4,ani:[0,1,2,5,4],child:[5,2],spin:4,emploi:2,dspace:[],ident:[5,1,2,4],forum:[1,2],gnu:2,properti:[5,2,4],sourceforg:[5,4],calcul:2,publicli:2,vagu:2,privatekei:4,bmckinnei:[],indexmessag:4,timefram:2,tabl:[],conf:[5,4],sever:[5,2],n2t:4,studyrelpubl:0,prior:2,perform:[1,2,4],suggest:[2,4],complex:[1,2,4],split:4,splu:2,complet:[0,5,2,4],blue:2,hand:2,fairli:[1,2,4],requestprocess:4,refin:2,kib:5,dsbqueueconnectionfactori:4,scenario:[0,2,4],thu:2,postgresql84:4,domainnam:4,bugzilla:5,contact:[],pwrd:4,thi:[0,1,2,5,4],dvownerid:2,everyth:4,left:[2,4],kindofdata:[0,2],identifi:[],just:[5,1,2,4],laptop:4,ordin:2,human:[1,2,4],kdiff3:5,yet:[0,1,2,5,4],languag:0,previous:2,onload:2,easi:[2,4],mix:2,had:[2,4],extact:2,fortran:2,spread:4,collections:2,els:5,ffffff:2,save:[5,2,4],explanatori:[1,2],opt:2,applic:[],dvnoai:4,setateln9ubu2:[],preserv:[1,2,4],disposit:0,javaserver2:[],background:2,sampleurl:2,measur:[2,4],daemon:[2,4],specif:[0,2,4],arbitrari:2,reassign:2,greenwitch:[1,2],grizzli:[],www:[0,5,2,4],right:[0,5,2,4],old:[2,4],deal:2,wt2517:2,interv:4,somehow:2,swordv2collectionservlet:[],intern:[2,4],partialinclud:0,successfulli:5,total:5,bottom:2,formatavail:0,ear:4,condit:[0,2],gsfc:2,localhost:[5,4],core:[],plu:2,sfu:[],studynotesubject:2,widget:2,insecur:[],promot:2,peer:2,peet:0,chapter:[5,4],postgresql:[],slightli:2,unfortun:[],unsav:2,accessgr:0,commit:[],produc:[0,1,2,4],asquickstartup:4,"float":[1,2],encod:[1,2],bound:2,down:[5,2,4],creativ:0,"519cd8c":5,wrap:2,opportun:2,storag:[2,4],javax:4,git:[],suffici:[5,2,4],support:[],"class":[],avail:[],width:2,reli:[1,2],fraction:2,jane:0,war:5,lowest:2,head:[5,2],form:[0,1,2,4],forc:[],pagerank:2,dvn_server:0,formatnam:0,"true":[0,2,4],freenod:[],reset:2,attr:[1,2],createus:4,geospati:2,indexservicebean:5,unmerg:5,maximum:[2,4],until:[5,2],absenc:[1,2],fundament:2,later:[2,4],classic:2,ddoi:4,"abstract":[0,2],distributiond:2,unreleas:2,diagnost:0,exist:[0,1,2,5,4],baseurlstr:4,encrypt:[5,4],tkdiff:5,when:[0,1,2,5,4],actor:2,role:[],test:0,roll:2,timemethod:2,univari:2,node:2,intend:[5,2,4],irclog:[],tzone:[1,2],asterisk:2,intens:2,intent:2,consid:[],sql:4,femal:[1,2],longer:[1,2],furthermor:[2,4],anywher:2,studylevelerrornot:2,dateofcollectionend:2,idno:0,ignor:2,datafram:[1,2],time:[],daili:2,"1st":[1,2],osx:[],mydomain:2,concept:[1,2],queueconnectionfactori:4,skip:2,global:[0,2,4],snip:5,studycomplet:2,menubar:2,hierarch:2,decid:[2,4],middl:2,depend:[5,2,4],system:[],zone:[1,2],graph:2,proxim:2,umich:[0,2],"2b88b68":5,sourc:[],string:[],embark:4,cook:4,word:[5,2],brows:2,intvu4:2,foo:2,contenttyp:0,hour:[2,4],administr:[],level:[0,1,2,4],did:4,gui:4,item:2,team:[5,2,4],div:2,databasenam:4,metsdspacesip:[],prevent:[2,4],brave:4,sign:[5,4],patient:4,port:4,bold:[5,4],appear:[5,2,4],samplingerrorestim:2,placeofaccess:2,uniform:2,current:[],rephras:[1,2],domain1:4,meld:5,portnumb:4,deriv:4,zelig:[],gener:[],agreement:2,satisfi:2,modif:[2,4],address:[5,2,4],along:2,redmin:[5,2],wait:2,box:[],dublic:[],consumerkei:4,invit:2,dublin:[],unport:0,queue:4,simplezip:0,poisson:2,extrem:2,commonli:2,elect:2,extra:[0,5,4],modul:[3,4],glassfish3:4,instal:[],should:[5,1,2,4],regex:2,memori:4,catalogingfields11apr08:2,univers:[0,2,4],visit:[0,4],subvers:[],everybodi:5,criteria:2,scope:[0,2],checkout:5,minim:[2,4],admcredfil:4,peopl:[5,2],timeperiodcoveredstart:2,appendix:[],examin:2,effort:[1,2],behalf:2,dvndbpool:4,graphic:[2,4],local:[0,5,2,4],prepar:[5,2],uniqu:[1,2,4],descriptor:4,can:[0,1,2,5,4],tabul:[2,4],whiteboardofswordv2api:[],purpos:[0,2],stream:0,predict:[1,2],handle_technical_manu:4,topic:2,heard:4,critic:4,agenc:[0,2],occur:2,alwai:[1,2],multipl:[5,1,2,4],write:[0,5,2,4],anyon:[0,4],xhtml:[5,2,4],purl:0,map:[],product:[0,2,4],max:[2,4],membership:2,mai:[0,1,2,5,4],grow:2,goal:2,oai_dc:0,practic:[1,2,4],relatedmateri:[0,2],divid:2,favorit:4,bean:0,programmat:[0,2],"switch":[],combin:2,gamma:2,subcollect:2,talk:[5,4],dvnoaiplugin:[2,4],approv:2,tablular:2,entitl:2,still:[5,1,2,4],dynam:2,entiti:2,p4merg:5,disconnect:2,monitor:[2,4],polici:[2,4],platform:[5,1,2,4],window:[2,4],sample_only_http:2,mail:[],main:[5,2,4],metadatasearchablefield:0,financi:0,initi:[2,4],nation:2,interview:2,therebi:2,verifi:[5,2,4],now:[0,1,2,5,4],discuss:[1,2,4],nor:2,introduct:[0,4],term:[],csv:[],name:[],perspect:0,didn:4,separ:[5,2,4],januari:4,compil:2,domain:[2,4],javas:4,citat:[0,2],individu:[],receipt:[],continu:[5,2,4],significantli:4,year:2,distributor:2,happen:2,canada:0,subnet:4,shown:2,accomplish:2,"3rd":4,space:[2,4],faculti:[],ejb:[],profil:[0,2],factori:4,integr:2,earlier:[2,4],migrat:2,million:2,dataverse_network_integr:[],theori:2,enterprisedb:[],org:[0,5,2,4],"byte":[0,5,2,4],card:[],care:[2,4],peform:2,weibul:2,recod:2,version2:0,frequenc:2,dvninstall_v3_4:5,refus:2,dvninstall_v3_0:4,thing:[],place:[5,2],djhove:4,imposs:[1,2],frequent:4,first:[],origin:[5,1,2],directli:[0,2,4],distributorcontactemail:2,onc:[5,2,4],beverag:0,fast:5,oppos:2,custom:[2,4],open:[],predefin:2,size:[2,4],given:[0,1,2,4],mergetool:5,sheet:2,silent:[2,4],convent:2,virgin:4,availabilitystatu:2,draft:2,averag:[2,4],fgdc:2,confidentialitydeclar:2,conveni:2,cite:2,copi:[],artifact:[5,2],csdgm:2,logist:2,than:[2,4],png:0,serv:[0,2,4],jhove:2,balanc:4,lucenc:2,were:[5,1,2,4],posit:[1,2],browser:[5,2,4],pre:[5,4],fork:[5,4],sai:5,san:[1,2],nicer:5,openarch:0,pro:[1,2],svnroot:4,argument:0,dash:[],sav:2,producernam:2,engin:2,squar:2,destroi:[2,4],consortium:2,libpath:4,note:[0,1,2,5,4],ideal:5,maintain:[2,4],take:[5,2,4],advis:2,noth:[5,2],begin:[2,4],sure:[5,1,2,4],controloper:2,normal:[5,1,2,4],track:[],studynotetext:2,compress:5,beta:2,varqnti:2,abus:[2,4],pair:[2,4],america:2,homepag:[5,2,4],icon:[2,4],renam:[5,2],oaihandl:[2,4],adopt:5,drive:2,quantiti:2,iqss:[5,2,4],runtim:2,axi:2,subtab:2,shop:0,heap:4,show:[5,2,4],delta:5,permiss:[],configzeliggui:2,corner:2,help:[5,2,4],explicitli:[1,2],ratio:2,transact:[2,4],activ:[5,2,4],enough:4,analyz:2,nvie:5,analys:2,over:[0,5,2,4],carolina:[],variou:[5,2],get:[],installmodul:4,cannot:[5,1,2],study2:0,requir:[],thumbnail:[0,4],mediat:[],where:[0,1,2,5,4],summari:[1,2],wiki:[2,4],samplingprocedur:2,advsearchpag:5,rmi:4,detect:2,review:[1,2],enumer:[0,2],label:[],behind:[2,4],listinfo:[],between:2,"import":[5,1,2,4],paramet:[0,2,4],across:[2,4],parent:[5,2],screen:2,"192m":4,come:[2,4],distributorcontact:2,pertain:2,job:[1,2,4],audienc:5,improv:[2,4],inat:[],among:2,reindex:2,jdbc4:4,rescal:2,datvers:2,por:[],dir:4,pop:2,cancel:2,coupl:[1,2],marc:2,west:[1,2],mark:[5,2,4],listen_address:4,reflex:5,fedoraproject:4,addfil:2,resolut:[5,2],rubi:0,vartyp:2,workspac:2,i18n:[],those:[1,2,4],"case":[5,1,2,4],interoper:[0,2],thesi:2,keygen:5,testus:0,hdl:[0,2,4],henc:4,sameus:[5,4],chkconfig:4,xyxz:0,develop:[],dvnapi_v1_0:[],media:0,same:[0,1,2,5,4],epoch:[1,2],html:[0,5,2,4],"6_31":4,eventu:5,week:2,ifram:2,finish:[5,2,4],screenshot:[],leonid:0,nest:2,assist:2,driver:4,someon:5,driven:[2,4],capabl:2,mani:[5,2,4],extern:[0,2],appropri:[5,2],selectsupport:0,markup:2,without:[0,1,2,4],zipfile_:2,scholarship:[],execut:[],dateofdeposit:2,rest:4,weekli:2,b7fae01:5,svr_1:4,touch:2,passphras:[5,4],speed:2,versu:2,death:2,specialpermiss:2,except:[2,4],param:0,blog:[],color:2,swordv2mediaresourceservlet:[],blob:5,netapp:4,real:[0,1,2,4],hover:5,around:[5,2],read:[],period:2,traffic:4,batchdownload:4,world:2,pst:[1,2],javaclient2:0,integ:[],swordapp:0,benefit:4,either:[1,2,4],manag:[],yyyi:[0,1,2,4],authent:[0,5,2,4],constitut:2,deselect:2,slice:2,confirm:2,definit:[5,2],achiev:2,ddi:[],exit:[2,4],testiqss:2,complic:[5,4],decent:[1,2],undertak:4,power:2,fulli:[5,2,4],docroot:4,"throw":4,src:[5,2,4],swordappv2:0,central:2,firstnam:0,spss:[],degre:2,eigenvalu:2,lucen:[],act:2,studylist:5,processor:4,mulitnomi:2,gari:2,terminolog:[1,2],unregist:2,disablecustom:2,quietli:[0,1,2],studylistingpag:5,yyi:4,your:[0,5,2,4],log:[5,2,4],her:[2,4],area:2,overwrit:2,start:[0,5,2,4],"3b82f88":5,interfac:[],low:2,lot:2,strictli:4,machin:[2,4],svn:4,verb:0,xms512m:4,bundl:[],regard:[5,2],diffus:5,categor:[1,2],facetresultui:5,faster:4,extend:[],pull:[],manifestpag:[2,4],netbeansproject:5,possibl:[1,2,4],"default":[],insert:[5,2,4],unusu:2,prerequisist:4,embed:[0,2],connect:[5,1,2,4],sword_:[],dcterm:0,certain:2,censor:2,deep:[],strongli:[0,5,2,4],deem:2,dvn_url:2,proport:2,fill:[5,2,4],incorrect:2,again:[5,2,4],googl:[],upcom:[5,2],carriag:2,field:[],binomi:2,valid:[0,2,4],you:[],db_usernam:4,symbol:2,vertex:2,searchhit:0,dropbox:2,pool:4,reduc:2,unselect:2,descript:[],sword2:0,potenti:[0,1,2,4],latestversionst:0,cpu:4,cdlib:4,represent:[1,2],all:[0,1,2,5,4],dist:4,consider:[0,2],intrvl:2,lack:[1,2],dollar:2,connectionattribut:4,month:2,abil:[2,4],follow:[0,5,2,4],disk:[5,2,4],white:2,articl:[0,5,2],init:4,queri:[0,2,4],timerserv:4,macosx:[5,4],straightforward:[5,1,2,4],subsett:[1,2,4],far:[],faq:[],util:[],sparingli:2,candid:5,mechan:[1,2],tmprwsfile:2,failur:2,veri:[5,1,2,4],ticket:[5,2],prettypictur:0,consumersecret:4,trench:[],adjust:[5,1,2,4],wst:[],user_nam:4,enterpris:4,neighborhood:2,ten:2,sync:2,past:[2,4],rate:2,design:2,nbm:[],pass:[2,4],further:2,sub:2,sun:[5,4],sum:2,abl:[5,2,4],brief:2,dcom:4,abbrevi:2,version:[],xom:[],"public":[0,5,2,4],contrast:[1,2],essenc:5,hasn:2,full:[0,5,2,4],themselv:2,variat:2,nb6:[],infin:[1,2],free:[1,2],trunk:4,ver:2,modifi:[0,1,2,5,4],legend:2,valu:[],search:[],ahead:5,addfilespag:2,amount:[2,4],vdcnetwork:4,probit:2,social:2,action:[2,4],narrow:2,via:[],curat:2,transit:4,vim:5,filenam:[0,2,4],href:2,inappropri:2,famili:2,establish:[2,4],select:[],versionnumb:5,distinct:[1,2],regist:[2,4],two:[5,2],coverag:[0,2],morn:2,basi:2,toggl:2,more:[0,1,2,5,4],desir:[0,5,2],flag:2,particular:[5,2,4],known:[5,2,4],compani:0,cach:[5,4],psql:[5,4],none:[0,1,2],jdk:[5,4],hous:2,outlin:[1,2,4],dev:[],histori:2,buildupd:5,remain:[2,4],paragraph:4,"28protocol":[],male:[1,2],pgdriver:4,"10ece42ec9236ccd2e58eea2e69c7b54fc783133":[],prompt:[5,2,4],challeng:4,registr:[2,4],accept:[5,2,4],minimum:[2,4],phrase:4,uncheck:2,cours:[1,2,4],xxxx:4,newlin:2,secur:[],rather:[2,4],anoth:[1,2,4],perhap:[2,4],snippet:2,reject:[1,2],characteristicofsourc:2,css:2,dvn_subpag:2,resourc:[],overcommit:4,facetui:5,inquiri:2,"600px":2,associ:[2,4],"short":[5,2],footer:2,overnight:5,ambigu:[1,2],caus:2,callback:4,alphabet:2,cleaningoper:2,checkbox:2,rotat:4,notifymailsess:4,octob:5,paper:2,through:[2,4],unitofanalysi:2,hierarchi:2,dont:2,style:[],exact:[1,2],bypass:2,might:[5,2],alter:[2,4],good:2,"return":[0,5,2,4],sentenc:2,largest:2,framework:2,ecolog:2,intvu2:2,jdbc30datasourc:4,intvu3:2,citationrequir:2,troubleshoot:2,intvu1:2,instruct:[5,2,4],refresh:2,infer:2,easili:[2,4],token:4,compris:2,found:[0,2,4],intervent:2,id_rsa:5,truncat:2,england:0,weight:2,hard:4,idea:2,realli:[5,2,4],expect:[0,1,2,5,4],twtter:2,todo:2,event:[2,4],closest:2,datakind:0,basicsearchfrag:5,fielddescript:0,publish:[],research:[0,1,2],print:[2,4],qualifi:[],postgr:[],proxi:2,advanc:[2,4],pub:5,labl:2,reason:[5,2,4],base:[],ask:[5,2,4],earliest:2,workstat:4,"40mail":[],thread:[5,4],script:[],american:2,perman:[0,2,4],undergo:2,assign:[1,2,4],filedscr:2,feed:0,major:2,notifi:2,your_usernam:5,lastnam:0,placehold:4,dvn_client:0,done:[5,1,2,4],least:[5,2,4],blank:[5,2,4],stabl:4,miss:[1,2],stage:[5,4],differ:[1,2,4],exponenti:2,geogcov:0,interact:[2,4],unrestrict:2,construct:2,otherrefer:2,authornam:[0,2],esac:4,scheme:2,store:[5,1,2],dichotom:2,adher:2,otherid:[0,2],xmln:[0,2],option:[],relationship:2,selector:2,useparalleloldgc:4,part:[0,5,2,4],pars:2,consult:[2,4],webadmin:[],std:2,king:2,kind:[0,2],protocoloperations_editingcontent_metadata:0,whenev:5,remot:[],remov:[5,2],authmethod:0,jqueri:2,overcommit_memori:4,reus:5,arrang:4,schema:[0,2],set_dvn_url:2,comput:[1,2],beforehand:2,packag:[0,1,2,5,4],dedic:[1,2,4],"null":2,built:[0,5,4],lib:[5,4],self:2,also:[0,1,2,5,4],authoraffili:2,build:[],distribut:[5,2,4],exec:[2,4],unsur:2,previou:[5,2],reach:2,chart:2,jst:[1,2],most:[0,1,2,5,4],mcmcpack:2,plan:2,maco:4,alpha:2,charg:2,ddvn:[2,4],exo:2,clear:[0,2,4],cover:2,ext:[],clean:5,repli:4,subgroup:2,awesom:4,think:[2,4],somewhat:2,fink:4,particularli:2,cdr:[],astrophys:2,fine:5,find:[],firewal:[2,4],bibliographiccit:0,solut:[1,2],"_hmpgmainmessag":2,leisur:4,factor:[],hit:[5,4],unus:0,express:0,nativ:2,mainten:[2,4],banner:2,restart:[5,2,4],datadscr:[0,2],formattyp:0,common:[],hype:2,dta:2,certif:[],set:[],art:[],timevalu:[1,2],creator:[0,2],see:[0,1,2,5,4],bare:4,arg:0,reserv:[1,2],someth:2,particip:4,won:[5,2],"800px":2,nontrivi:2,experi:[5,2,4],xss128l:4,altern:[0,2],signatur:[1,2],popup:2,responser:2,numer:[1,2,4],javascript:2,isol:2,complementari:2,distinguish:2,classnam:4,popul:[2,4],disclaim:2,j2ee:4,toward:[5,2],last:[5,2],delimit:2,alon:2,bonacich:2,context:[],pdf:[2,4],accessrestrict:0,srdpe:4,load:4,accesspermiss:0,simpli:[5,2,4],point:[5,1,2,4],schedul:[],neatli:[],header:[0,2],"724px":2,"100mb":4,suppli:[0,1,2,5,4],throughout:4,backend:2,identif:2,vertic:2,framebord:2,due:2,empti:0,whom:2,secret:4,screener:2,strategi:5,files:0,adventur:4,imag:[0,5,2,4],convert:[1,2,4],unnecessarili:[5,4],fund:2,understand:[1,2,4],demand:2,instructor:2,creativecommon:0,look:[0,5,2,4],straight:4,histor:[],durat:2,pace:4,"while":[5,1,2,4],unifi:2,behavior:[1,2],error:[],anonym:2,mbox:[],loos:2,manner:[1,2],readi:[5,2],readm:5,jpg:[0,2],itself:[5,2],cento:4,fedora:[],grant:2,belong:[0,2],cert:[],conflict:[5,2],higher:[2,4],dgc:4,howto:5,formatmim:0,moment:2,temporari:4,user:[],repopul:2,oerpub:[],chang:[],recent:[5,2],lower:2,task:[],equival:[2,4],older:[5,4],searchabl:[0,2],parenthes:2,pdurbin:5,person:[0,2,4],reflect:4,academ:[1,2],propos:2,explan:4,dataverse_alia:0,collabor:2,administ:2,regardless:[1,2],tech_manu:4,relatedstudi:2,timevar:[],entrynam:4,ee6:[],wai:[5,1,2,4],apr:[],fundingag:2,input:2,subsequ:2,bin:[5,4],complaint:4,vendor:4,obsolet:[],format:[],intuit:[1,2],indexmessagefactori:4,bia:2,datasourc:[0,2,4],dvdc:4,accessservicessupport:0,outright:2,success:[5,2],xxdiff:5,resolv:[5,2],manifest:[2,4],collect:[],popular:[1,2,4],encount:4,studynotetyp:2,often:[1,2],acknowledg:2,visibl:2,some:[0,1,2,5,4],back:[5,2,4],understood:2,"30k":4,sampl:[],mirror:4,affili:2,scale:4,per:[0,5,2],recognit:2,substitut:2,larg:[2,4],slash:2,inetaddress:4,proc:4,cgi:5,myset:2,run:[],thefiledownloadinfo:0,proddat:0,step:[],dataversenotifi:4,subtract:2,thedataorg:2,vdcnetd:4,row:[2,4],materi:[0,2,4],datacollectionsitu:2,idl:2,dialog:5,zellig:2,block:2,file1:2,emphasi:4,primarili:4,within:[2,4],contributor:[0,2],announc:2,span:2,spam:[2,4],question:[2,4],submit:2,biom:[],adjac:2,includ:[0,5,2,4],suit:[2,4],forward:[5,2],xpath:0,properli:[1,2,4],repeatedli:2,subgraph:2,link:[],translat:2,newer:4,searchablefield:0,line:[5,2,4],murphi:5,info:[0,5,2],utc:[1,2],utf:2,consist:[5,2,4],caseid:2,highlight:5,similar:[2,4],impl:[],parser:[],accesstosourc:2,doesn:[1,2],repres:[0,2],datacollector:2,incomplet:2,choosedataverseforcreatestudi:5,singli:2,sequenti:2,nan:[1,2],invalid:2,servicenam:0,nat:2,nice:5,usert:4,gigabyt:[2,4],studyid:[0,2],elsewher:2,meaning:[1,2],posixct:[1,2],keywordvalu:2,msg00331:[],originofsourc:2,abdera:[],algorithm:[1,2],depth:[1,2],nasa:2,dot:2,fresh:4,scroll:2,adminstr:2,code:[],partial:[0,2],edg:2,scratch:5,maxperms:4,edu:[0,5,2,4],privat:[5,2,4],handleregistr:4,sensit:2,documen:0,dvndb:4,friendli:2,send:[2,4],macport:4,sens:[2,4],sent:2,deactiv:2,unzip:[2,4],disclos:2,distributornam:2,electron:2,volum:[5,4],implicitli:[1,2],nextwork:2,relev:2,tri:4,gender:2,button:2,geograph:[0,2],fewer:2,race:2,pleas:[0,5,2,4],smaller:2,natur:[0,2,4],pleae:2,click:[5,2,4],append:[1,2],compat:[2,4],index:[0,3,2,5,4],compar:[],cell:2,productiond:[0,2],whatev:4,perimet:2,poverti:2,chose:[5,2],bodi:2,let:2,networkadmin:[5,2],sinc:[5,1,2,4],great:4,survei:[0,2],convers:2,dvnextra:4,larger:2,dvnapp:4,depositorrequir:2,swordv2:[],typic:[5,2],chanc:4,firefox:2,holdingsuri:0,revok:2,appli:[2,4],app:[0,2,4],disciplin:2,studyfilesfrag:5,"boolean":[],regexp:2,home:[0,2,4],pgdownload:[],myvalu:2,acceptpackag:[],from:[],objectid:0,resultswithfacet:5,upgrad:[],next:[],websit:[0,2,4],few:2,usr:4,cran:4,simpler:[],servicedesc:0,tempsubsetfil:2,um06qkr:2,mismatch:2,actual:[1,2,4],confi:4,harvest:[],alik:4,alia:[2,4],datasrc:0,meet:2,fetch:[5,2],control:[],reveiw:2,tar:[2,4],process:[0,5,2,4],lock:[0,2],sudo:5,"1xxzy":0,proprietari:2,tab:[],hulmail:[],tarbal:2,onlin:[2,4],default_transaction_read_onli:4,gcc:2,sit:2,citationfil:2,dtwitter4j:4,subdirectori:2,instead:[5,1,2,4],sucess:2,await:[1,2],originalarch:2,redund:2,physic:4,essenti:2,seriou:[],counter:[1,2],arrow:2,element:[],issu:[5,1,2,4],allot:4,allow:[0,1,2,5,4],move:[5,2,4],microsystem:4,own:[0,1,2,5,4],comma:2,outer:2,chosen:2,byt:[],statusnotic:4,infrastructur:2,tsv:[],therefor:2,greater:2,filetxt:2,intvu4a:2,dai:2,auth:[2,4],devel:4,stdydscr:0,flash:2,facilit:[2,4],mediashelf:[],fingerprint:[5,2],front:2,nbinst:[],anyth:[0,2,4],edit:[],catvalu:2,pgpoolingdatasourc:4,astronom:2,februari:4,mode:[],dvninstal:[5,4],batch:2,querystr:0,francisco:[1,2],subset:[],consum:4,searchqueri:0,"static":2,our:[0,5,2,4],patch:4,malformedstreamexcept:[],special:[2,4],indexu:4,variabl:[1,2,4],matrix:2,servicearg:0,influenc:2,misidentifi:2,categori:2,suitabl:[5,2],rel:[2,4],lockss:[],hardwar:4,red:2,statist:[],insid:[2,4],workflow:[0,2,4],manipul:[2,4],undo:2,standalon:[2,4],scrnrid:2,rdata:[],afterward:2,complianc:2,dvnqueri:5,guarante:[1,2,4],could:2,latest:[5,2,4],mac:[5,4],timer:[2,4],keep:[5,1,2,4],length:[0,2,4],enforc:2,outsid:[2,4],pg_hba:[5,4],timezon:[1,2],gvimdiff:5,softwar:[2,4],suffix:4,echo:4,date:[0,1,2,5],owner:[2,4],technetwork:4,suffic:2,publickei:4,"long":[5,1,2,4],workbench:[],respond:2,sep:[1,2],mkdir:[],compliant:2,messag:[0,5,2,4],attach:[1,2],termin:[5,2],"final":[5,2,4],tortoisemerg:5,shell:4,gear:2,appdeploi:[],methodolog:2,rsa:5,jndi:4,accompani:2,nobodi:4,excludesupport:0,abstractd:2,structur:[2,4],charact:[],instanceroot:4,becom:[1,2,4],light:2,f10:2,f11:[],datapass:2,explicit:[1,2],clearli:4,robot:4,correspond:[2,4],sysadmin:2,corrupt:2,have:[],close:[5,2],need:[],turn:4,codebook:[0,2],min:2,mif:2,documenta:2,discret:2,which:[0,1,2,5,4],vertici:2,divers:2,singl:[0,2,4],fileid:[0,2],analysi:[],unless:[1,2,4],deploy:5,who:[5,2,4],oracl:[5,4],discov:2,eight:2,why:[5,2,4],gather:2,request:[0,2,4],uri:0,face:[2,4],inde:2,deni:[0,2],yum:4,brew:0,occasion:2,fact:[1,2,4],text:[1,2,4],bring:[2,4],anywai:5,varnam:2,redirect:[2,4],inlin:2,locat:[0,2,4],jar:[5,2,4],dispers:2,smallest:2,suppos:2,initdb:4,discontinu:5,hope:5,meant:[2,4],contribut:[5,2],notat:[0,1,2,4],regularli:2,msg00317:[],increas:[2,4],solr:5,organ:[],upper:2,stanford:2,stuff:2,she:4,actionstominimizeloss:2,contain:[],attribut:[0,1,2],cottagelab:[],view:[],conform:0,btw:[],legaci:2,frame:[1,2],knowledg:2,temporarili:2,multipart:[],dyadic:2,cold:0,gmail:2,closer:[2,4],allowlink:4,record:[0,2,4],pattern:[],boundari:2,state:[],"8fd223d":5,stata:[],censu:[0,2],progress:[0,2],neither:2,email:[2,4],kei:[5,2,4],handleprefix:4,entir:[2,4],addit:[0,1,2,4],restrctn:0,plugin:[],admin:[0,5,2,4],april:[],instanc:[1,2,4],caseqnti:2,initit:2,nbproject:[],guidelin:2,hyphen:2,arriv:2,chmod:4,walk:2,respect:4,rpm:4,mailto:2,quit:[1,2,4],ezid:4,addition:2,djava:4,compos:2,compon:[],besid:2,treat:[1,2],immedi:[2,4],bit:4,both:[0,1,2,5,4],subset_:2,decim:2,togeth:[2,4],subtitl:2,present:[1,2,4],replic:[2,4],multi:[2,4],plain:2,cursor:2,defin:[0,1,2,4],observ:[1,2],purchas:2,customiz:2,almost:2,demo:4,archiv:[0,2,4],incom:2,surprisingli:4,scienc:2,substant:2,welcom:2,parti:[2,4],began:2,cross:2,member:2,auto:5,nada:2,difficult:[1,2],http:[],hostnam:4,denot:2,upon:2,effect:[5,2],coffe:0,student:2,php:0,expand:2,referencedata:4,off:[5,2,4],center:[2,4],mention:4,builder:2,well:[2,4],command:[0,2,4],gcinterv:4,choos:[5,2,4],usual:2,newest:5,less:4,boot:4,obtain:[5,2,4],tcp:4,clinic:0,virtual:4,increasingli:[1,2],simple_studi:2,skill:4,simultan:2,demograph:2,abstracttext:2,book:0,warrant:[1,2],match:[5,2],gmt:[1,2],rememb:[5,2],crud:[],smithsonian:2,five:[],know:[5,2,4],press:[0,2],password:[0,5,2,4],recurs:5,mypag:2,institut:[0,2,4],resid:2,like:[0,1,2,5,4],lost:[],researchinstru:2,necessari:[2,4],servernam:4,xsd:[0,2],resiz:4,page:[5,3,2,4],crawler:2,dateofcollect:2,drop:[0,5,2,4],captur:2,twitter:[],linux:[2,4],collectionpolici:0,contin:2,"export":[],swordpoc:0,proper:[0,1,2],small:[2,4],seriesnam:2,librari:[],est:[1,2],admpriv:4,lead:[2,4],avoid:[1,2],octet:2,overlap:2,setti:2,nesstar:2,leav:[5,2,4],encourag:[0,5,2],investig:[5,2],globalid:[2,4],journal:[0,4],usag:[2,4],maven:5,host:[2,4],dissert:[],although:4,offset:[1,2],panel:2,about:[5,2,4],rare:2,socket:[5,4],column:[1,2],commons_log:[],ingest:[],lifecycl:[],fals:[0,2,4],discard:5,addendum:[],disabl:2,desper:4,seamlessli:2,automat:[5,2,4],dataset:[0,2],"_length":2,"2bxwoapwxzmlme1m3rg":[],stumptown:0,mere:2,merg:[],appl:5,"var":[2,4],"function":[5,2,4],otherdataapprais:2,north:2,brand:[],fileupload:[],baseurl:4,uninstal:[5,4],relmat:0,oauth:4,highest:2,bug:2,count:[5,1,2,4],depositor:2,succe:4,made:[5,1,2,4],temp:4,dmg:5,whether:[2,4],wish:[2,4],troubl:[],asynchron:2,fits_standard:2,below:[0,5,2,4],meta:2,limit:[],invest:2,otherwis:[1,2,4],problem:[5,2,4],dimensn:2,epel:4,evalu:0,dure:[5,2,4],graphml:2,probabl:[5,2,4],migtht:[],percent:2,detail:[0,5,2,4],oai:[],other:[],futur:[1,2,4],branch:[],scholarli:[],bc3:5,repeat:2,studyvers:2,stai:4,topicclassvocabulari:2,experienc:4,amp:0,portion:2,emerg:5,sincer:4,rep:2,accessservic:0},objtypes:{},objnames:{},filenames:["dataverse-api-main","dataverse-R-ingest","dataverse-user-main","index","dataverse-installer-main","dataverse-developer-main"],titles:["APIs Guide","Ingest of R (.RData) files","User Guide","Dataverse Network Guides","Installers Guide","DVN Developers Guide"],objects:{},titleterms:{lab:[],code:[0,2],identifi:4,ant:[],execut:3,blocker:4,session:4,ejb:4,permiss:2,file:[0,1,2,4],asadmin:4,find:2,xml:0,web:2,onli:[0,4],layout:2,fit:2,field:2,copi:5,configur:[5,4],readabl:[],written:[],common:2,add:[0,2],glassfish:[5,4],digit:4,jvm:4,factor:[1,2],mail:2,analyt:4,spec:[],applic:5,non:0,sourc:5,string:[1,2],format:[1,2],read:4,recaptcha:4,autom:[],repo:5,netbean:5,ssh:5,consid:2,studi:[0,2],requir:[1,2,4],name:2,term:2,document:0,metadataformatsavail:0,enabl:[2,4],administr:2,edit:2,list:[0,5,2],upload:2,integ:[1,2],server:5,"try":[],collect:2,api:[0,2],mode:4,datavers:[0,3,2],contain:4,quick:4,output:[],subnetwork:2,refer:[0,2],manag:2,view:2,subset:2,set:[5,2,4],replac:0,twitter:[2,4],creation:2,startup:4,keypair:5,video:[],result:2,download:[0,5,2],run:[5,4],charact:[1,2],servic:[0,4],librari:0,out:5,what:4,network:[0,3,2],compar:[1,2],databas:[0,5],section:2,http:4,publish:2,label:2,access:[0,2,4],delet:0,ddi:[0,2],version:[5,2],sword:0,stata:[1,2],"new":[5,1,2],crosswalk:0,merg:5,metadata:[0,2],core:0,qualifi:0,postgr:[],imagemagick:4,extend:2,state:2,zelig:2,gener:2,chang:5,style:2,step:2,base:2,javamail:4,repositori:5,rdata:[1,2],put:[],group:2,post:[],organ:2,card:2,deaccess:0,box:2,manual:[],search:2,postgresql:[5,4],xmllint:[],login:5,current:5,bot:4,ingest:[1,2],spss:[1,2],thing:2,befor:5,lucen:5,unabl:[],context:3,swordv2:0,commit:5,implement:[],backup:4,first:5,comment:2,via:2,prerequisit:4,redhat:4,overview:[0,1,2],por:2,number:5,releas:[0,5],"boolean":[1,2],ensur:[],instal:[5,4],guid:[0,3,2,5,4],open:5,select:[],duplic:5,avail:5,jdbc:4,from:5,zip:0,commun:[],support:[1,2],doubl:[1,2],system:4,been:0,next:2,compon:4,binari:[],master:5,valu:[1,2],statement:0,handl:4,cottag:[],interfac:2,reformat:[],preview:5,type:[1,2],individu:4,exampl:[0,5,4],etc:0,analysi:2,option:4,offer:[],python:0,checklist:[],brand:2,relat:[],protect:2,task:2,"export":2,appendix:[2,4],link:2,templat:[2,4],atom:0,alreadi:5,git:5,harvest:2,haven:5,statist:2,wide:[],account:[5,2],retriev:0,have:5,tab:2,restor:4,"default":2,dvn:[0,5,4],setup:4,plugin:[],tree:5,displai:0,dev:5,junit:5,project:5,url:0,intro:[],limit:[1,2],time:[5,1,2],error:5,downloadinfo:0,sampl:0,metadatasearch:0,control:2,sort:2,classif:2,featur:5,explor:2,creat:[0,5,2],dublin:0,share:0,site:[],indic:3,destin:5,tabular:2,openscholar:2,schedul:2,tag:5,increment:5,tabl:[3,2],need:4,curl:0,work:[],check:5,icefac:[],develop:5,googl:4,secur:4,titl:0,make:5,get:2,tip:5,"switch":5,oai:2,client:0,how:2,other:[1,2],role:2,build:5,branch:5,test:5,you:[5,4],csv:2,simpl:[],bundl:5,map:0,stat:[1,2],notif:2,resourc:[2,4],track:2,lockss:[2,4],clone:5,object:4,statu:[],metadatasearchfield:0,model:5,perl:[],catalog:0,user:2,vocabulari:2,deploi:[],data:[0,1,2],"class":5,guestbook:2,pull:5,util:2,github:5,don:5,java:[],directori:5,entri:0,author:[0,2],descript:2,visual:2,doe:4,inform:0,script:5,contact:2,environ:5,deposit:0,determin:0,rserv:4,push:5,element:0,remot:[5,4],order:2}})
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/domain.xml	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,477 @@
+<domain log-root="${com.sun.aas.instanceRoot}/logs" application-root="${com.sun.aas.instanceRoot}/applications" version="12">
+  <system-applications>
+    <application context-root="" location="${com.sun.aas.installRootURI}/lib/install/applications/__admingui" name="__admingui" directory-deployed="true" object-type="system-admin">
+      <module name="__admingui">
+        <engine sniffer="web"></engine>
+        <engine sniffer="security"></engine>
+      </module>
+    </application>
+  </system-applications>
+  <applications></applications>
+  <resources>
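+    <!-- The __TimerPool, DerbyPool and SamplePool entries below appear to be
+         the stock Derby resources of a default GlassFish domain; the
+         DVN-specific resources (the notifyMailSession mail session, the
+         dvnDbPool PostgreSQL pool and the JMS queues) follow them. -->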
+    <jdbc-resource pool-name="__TimerPool" jndi-name="jdbc/__TimerPool" object-type="system-admin"></jdbc-resource>
+    <jdbc-resource pool-name="DerbyPool" jndi-name="jdbc/__default"></jdbc-resource>
+    <jdbc-connection-pool datasource-classname="org.apache.derby.jdbc.EmbeddedXADataSource" res-type="javax.sql.XADataSource" name="__TimerPool">
+      <property name="databaseName" value="${com.sun.aas.instanceRoot}/lib/databases/ejbtimer"></property>
+      <property name="connectionAttributes" value=";create=true"></property>
+    </jdbc-connection-pool>
+    <jdbc-connection-pool is-isolation-level-guaranteed="false" datasource-classname="org.apache.derby.jdbc.ClientDataSource" res-type="javax.sql.DataSource" name="DerbyPool">
+      <property name="PortNumber" value="1527"></property>
+      <property name="Password" value="APP"></property>
+      <property name="User" value="APP"></property>
+      <property name="serverName" value="localhost"></property>
+      <property name="DatabaseName" value="sun-appserv-samples"></property>
+      <property name="connectionAttributes" value=";create=true"></property>
+    </jdbc-connection-pool>
+    <jdbc-connection-pool datasource-classname="org.apache.derby.jdbc.ClientDataSource" res-type="javax.sql.DataSource" name="SamplePool">
+      <property name="URL" value="jdbc:derby://localhost:1527/sample"></property>
+      <property name="PortNumber" value="1527"></property>
+      <property name="Password" value="app"></property>
+      <property name="DatabaseName" value="sample"></property>
+      <property name="serverName" value="localhost"></property>
+      <property name="User" value="app"></property>
+    </jdbc-connection-pool>
+    <jdbc-resource pool-name="SamplePool" jndi-name="jdbc/sample"></jdbc-resource>
+    <mail-resource from="do-not-reply@jurzua-lenovo" host="localhost" description="" jndi-name="mail/notifyMailSession" user="dataversenotify"></mail-resource>
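+    <!-- dvnDbPool is the application's PostgreSQL connection pool, exposed to
+         the DVN application as jdbc/VDCNetDS below. ServerName, DatabaseName,
+         User and Password are site-specific values, evidently captured here
+         from the packager's machine; the installer presumably rewrites them
+         for each deployment. -->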
+    <jdbc-connection-pool driver-classname="" datasource-classname="org.postgresql.ds.PGPoolingDataSource" is-isolation-level-guaranteed="false" res-type="javax.sql.DataSource" description="" name="dvnDbPool">
+      <property name="ConnectionAttributes" value=";create=true"></property>
+      <property name="ServerName" value="localhost"/>
+      <property name="DatabaseName" value="test1"/>
+      <property name="PortNumber" value="5432"/>
+      <property name="User" value="postgres"/>
+      <property name="Password" value="221082"/>
+      <property name="MaxConnections" value="0"></property>
+      <property name="InitialConnections" value="0"></property>
+      <property name="Ssl" value="false"></property>
+      <property name="LoginTimeout" value="0"></property>
+      <property name="PrepareThreshold" value="0"></property>
+    </jdbc-connection-pool>
+    <jdbc-resource pool-name="dvnDbPool" description="" jndi-name="jdbc/VDCNetDS"></jdbc-resource>
+    <connector-connection-pool description="" name="jms/DSBQueueConnectionFactory" resource-adapter-name="jmsra" connection-definition-name="javax.jms.QueueConnectionFactory" transaction-support=""></connector-connection-pool>
+    <connector-resource pool-name="jms/DSBQueueConnectionFactory" jndi-name="jms/DSBQueueConnectionFactory"></connector-resource>
+    <connector-connection-pool description="" name="jms/IndexMessageFactory" resource-adapter-name="jmsra" connection-definition-name="javax.jms.QueueConnectionFactory" transaction-support=""></connector-connection-pool>
+    <connector-resource pool-name="jms/IndexMessageFactory" jndi-name="jms/IndexMessageFactory"></connector-resource>
+    <admin-object-resource res-adapter="jmsra" res-type="javax.jms.Queue" description="" jndi-name="jms/DSBIngest">
+      <property description="null" name="Name" value="DSBIngest"></property>
+    </admin-object-resource>
+    <admin-object-resource res-adapter="jmsra" res-type="javax.jms.Queue" description="" jndi-name="jms/IndexMessage">
+      <property description="null" name="Name" value="IndexMessage"></property>
+    </admin-object-resource>
+  </resources>
+  <servers>
+    <server name="server" config-ref="server-config">
+      <application-ref ref="__admingui" virtual-servers="__asadmin"></application-ref>
+      <resource-ref ref="jdbc/__TimerPool"></resource-ref>
+      <resource-ref ref="jdbc/__default"></resource-ref>
+      <resource-ref ref="jdbc/sample"></resource-ref>
+      <resource-ref ref="mail/notifyMailSession"></resource-ref>
+      <resource-ref ref="jdbc/VDCNetDS"></resource-ref>
+      <resource-ref ref="jms/DSBQueueConnectionFactory"></resource-ref>
+      <resource-ref ref="jms/IndexMessageFactory"></resource-ref>
+      <resource-ref ref="jms/DSBIngest"></resource-ref>
+      <resource-ref ref="jms/IndexMessage"></resource-ref>
+    </server>
+  </servers>
+  <nodes>
+    <node node-host="localhost" name="localhost-domain1" type="CONFIG" install-dir="${com.sun.aas.productRoot}"></node>
+  </nodes>
+  <configs>
+    <config name="server-config">
+      <http-service access-logging-enabled="true">
+        <access-log rotation-interval-in-minutes="15"></access-log>
+        <virtual-server id="server" network-listeners="http-listener-1,http-listener-2">
+          <property name="allowLinking" value="true"></property>
+        </virtual-server>
+        <virtual-server id="__asadmin" network-listeners="admin-listener"></virtual-server>
+      </http-service>
+      <iiop-service>
+        <orb use-thread-pool-ids="thread-pool-1"></orb>
+        <iiop-listener port="3700" id="orb-listener-1" address="0.0.0.0" lazy-init="true"></iiop-listener>
+        <iiop-listener port="3820" id="SSL" address="0.0.0.0" security-enabled="true">
+          <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as"></ssl>
+        </iiop-listener>
+        <iiop-listener port="3920" id="SSL_MUTUALAUTH" address="0.0.0.0" security-enabled="true">
+          <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as" client-auth-enabled="true"></ssl>
+        </iiop-listener>
+      </iiop-service>
+      <admin-service system-jmx-connector-name="system" type="das-and-server">
+        <jmx-connector port="8686" address="0.0.0.0" security-enabled="false" auth-realm-name="admin-realm" name="system"></jmx-connector>
+        <property name="adminConsoleContextRoot" value="/admin"></property>
+        <property name="adminConsoleDownloadLocation" value="${com.sun.aas.installRoot}/lib/install/applications/admingui.war"></property>
+        <property name="ipsRoot" value="${com.sun.aas.installRoot}/.."></property>
+        <das-config></das-config>
+      </admin-service>
+      <connector-service></connector-service>
+      <web-container>
+        <session-config>
+          <session-manager>
+            <manager-properties></manager-properties>
+            <store-properties></store-properties>
+          </session-manager>
+          <session-properties></session-properties>
+        </session-config>
+      </web-container>
+      <ejb-container session-store="${com.sun.aas.instanceRoot}/session-store">
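+        <!-- EJB timers are persisted in the DVN PostgreSQL database via
+             jdbc/VDCNetDS rather than in the default embedded-Derby
+             __TimerPool, so scheduled jobs live alongside the application
+             data. -->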
+        <ejb-timer-service timer-datasource="jdbc/VDCNetDS"></ejb-timer-service>
+      </ejb-container>
+      <mdb-container></mdb-container>
+      <jms-service default-jms-host="default_JMS_host" type="EMBEDDED">
+        <jms-host host="localhost" name="default_JMS_host"></jms-host>
+      </jms-service>
+      <security-service>
+        <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="admin-realm">
+          <property name="file" value="${com.sun.aas.instanceRoot}/config/admin-keyfile"></property>
+          <property name="jaas-context" value="fileRealm"></property>
+        </auth-realm>
+        <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="file">
+          <property name="file" value="${com.sun.aas.instanceRoot}/config/keyfile"></property>
+          <property name="jaas-context" value="fileRealm"></property>
+        </auth-realm>
+        <auth-realm classname="com.sun.enterprise.security.auth.realm.certificate.CertificateRealm" name="certificate"></auth-realm>
+        <jacc-provider policy-provider="com.sun.enterprise.security.provider.PolicyWrapper" name="default" policy-configuration-factory-provider="com.sun.enterprise.security.provider.PolicyConfigurationFactoryImpl">
+          <property name="repository" value="${com.sun.aas.instanceRoot}/generated/policy"></property>
+        </jacc-provider>
+        <jacc-provider policy-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyProvider" name="simple" policy-configuration-factory-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyConfigurationFactory"></jacc-provider>
+        <audit-module classname="com.sun.enterprise.security.Audit" name="default">
+          <property name="auditOn" value="false"></property>
+        </audit-module>
+        <message-security-config auth-layer="SOAP">
+          <provider-config provider-type="client" provider-id="XWS_ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="dynamic.username.password" value="false"></property>
+            <property name="debug" value="false"></property>
+          </provider-config>
+          <provider-config provider-type="client" provider-id="ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="dynamic.username.password" value="false"></property>
+            <property name="debug" value="false"></property>
+            <property name="security.config" value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml"></property>
+          </provider-config>
+          <provider-config provider-type="server" provider-id="XWS_ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="debug" value="false"></property>
+          </provider-config>
+          <provider-config provider-type="server" provider-id="ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="debug" value="false"></property>
+            <property name="security.config" value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml"></property>
+          </provider-config>
+        </message-security-config>
+        <message-security-config auth-layer="HttpServlet">
+          <provider-config provider-type="server" provider-id="GFConsoleAuthModule" class-name="org.glassfish.admingui.common.security.AdminConsoleAuthModule">
+            <request-policy auth-source="sender"></request-policy>
+            <response-policy></response-policy>
+            <property name="restAuthURL" value="http://localhost:${ADMIN_LISTENER_PORT}/management/sessions"></property>
+            <property name="loginPage" value="/login.jsf"></property>
+            <property name="loginErrorPage" value="/loginError.jsf"></property>
+          </provider-config>
+        </message-security-config>
+        <property name="default-digest-algorithm" value="SHA-256"></property>
+      </security-service>
+      <transaction-service tx-log-dir="${com.sun.aas.instanceRoot}/logs"></transaction-service>
+      <java-config debug-options="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=9009" debug-enabled="true" system-classpath="" classpath-suffix="">
+        <jvm-options>-XX:+DisableExplicitGC</jvm-options>
+        <jvm-options>-XX:+UseParallelOldGC</jvm-options>
+        <jvm-options>-Dcom.sun.grizzly.maxSelectors=32</jvm-options>
+        <jvm-options>-Djava.awt.headless=true</jvm-options>
+        <jvm-options>-XX:+UnlockDiagnosticVMOptions</jvm-options>
+        <jvm-options>-DANTLR_USE_DIRECT_CLASS_LOADING=true</jvm-options>
+        <jvm-options>-Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.disableConfigSave=false</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.poll=5000</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.log.level=2</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.bundles.startTransient=true</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.bundles.new.start=true</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/</jvm-options>
+        <jvm-options>-Dosgi.shell.telnet.maxconn=1</jvm-options>
+        <jvm-options>-Dosgi.shell.telnet.ip=127.0.0.1</jvm-options>
+        <jvm-options>-Dosgi.shell.telnet.port=6666</jvm-options>
+        <jvm-options>-Dgosh.args=--nointeractive</jvm-options>
+        <jvm-options>-Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.fileinstall</jvm-options>
+        <jvm-options>-XX:NewRatio=2</jvm-options>
+        <jvm-options>-XX:MaxPermSize=384m</jvm-options>
+        <jvm-options>-Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy</jvm-options>
+        <jvm-options>-Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf</jvm-options>
+        <jvm-options>-Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks</jvm-options>
+        <jvm-options>-Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks</jvm-options>
+        <jvm-options>-Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory</jvm-options>
+        <jvm-options>-Djdbc.drivers=org.apache.derby.jdbc.ClientDriver</jvm-options>
+        <jvm-options>-Djavax.management.builder.initial=com.sun.enterprise.v3.admin.AppServerMBeanServerBuilder</jvm-options>
+        <jvm-options>-Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed</jvm-options>
+        <jvm-options>-server</jvm-options>
+        <jvm-options>-Xmx2959m</jvm-options>
+        <jvm-options>-Djhove.conf.dir=${com.sun.aas.instanceRoot}/config</jvm-options>
+        <jvm-options>-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext${path.separator}${com.sun.aas.derbyRoot}/lib</jvm-options>
+        <jvm-options>-Dcom.sun.enterprise.taglibs=appserv-jstl.jar,jsf-impl.jar</jvm-options>
+        <jvm-options>-Dcom.sun.enterprise.taglisteners=jsf-impl.jar</jvm-options>
+        <jvm-options>-Ddvn.inetAddress=jurzua-lenovo</jvm-options>
+        <jvm-options>-Ddvn.networkData.libPath=${com.sun.aas.instanceRoot}/config/networkData/lib</jvm-options>
+        <jvm-options>-Ddvn.index.location=${com.sun.aas.instanceRoot}/config</jvm-options>
+        <jvm-options>-Dvdc.dsb.host=localhost</jvm-options>
+        <jvm-options>-Dvdc.dsb.rserve.port=6311</jvm-options>
+        <jvm-options>-Dvdc.dsb.rserve.pwrd=rserve</jvm-options>
+        <jvm-options>-Dvdc.dsb.rserve.user=rserve</jvm-options>
+        <jvm-options>-Dvdc.import.log.dir=${com.sun.aas.instanceRoot}/logs/import</jvm-options>
+        <jvm-options>-Dvdc.export.log.dir=${com.sun.aas.instanceRoot}/logs/export</jvm-options>
+        <jvm-options>-Dvdc.temp.file.dir=${com.sun.aas.instanceRoot}/config/files/temp</jvm-options>
+        <jvm-options>-Dvdc.study.file.dir=${com.sun.aas.instanceRoot}/config/files/studies</jvm-options>
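+        <!-- The -Ddvn.* and -Dvdc.* options above are DVN-specific settings
+             (Rserve connection, search index location, import/export and
+             study-file paths). This copy carries concrete, per-instance
+             values; the domain.xml.TEMPLATE added below parameterizes the
+             same options with %TOKEN% placeholders for the installer. -->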
+      </java-config>
+      <network-config>
+        <protocols>
+          <protocol name="http-listener-1">
+            <http default-virtual-server="server" max-connections="250">
+              <file-cache></file-cache>
+            </http>
+          </protocol>
+          <protocol security-enabled="true" name="http-listener-2">
+            <http default-virtual-server="server" max-connections="250">
+              <file-cache></file-cache>
+            </http>
+            <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" ssl3-enabled="false" cert-nickname="s1as"></ssl>
+          </protocol>
+          <protocol name="admin-listener">
+            <http default-virtual-server="__asadmin" max-connections="250" encoded-slash-enabled="true">
+              <file-cache></file-cache>
+            </http>
+          </protocol>
+        </protocols>
+        <network-listeners>
+          <network-listener port="80" protocol="http-listener-1" transport="tcp" name="http-listener-1" thread-pool="http-thread-pool"></network-listener>
+          <network-listener port="443" protocol="http-listener-2" transport="tcp" name="http-listener-2" thread-pool="http-thread-pool"></network-listener>
+          <network-listener port="4848" protocol="admin-listener" transport="tcp" name="admin-listener" thread-pool="admin-thread-pool"></network-listener>
+        </network-listeners>
+        <transports>
+          <transport name="tcp" acceptor-threads="4"></transport>
+        </transports>
+      </network-config>
+      <thread-pools>
+        <thread-pool name="admin-thread-pool" max-thread-pool-size="50" max-queue-size="256"></thread-pool>
+        <thread-pool max-thread-pool-size="32" name="http-thread-pool"></thread-pool>
+        <thread-pool name="thread-pool-1" max-thread-pool-size="201"></thread-pool>
+      </thread-pools>
+      <monitoring-service>
+        <module-monitoring-levels></module-monitoring-levels>
+      </monitoring-service>
+      <group-management-service>
+        <failure-detection></failure-detection>
+      </group-management-service>
+    </config>
+    <config name="default-config">
+      <http-service>
+        <access-log></access-log>
+        <virtual-server id="server" network-listeners="http-listener-1, http-listener-2">
+          <property name="default-web-xml" value="${com.sun.aas.instanceRoot}/config/default-web.xml"></property>
+        </virtual-server>
+        <virtual-server id="__asadmin" network-listeners="admin-listener"></virtual-server>
+      </http-service>
+      <iiop-service>
+        <orb use-thread-pool-ids="thread-pool-1"></orb>
+        <iiop-listener port="${IIOP_LISTENER_PORT}" id="orb-listener-1" address="0.0.0.0"></iiop-listener>
+        <iiop-listener port="${IIOP_SSL_LISTENER_PORT}" id="SSL" address="0.0.0.0" security-enabled="true">
+          <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as"></ssl>
+        </iiop-listener>
+        <iiop-listener port="${IIOP_SSL_MUTUALAUTH_PORT}" id="SSL_MUTUALAUTH" address="0.0.0.0" security-enabled="true">
+          <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as" client-auth-enabled="true"></ssl>
+        </iiop-listener>
+      </iiop-service>
+      <admin-service system-jmx-connector-name="system">
+        <jmx-connector port="${JMX_SYSTEM_CONNECTOR_PORT}" address="0.0.0.0" security-enabled="false" auth-realm-name="admin-realm" name="system"></jmx-connector>
+        <property name="adminConsoleDownloadLocation" value="${com.sun.aas.installRoot}/lib/install/applications/admingui.war"></property>
+        <das-config></das-config>
+      </admin-service>
+      <web-container>
+        <session-config>
+          <session-manager>
+            <manager-properties></manager-properties>
+            <store-properties></store-properties>
+          </session-manager>
+          <session-properties></session-properties>
+        </session-config>
+      </web-container>
+      <ejb-container session-store="${com.sun.aas.instanceRoot}/session-store">
+        <ejb-timer-service></ejb-timer-service>
+      </ejb-container>
+      <mdb-container></mdb-container>
+      <jms-service addresslist-behavior="priority" default-jms-host="default_JMS_host" type="EMBEDDED">
+        <jms-host port="${JMS_PROVIDER_PORT}" host="localhost" name="default_JMS_host"></jms-host>
+      </jms-service>
+      <log-service log-rotation-limit-in-bytes="2000000" file="${com.sun.aas.instanceRoot}/logs/server.log">
+        <module-log-levels></module-log-levels>
+      </log-service>
+      <security-service>
+        <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="admin-realm">
+          <property name="file" value="${com.sun.aas.instanceRoot}/config/admin-keyfile"></property>
+          <property name="jaas-context" value="fileRealm"></property>
+        </auth-realm>
+        <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="file">
+          <property name="file" value="${com.sun.aas.instanceRoot}/config/keyfile"></property>
+          <property name="jaas-context" value="fileRealm"></property>
+        </auth-realm>
+        <auth-realm classname="com.sun.enterprise.security.auth.realm.certificate.CertificateRealm" name="certificate"></auth-realm>
+        <jacc-provider policy-provider="com.sun.enterprise.security.provider.PolicyWrapper" name="default" policy-configuration-factory-provider="com.sun.enterprise.security.provider.PolicyConfigurationFactoryImpl">
+          <property name="repository" value="${com.sun.aas.instanceRoot}/generated/policy"></property>
+        </jacc-provider>
+        <jacc-provider policy-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyProvider" name="simple" policy-configuration-factory-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyConfigurationFactory"></jacc-provider>
+        <audit-module classname="com.sun.enterprise.security.Audit" name="default">
+          <property name="auditOn" value="false"></property>
+        </audit-module>
+        <message-security-config auth-layer="SOAP">
+          <provider-config provider-type="client" provider-id="XWS_ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="dynamic.username.password" value="false"></property>
+            <property name="debug" value="false"></property>
+          </provider-config>
+          <provider-config provider-type="client" provider-id="ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="dynamic.username.password" value="false"></property>
+            <property name="debug" value="false"></property>
+            <property name="security.config" value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml"></property>
+          </provider-config>
+          <provider-config provider-type="server" provider-id="XWS_ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="debug" value="false"></property>
+          </provider-config>
+          <provider-config provider-type="server" provider-id="ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="debug" value="false"></property>
+            <property name="security.config" value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml"></property>
+          </provider-config>
+        </message-security-config>
+      </security-service>
+      <transaction-service tx-log-dir="${com.sun.aas.instanceRoot}/logs" automatic-recovery="true"></transaction-service>
+      <diagnostic-service></diagnostic-service>
+      <java-config debug-options="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=${JAVA_DEBUGGER_PORT}" system-classpath="" classpath-suffix="">
+        <jvm-options>-XX:MaxPermSize=192m</jvm-options>
+        <jvm-options>-server</jvm-options>
+        <jvm-options>-Djava.awt.headless=true</jvm-options>
+        <jvm-options>-XX:+UnlockDiagnosticVMOptions</jvm-options>
+        <jvm-options>-Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed</jvm-options>
+        <jvm-options>-Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy</jvm-options>
+        <jvm-options>-Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf</jvm-options>
+        <jvm-options>-Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as</jvm-options>
+        <jvm-options>-Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks</jvm-options>
+        <jvm-options>-Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks</jvm-options>
+        <jvm-options>-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext</jvm-options>
+        <jvm-options>-Djdbc.drivers=org.apache.derby.jdbc.ClientDriver</jvm-options>
+        <jvm-options>-DANTLR_USE_DIRECT_CLASS_LOADING=true</jvm-options>
+        <jvm-options>-Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory</jvm-options>
+        <jvm-options>-XX:NewRatio=2</jvm-options>
+        <jvm-options>-Xmx2959m</jvm-options>
+        <jvm-options>-Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.fileinstall</jvm-options>
+        <jvm-options>-Dosgi.shell.telnet.port=${OSGI_SHELL_TELNET_PORT}</jvm-options>
+        <jvm-options>-Dosgi.shell.telnet.maxconn=1</jvm-options>
+        <jvm-options>-Dosgi.shell.telnet.ip=127.0.0.1</jvm-options>
+        <jvm-options>-Dgosh.args=--noshutdown -c noop=true</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.poll=5000</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.log.level=3</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.bundles.new.start=true</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.bundles.startTransient=true</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.disableConfigSave=false</jvm-options>
+      </java-config>
+      <availability-service>
+        <web-container-availability></web-container-availability>
+        <ejb-container-availability sfsb-store-pool-name="jdbc/hastore"></ejb-container-availability>
+        <jms-availability></jms-availability>
+      </availability-service>
+      <network-config>
+        <protocols>
+          <protocol name="http-listener-1">
+            <http default-virtual-server="server">
+              <file-cache></file-cache>
+            </http>
+          </protocol>
+          <protocol security-enabled="true" name="http-listener-2">
+            <http default-virtual-server="server">
+              <file-cache></file-cache>
+            </http>
+            <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" ssl3-enabled="false" cert-nickname="s1as"></ssl>
+          </protocol>
+          <protocol name="admin-listener">
+            <http default-virtual-server="__asadmin" max-connections="250">
+              <file-cache></file-cache>
+            </http>
+          </protocol>
+          <protocol security-enabled="true" name="sec-admin-listener">
+            <http default-virtual-server="__asadmin" encoded-slash-enabled="true">
+              <file-cache></file-cache>
+            </http>
+            <ssl client-auth="want" classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="glassfish-instance"></ssl>
+          </protocol>
+          <protocol name="admin-http-redirect">
+            <http-redirect secure="true"></http-redirect>
+          </protocol>
+          <protocol name="pu-protocol">
+            <port-unification>
+              <protocol-finder protocol="sec-admin-listener" name="http-finder" classname="com.sun.grizzly.config.HttpProtocolFinder"></protocol-finder>
+              <protocol-finder protocol="admin-http-redirect" name="admin-http-redirect" classname="com.sun.grizzly.config.HttpProtocolFinder"></protocol-finder>
+            </port-unification>
+          </protocol>
+        </protocols>
+
+        <network-listeners>
+          <network-listener port="${HTTP_LISTENER_PORT}" protocol="http-listener-1" transport="tcp" name="http-listener-1" thread-pool="http-thread-pool"></network-listener>
+          <network-listener port="${HTTP_SSL_LISTENER_PORT}" protocol="http-listener-2" transport="tcp" name="http-listener-2" thread-pool="http-thread-pool"></network-listener>
+          <network-listener port="${ASADMIN_LISTENER_PORT}" protocol="admin-listener" transport="tcp" name="admin-listener" thread-pool="admin-thread-pool"></network-listener>
+        </network-listeners>
+        <transports>
+          <transport name="tcp"></transport>
+        </transports>
+      </network-config>
+      <thread-pools>
+        <thread-pool name="http-thread-pool"></thread-pool>
+        <thread-pool max-thread-pool-size="200" name="thread-pool-1"></thread-pool>
+        <thread-pool name="admin-thread-pool" max-thread-pool-size="50" max-queue-size="256"></thread-pool>
+      </thread-pools>
+      <group-management-service>
+        <failure-detection></failure-detection>
+      </group-management-service>
+      <management-rules></management-rules>
+      <system-property name="ASADMIN_LISTENER_PORT" value="24848"></system-property>
+      <system-property name="HTTP_LISTENER_PORT" value="28080"></system-property>
+      <system-property name="HTTP_SSL_LISTENER_PORT" value="28181"></system-property>
+      <system-property name="JMS_PROVIDER_PORT" value="27676"></system-property>
+      <system-property name="IIOP_LISTENER_PORT" value="23700"></system-property>
+      <system-property name="IIOP_SSL_LISTENER_PORT" value="23820"></system-property>
+      <system-property name="IIOP_SSL_MUTUALAUTH_PORT" value="23920"></system-property>
+      <system-property name="JMX_SYSTEM_CONNECTOR_PORT" value="28686"></system-property>
+      <system-property name="OSGI_SHELL_TELNET_PORT" value="26666"></system-property>
+      <system-property name="JAVA_DEBUGGER_PORT" value="29009"></system-property>
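+      <!-- The default-config listeners resolve their ports from the
+           system-properties above; the 2xxxx values keep instances created
+           from this config clear of the DAS ports (4848, 80, 443). -->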
+      <monitoring-service>
+        <module-monitoring-levels></module-monitoring-levels>
+      </monitoring-service>
+      <connector-service></connector-service>
+    </config>
+  </configs>
+  <property name="administrative.domain.name" value="domain1"></property>
+
+  <secure-admin special-admin-indicator="%GF_SPEC_INDICATOR%">
+      <secure-admin-principal dn="CN=jurzua-lenovo,OU=GlassFish,O=Oracle Corporation,L=Santa Clara,ST=California,C=US"></secure-admin-principal>
+      <secure-admin-principal dn="CN=jurzua-lenovo-instance,OU=GlassFish,O=Oracle Corporation,L=Santa Clara,ST=California,C=US"></secure-admin-principal>
+  </secure-admin>
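+  <!-- The secure-admin principal DNs above correspond to the self-signed
+       certificates GlassFish generates for this host (the "s1as" server
+       certificate and the "glassfish-instance" certificate referenced by
+       the ssl elements earlier); they authorize remote administration. -->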
+
+</domain>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/domain.xml.TEMPLATE	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,477 @@
+<domain log-root="${com.sun.aas.instanceRoot}/logs" application-root="${com.sun.aas.instanceRoot}/applications" version="12">
+  <system-applications>
+    <application context-root="" location="${com.sun.aas.installRootURI}/lib/install/applications/__admingui" name="__admingui" directory-deployed="true" object-type="system-admin">
+      <module name="__admingui">
+        <engine sniffer="web"></engine>
+        <engine sniffer="security"></engine>
+      </module>
+    </application>
+  </system-applications>
+  <applications></applications>
+  <resources>
+    <jdbc-resource pool-name="__TimerPool" jndi-name="jdbc/__TimerPool" object-type="system-admin"></jdbc-resource>
+    <jdbc-resource pool-name="DerbyPool" jndi-name="jdbc/__default"></jdbc-resource>
+    <jdbc-connection-pool datasource-classname="org.apache.derby.jdbc.EmbeddedXADataSource" res-type="javax.sql.XADataSource" name="__TimerPool">
+      <property name="databaseName" value="${com.sun.aas.instanceRoot}/lib/databases/ejbtimer"></property>
+      <property name="connectionAttributes" value=";create=true"></property>
+    </jdbc-connection-pool>
+    <jdbc-connection-pool is-isolation-level-guaranteed="false" datasource-classname="org.apache.derby.jdbc.ClientDataSource" res-type="javax.sql.DataSource" name="DerbyPool">
+      <property name="PortNumber" value="1527"></property>
+      <property name="Password" value="APP"></property>
+      <property name="User" value="APP"></property>
+      <property name="serverName" value="localhost"></property>
+      <property name="DatabaseName" value="sun-appserv-samples"></property>
+      <property name="connectionAttributes" value=";create=true"></property>
+    </jdbc-connection-pool>
+    <jdbc-connection-pool datasource-classname="org.apache.derby.jdbc.ClientDataSource" res-type="javax.sql.DataSource" name="SamplePool">
+      <property name="URL" value="jdbc:derby://localhost:1527/sample"></property>
+      <property name="PortNumber" value="1527"></property>
+      <property name="Password" value="app"></property>
+      <property name="DatabaseName" value="sample"></property>
+      <property name="serverName" value="localhost"></property>
+      <property name="User" value="app"></property>
+    </jdbc-connection-pool>
+    <jdbc-resource pool-name="SamplePool" jndi-name="jdbc/sample"></jdbc-resource>
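+    <!-- __TimerPool, DerbyPool and SamplePool above are the stock Derby
+         resources from a default GlassFish domain; the DVN-specific
+         resources (notification mail session, Postgres pool, JMS queues)
+         follow below. -->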
+    <mail-resource from="do-not-reply@%HOST_DNS_ADDRESS%" host="%MAIL_SERVER%" description="" jndi-name="mail/notifyMailSession" user="dataversenotify"></mail-resource>
+    <jdbc-connection-pool driver-classname="" datasource-classname="org.postgresql.ds.PGPoolingDataSource" is-isolation-level-guaranteed="false" res-type="javax.sql.DataSource" description="" name="dvnDbPool">
+      <property name="ConnectionAttributes" value=";create=true"></property>
+      <property name="ServerName" value="%POSTGRES_SERVER%"></property>
+      <property name="DatabaseName" value="%POSTGRES_DATABASE%"></property>
+      <property name="PortNumber" value="%POSTGRES_PORT%"></property>
+      <property name="User" value="%POSTGRES_USER%"></property>
+      <property name="Password" value="%POSTGRES_PASSWORD%"></property>
+      <property name="MaxConnections" value="0"></property>
+      <property name="InitialConnections" value="0"></property>
+      <property name="Ssl" value="false"></property>
+      <property name="LoginTimeout" value="0"></property>
+      <property name="PrepareThreshold" value="0"></property>
+    </jdbc-connection-pool>
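+    <!-- %POSTGRES_SERVER%, %POSTGRES_DATABASE%, %POSTGRES_PORT%,
+         %POSTGRES_USER% and %POSTGRES_PASSWORD% are installer tokens:
+         the install script prompts for these exact names (its
+         @CONFIG_VARIABLES list) and substitutes the answers here. -->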
+    <jdbc-resource pool-name="dvnDbPool" description="" jndi-name="jdbc/VDCNetDS"></jdbc-resource>
+    <connector-connection-pool description="" name="jms/DSBQueueConnectionFactory" resource-adapter-name="jmsra" connection-definition-name="javax.jms.QueueConnectionFactory" transaction-support=""></connector-connection-pool>
+    <connector-resource pool-name="jms/DSBQueueConnectionFactory" jndi-name="jms/DSBQueueConnectionFactory"></connector-resource>
+    <connector-connection-pool description="" name="jms/IndexMessageFactory" resource-adapter-name="jmsra" connection-definition-name="javax.jms.QueueConnectionFactory" transaction-support=""></connector-connection-pool>
+    <connector-resource pool-name="jms/IndexMessageFactory" jndi-name="jms/IndexMessageFactory"></connector-resource>
+    <admin-object-resource res-adapter="jmsra" res-type="javax.jms.Queue" description="" jndi-name="jms/DSBIngest">
+      <property description="null" name="Name" value="DSBIngest"></property>
+    </admin-object-resource>
+    <admin-object-resource res-adapter="jmsra" res-type="javax.jms.Queue" description="" jndi-name="jms/IndexMessage">
+      <property description="null" name="Name" value="IndexMessage"></property>
+    </admin-object-resource>
+  </resources>
+  <servers>
+    <server name="server" config-ref="server-config">
+      <application-ref ref="__admingui" virtual-servers="__asadmin"></application-ref>
+      <resource-ref ref="jdbc/__TimerPool"></resource-ref>
+      <resource-ref ref="jdbc/__default"></resource-ref>
+      <resource-ref ref="jdbc/sample"></resource-ref>
+      <resource-ref ref="mail/notifyMailSession"></resource-ref>
+      <resource-ref ref="jdbc/VDCNetDS"></resource-ref>
+      <resource-ref ref="jms/DSBQueueConnectionFactory"></resource-ref>
+      <resource-ref ref="jms/IndexMessageFactory"></resource-ref>
+      <resource-ref ref="jms/DSBIngest"></resource-ref>
+      <resource-ref ref="jms/IndexMessage"></resource-ref>
+    </server>
+  </servers>
+  <nodes>
+    <node node-host="localhost" name="localhost-domain1" type="CONFIG" install-dir="${com.sun.aas.productRoot}"></node>
+  </nodes>
+  <configs>
+    <config name="server-config">
+      <http-service access-logging-enabled="true">
+        <access-log rotation-interval-in-minutes="15"></access-log>
+        <virtual-server id="server" network-listeners="http-listener-1,http-listener-2">
+          <property name="allowLinking" value="true"></property>
+        </virtual-server>
+        <virtual-server id="__asadmin" network-listeners="admin-listener"></virtual-server>
+      </http-service>
+      <iiop-service>
+        <orb use-thread-pool-ids="thread-pool-1"></orb>
+        <iiop-listener port="3700" id="orb-listener-1" address="0.0.0.0" lazy-init="true"></iiop-listener>
+        <iiop-listener port="3820" id="SSL" address="0.0.0.0" security-enabled="true">
+          <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as"></ssl>
+        </iiop-listener>
+        <iiop-listener port="3920" id="SSL_MUTUALAUTH" address="0.0.0.0" security-enabled="true">
+          <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as" client-auth-enabled="true"></ssl>
+        </iiop-listener>
+      </iiop-service>
+      <admin-service system-jmx-connector-name="system" type="das-and-server">
+        <jmx-connector port="8686" address="0.0.0.0" security-enabled="false" auth-realm-name="admin-realm" name="system"></jmx-connector>
+        <property name="adminConsoleContextRoot" value="/admin"></property>
+        <property name="adminConsoleDownloadLocation" value="${com.sun.aas.installRoot}/lib/install/applications/admingui.war"></property>
+        <property name="ipsRoot" value="${com.sun.aas.installRoot}/.."></property>
+        <das-config></das-config>
+      </admin-service>
+      <connector-service></connector-service>
+      <web-container>
+        <session-config>
+          <session-manager>
+            <manager-properties></manager-properties>
+            <store-properties></store-properties>
+          </session-manager>
+          <session-properties></session-properties>
+        </session-config>
+      </web-container>
+      <ejb-container session-store="${com.sun.aas.instanceRoot}/session-store">
+        <ejb-timer-service timer-datasource="jdbc/VDCNetDS"></ejb-timer-service>
+      </ejb-container>
+      <mdb-container></mdb-container>
+      <jms-service default-jms-host="default_JMS_host" type="EMBEDDED">
+        <jms-host host="localhost" name="default_JMS_host"></jms-host>
+      </jms-service>
+      <security-service>
+        <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="admin-realm">
+          <property name="file" value="${com.sun.aas.instanceRoot}/config/admin-keyfile"></property>
+          <property name="jaas-context" value="fileRealm"></property>
+        </auth-realm>
+        <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="file">
+          <property name="file" value="${com.sun.aas.instanceRoot}/config/keyfile"></property>
+          <property name="jaas-context" value="fileRealm"></property>
+        </auth-realm>
+        <auth-realm classname="com.sun.enterprise.security.auth.realm.certificate.CertificateRealm" name="certificate"></auth-realm>
+        <jacc-provider policy-provider="com.sun.enterprise.security.provider.PolicyWrapper" name="default" policy-configuration-factory-provider="com.sun.enterprise.security.provider.PolicyConfigurationFactoryImpl">
+          <property name="repository" value="${com.sun.aas.instanceRoot}/generated/policy"></property>
+        </jacc-provider>
+        <jacc-provider policy-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyProvider" name="simple" policy-configuration-factory-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyConfigurationFactory"></jacc-provider>
+        <audit-module classname="com.sun.enterprise.security.Audit" name="default">
+          <property name="auditOn" value="false"></property>
+        </audit-module>
+        <message-security-config auth-layer="SOAP">
+          <provider-config provider-type="client" provider-id="XWS_ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="dynamic.username.password" value="false"></property>
+            <property name="debug" value="false"></property>
+          </provider-config>
+          <provider-config provider-type="client" provider-id="ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="dynamic.username.password" value="false"></property>
+            <property name="debug" value="false"></property>
+            <property name="security.config" value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml"></property>
+          </provider-config>
+          <provider-config provider-type="server" provider-id="XWS_ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="debug" value="false"></property>
+          </provider-config>
+          <provider-config provider-type="server" provider-id="ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="debug" value="false"></property>
+            <property name="security.config" value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml"></property>
+          </provider-config>
+        </message-security-config>
+        <message-security-config auth-layer="HttpServlet">
+          <provider-config provider-type="server" provider-id="GFConsoleAuthModule" class-name="org.glassfish.admingui.common.security.AdminConsoleAuthModule">
+            <request-policy auth-source="sender"></request-policy>
+            <response-policy></response-policy>
+            <property name="restAuthURL" value="http://localhost:${ADMIN_LISTENER_PORT}/management/sessions"></property>
+            <property name="loginPage" value="/login.jsf"></property>
+            <property name="loginErrorPage" value="/loginError.jsf"></property>
+          </provider-config>
+        </message-security-config>
+        <property name="default-digest-algorithm" value="SHA-256"></property>
+      </security-service>
+      <transaction-service tx-log-dir="${com.sun.aas.instanceRoot}/logs"></transaction-service>
+      <java-config debug-options="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=9009" debug-enabled="true" system-classpath="" classpath-suffix="">
+        <jvm-options>-XX:+DisableExplicitGC</jvm-options>
+        <jvm-options>-XX:+UseParallelOldGC</jvm-options>
+        <jvm-options>-Dcom.sun.grizzly.maxSelectors=32</jvm-options>
+        <jvm-options>-Djava.awt.headless=true</jvm-options>
+        <jvm-options>-XX:+UnlockDiagnosticVMOptions</jvm-options>
+        <jvm-options>-DANTLR_USE_DIRECT_CLASS_LOADING=true</jvm-options>
+        <jvm-options>-Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.disableConfigSave=false</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.poll=5000</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.log.level=2</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.bundles.startTransient=true</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.bundles.new.start=true</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/</jvm-options>
+        <jvm-options>-Dosgi.shell.telnet.maxconn=1</jvm-options>
+        <jvm-options>-Dosgi.shell.telnet.ip=127.0.0.1</jvm-options>
+        <jvm-options>-Dosgi.shell.telnet.port=6666</jvm-options>
+        <jvm-options>-Dgosh.args=--nointeractive</jvm-options>
+        <jvm-options>-Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.fileinstall</jvm-options>
+        <jvm-options>-XX:NewRatio=2</jvm-options>
+        <jvm-options>-XX:MaxPermSize=384m</jvm-options>
+        <jvm-options>-Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy</jvm-options>
+        <jvm-options>-Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf</jvm-options>
+        <jvm-options>-Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks</jvm-options>
+        <jvm-options>-Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks</jvm-options>
+        <jvm-options>-Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory</jvm-options>
+        <jvm-options>-Djdbc.drivers=org.apache.derby.jdbc.ClientDriver</jvm-options>
+        <jvm-options>-Djavax.management.builder.initial=com.sun.enterprise.v3.admin.AppServerMBeanServerBuilder</jvm-options>
+        <jvm-options>-Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed</jvm-options>
+        <jvm-options>-server</jvm-options>
+        <jvm-options>-Xmx%DEF_MEM_SIZE%</jvm-options>
+        <jvm-options>-Djhove.conf.dir=${com.sun.aas.instanceRoot}/config</jvm-options>
+        <jvm-options>-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext${path.separator}${com.sun.aas.derbyRoot}/lib</jvm-options>
+        <jvm-options>-Dcom.sun.enterprise.taglibs=appserv-jstl.jar,jsf-impl.jar</jvm-options>
+        <jvm-options>-Dcom.sun.enterprise.taglisteners=jsf-impl.jar</jvm-options>
+        <jvm-options>-Ddvn.inetAddress=%HOST_DNS_ADDRESS%</jvm-options>
+        <jvm-options>-Ddvn.networkData.libPath=${com.sun.aas.instanceRoot}/config/networkData/lib</jvm-options>
+        <jvm-options>-Ddvn.index.location=${com.sun.aas.instanceRoot}/config</jvm-options>
+        <jvm-options>-Dvdc.dsb.host=%RSERVE_HOST%</jvm-options>
+        <jvm-options>-Dvdc.dsb.rserve.port=%RSERVE_PORT%</jvm-options>
+        <jvm-options>-Dvdc.dsb.rserve.pwrd=%RSERVE_PASSWORD%</jvm-options>
+        <jvm-options>-Dvdc.dsb.rserve.user=%RSERVE_USER%</jvm-options>
+        <jvm-options>-Dvdc.import.log.dir=${com.sun.aas.instanceRoot}/logs/import</jvm-options>
+        <jvm-options>-Dvdc.export.log.dir=${com.sun.aas.instanceRoot}/logs/export</jvm-options>
+        <jvm-options>-Dvdc.temp.file.dir=${com.sun.aas.instanceRoot}/config/files/temp</jvm-options>
+        <jvm-options>-Dvdc.study.file.dir=${com.sun.aas.instanceRoot}/config/files/studies</jvm-options>
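+        <!-- %HOST_DNS_ADDRESS% and the %RSERVE_*% tokens above map directly
+             to installer prompts; %DEF_MEM_SIZE% replaces the hand-set
+             -Xmx2959m of the deployed copy, presumably computed by the
+             installer from the host's available memory. -->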
+      </java-config>
+      <network-config>
+        <protocols>
+          <protocol name="http-listener-1">
+            <http default-virtual-server="server" max-connections="250">
+              <file-cache></file-cache>
+            </http>
+          </protocol>
+          <protocol security-enabled="true" name="http-listener-2">
+            <http default-virtual-server="server" max-connections="250">
+              <file-cache></file-cache>
+            </http>
+            <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" ssl3-enabled="false" cert-nickname="s1as"></ssl>
+          </protocol>
+          <protocol name="admin-listener">
+            <http default-virtual-server="__asadmin" max-connections="250" encoded-slash-enabled="true">
+              <file-cache></file-cache>
+            </http>
+          </protocol>
+        </protocols>
+        <network-listeners>
+          <network-listener port="80" protocol="http-listener-1" transport="tcp" name="http-listener-1" thread-pool="http-thread-pool"></network-listener>
+          <network-listener port="443" protocol="http-listener-2" transport="tcp" name="http-listener-2" thread-pool="http-thread-pool"></network-listener>
+          <network-listener port="4848" protocol="admin-listener" transport="tcp" name="admin-listener" thread-pool="admin-thread-pool"></network-listener>
+        </network-listeners>
+        <transports>
+          <transport name="tcp" acceptor-threads="4"></transport>
+        </transports>
+      </network-config>
+      <thread-pools>
+        <thread-pool name="admin-thread-pool" max-thread-pool-size="50" max-queue-size="256"></thread-pool>
+        <thread-pool max-thread-pool-size="32" name="http-thread-pool"></thread-pool>
+        <thread-pool name="thread-pool-1" max-thread-pool-size="201"></thread-pool>
+      </thread-pools>
+      <monitoring-service>
+        <module-monitoring-levels></module-monitoring-levels>
+      </monitoring-service>
+      <group-management-service>
+        <failure-detection></failure-detection>
+      </group-management-service>
+    </config>
+    <config name="default-config">
+      <http-service>
+        <access-log></access-log>
+        <virtual-server id="server" network-listeners="http-listener-1, http-listener-2">
+          <property name="default-web-xml" value="${com.sun.aas.instanceRoot}/config/default-web.xml"></property>
+        </virtual-server>
+        <virtual-server id="__asadmin" network-listeners="admin-listener"></virtual-server>
+      </http-service>
+      <iiop-service>
+        <orb use-thread-pool-ids="thread-pool-1"></orb>
+        <iiop-listener port="${IIOP_LISTENER_PORT}" id="orb-listener-1" address="0.0.0.0"></iiop-listener>
+        <iiop-listener port="${IIOP_SSL_LISTENER_PORT}" id="SSL" address="0.0.0.0" security-enabled="true">
+          <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as"></ssl>
+        </iiop-listener>
+        <iiop-listener port="${IIOP_SSL_MUTUALAUTH_PORT}" id="SSL_MUTUALAUTH" address="0.0.0.0" security-enabled="true">
+          <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as" client-auth-enabled="true"></ssl>
+        </iiop-listener>
+      </iiop-service>
+      <admin-service system-jmx-connector-name="system">
+        <jmx-connector port="${JMX_SYSTEM_CONNECTOR_PORT}" address="0.0.0.0" security-enabled="false" auth-realm-name="admin-realm" name="system"></jmx-connector>
+        <property name="adminConsoleDownloadLocation" value="${com.sun.aas.installRoot}/lib/install/applications/admingui.war"></property>
+        <das-config></das-config>
+      </admin-service>
+      <web-container>
+        <session-config>
+          <session-manager>
+            <manager-properties></manager-properties>
+            <store-properties></store-properties>
+          </session-manager>
+          <session-properties></session-properties>
+        </session-config>
+      </web-container>
+      <ejb-container session-store="${com.sun.aas.instanceRoot}/session-store">
+        <ejb-timer-service></ejb-timer-service>
+      </ejb-container>
+      <mdb-container></mdb-container>
+      <jms-service addresslist-behavior="priority" default-jms-host="default_JMS_host" type="EMBEDDED">
+        <jms-host port="${JMS_PROVIDER_PORT}" host="localhost" name="default_JMS_host"></jms-host>
+      </jms-service>
+      <log-service log-rotation-limit-in-bytes="2000000" file="${com.sun.aas.instanceRoot}/logs/server.log">
+        <module-log-levels></module-log-levels>
+      </log-service>
+      <security-service>
+        <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="admin-realm">
+          <property name="file" value="${com.sun.aas.instanceRoot}/config/admin-keyfile"></property>
+          <property name="jaas-context" value="fileRealm"></property>
+        </auth-realm>
+        <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="file">
+          <property name="file" value="${com.sun.aas.instanceRoot}/config/keyfile"></property>
+          <property name="jaas-context" value="fileRealm"></property>
+        </auth-realm>
+        <auth-realm classname="com.sun.enterprise.security.auth.realm.certificate.CertificateRealm" name="certificate"></auth-realm>
+        <jacc-provider policy-provider="com.sun.enterprise.security.provider.PolicyWrapper" name="default" policy-configuration-factory-provider="com.sun.enterprise.security.provider.PolicyConfigurationFactoryImpl">
+          <property name="repository" value="${com.sun.aas.instanceRoot}/generated/policy"></property>
+        </jacc-provider>
+        <jacc-provider policy-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyProvider" name="simple" policy-configuration-factory-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyConfigurationFactory"></jacc-provider>
+        <audit-module classname="com.sun.enterprise.security.Audit" name="default">
+          <property name="auditOn" value="false"></property>
+        </audit-module>
+        <message-security-config auth-layer="SOAP">
+          <provider-config provider-type="client" provider-id="XWS_ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="dynamic.username.password" value="false"></property>
+            <property name="debug" value="false"></property>
+          </provider-config>
+          <provider-config provider-type="client" provider-id="ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="dynamic.username.password" value="false"></property>
+            <property name="debug" value="false"></property>
+            <property name="security.config" value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml"></property>
+          </provider-config>
+          <provider-config provider-type="server" provider-id="XWS_ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="debug" value="false"></property>
+          </provider-config>
+          <provider-config provider-type="server" provider-id="ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule">
+            <request-policy auth-source="content"></request-policy>
+            <response-policy auth-source="content"></response-policy>
+            <property name="encryption.key.alias" value="s1as"></property>
+            <property name="signature.key.alias" value="s1as"></property>
+            <property name="debug" value="false"></property>
+            <property name="security.config" value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml"></property>
+          </provider-config>
+        </message-security-config>
+      </security-service>
+      <transaction-service tx-log-dir="${com.sun.aas.instanceRoot}/logs" automatic-recovery="true"></transaction-service>
+      <diagnostic-service></diagnostic-service>
+      <java-config debug-options="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=${JAVA_DEBUGGER_PORT}" system-classpath="" classpath-suffix="">
+        <jvm-options>-XX:MaxPermSize=192m</jvm-options>
+        <jvm-options>-server</jvm-options>
+        <jvm-options>-Djava.awt.headless=true</jvm-options>
+        <jvm-options>-XX:+UnlockDiagnosticVMOptions</jvm-options>
+        <jvm-options>-Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed</jvm-options>
+        <jvm-options>-Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy</jvm-options>
+        <jvm-options>-Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf</jvm-options>
+        <jvm-options>-Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as</jvm-options>
+        <jvm-options>-Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks</jvm-options>
+        <jvm-options>-Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks</jvm-options>
+        <jvm-options>-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext</jvm-options>
+        <jvm-options>-Djdbc.drivers=org.apache.derby.jdbc.ClientDriver</jvm-options>
+        <jvm-options>-DANTLR_USE_DIRECT_CLASS_LOADING=true</jvm-options>
+        <jvm-options>-Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory</jvm-options>
+        <jvm-options>-XX:NewRatio=2</jvm-options>
+        <jvm-options>-Xmx%DEF_MEM_SIZE%</jvm-options>
+        <jvm-options>-Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.fileinstall</jvm-options>
+        <jvm-options>-Dosgi.shell.telnet.port=${OSGI_SHELL_TELNET_PORT}</jvm-options>
+        <jvm-options>-Dosgi.shell.telnet.maxconn=1</jvm-options>
+        <jvm-options>-Dosgi.shell.telnet.ip=127.0.0.1</jvm-options>
+        <jvm-options>-Dgosh.args=--noshutdown -c noop=true</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.poll=5000</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.log.level=3</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.bundles.new.start=true</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.bundles.startTransient=true</jvm-options>
+        <jvm-options>-Dfelix.fileinstall.disableConfigSave=false</jvm-options>
+      </java-config>
+      <availability-service>
+        <web-container-availability></web-container-availability>
+        <ejb-container-availability sfsb-store-pool-name="jdbc/hastore"></ejb-container-availability>
+        <jms-availability></jms-availability>
+      </availability-service>
+      <network-config>
+        <protocols>
+          <protocol name="http-listener-1">
+            <http default-virtual-server="server">
+              <file-cache></file-cache>
+            </http>
+          </protocol>
+          <protocol security-enabled="true" name="http-listener-2">
+            <http default-virtual-server="server">
+              <file-cache></file-cache>
+            </http>
+            <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" ssl3-enabled="false" cert-nickname="s1as"></ssl>
+          </protocol>
+          <protocol name="admin-listener">
+            <http default-virtual-server="__asadmin" max-connections="250">
+              <file-cache></file-cache>
+            </http>
+          </protocol>
+          <protocol security-enabled="true" name="sec-admin-listener">
+            <http default-virtual-server="__asadmin" encoded-slash-enabled="true">
+              <file-cache></file-cache>
+            </http>
+            <ssl client-auth="want" classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="glassfish-instance"></ssl>
+          </protocol>
+          <protocol name="admin-http-redirect">
+            <http-redirect secure="true"></http-redirect>
+          </protocol>
+          <protocol name="pu-protocol">
+            <port-unification>
+              <protocol-finder protocol="sec-admin-listener" name="http-finder" classname="com.sun.grizzly.config.HttpProtocolFinder"></protocol-finder>
+              <protocol-finder protocol="admin-http-redirect" name="admin-http-redirect" classname="com.sun.grizzly.config.HttpProtocolFinder"></protocol-finder>
+            </port-unification>
+          </protocol>
+        </protocols>
+
+        <network-listeners>
+          <network-listener port="${HTTP_LISTENER_PORT}" protocol="http-listener-1" transport="tcp" name="http-listener-1" thread-pool="http-thread-pool"></network-listener>
+          <network-listener port="${HTTP_SSL_LISTENER_PORT}" protocol="http-listener-2" transport="tcp" name="http-listener-2" thread-pool="http-thread-pool"></network-listener>
+          <network-listener port="${ASADMIN_LISTENER_PORT}" protocol="admin-listener" transport="tcp" name="admin-listener" thread-pool="admin-thread-pool"></network-listener>
+        </network-listeners>
+        <transports>
+          <transport name="tcp"></transport>
+        </transports>
+      </network-config>
+      <thread-pools>
+        <thread-pool name="http-thread-pool"></thread-pool>
+        <thread-pool max-thread-pool-size="200" name="thread-pool-1"></thread-pool>
+        <thread-pool name="admin-thread-pool" max-thread-pool-size="50" max-queue-size="256"></thread-pool>
+      </thread-pools>
+      <group-management-service>
+        <failure-detection></failure-detection>
+      </group-management-service>
+      <management-rules></management-rules>
+      <system-property name="ASADMIN_LISTENER_PORT" value="24848"></system-property>
+      <system-property name="HTTP_LISTENER_PORT" value="28080"></system-property>
+      <system-property name="HTTP_SSL_LISTENER_PORT" value="28181"></system-property>
+      <system-property name="JMS_PROVIDER_PORT" value="27676"></system-property>
+      <system-property name="IIOP_LISTENER_PORT" value="23700"></system-property>
+      <system-property name="IIOP_SSL_LISTENER_PORT" value="23820"></system-property>
+      <system-property name="IIOP_SSL_MUTUALAUTH_PORT" value="23920"></system-property>
+      <system-property name="JMX_SYSTEM_CONNECTOR_PORT" value="28686"></system-property>
+      <system-property name="OSGI_SHELL_TELNET_PORT" value="26666"></system-property>
+      <system-property name="JAVA_DEBUGGER_PORT" value="29009"></system-property>
+      <monitoring-service>
+        <module-monitoring-levels></module-monitoring-levels>
+      </monitoring-service>
+      <connector-service></connector-service>
+    </config>
+  </configs>
+  <property name="administrative.domain.name" value="domain1"></property>
+
+  <secure-admin special-admin-indicator="%GF_SPEC_INDICATOR%">
+      <secure-admin-principal dn="CN=%HOST_DNS_ADDRESS%,OU=GlassFish,O=Oracle Corporation,L=Santa Clara,ST=California,C=US"></secure-admin-principal>
+      <secure-admin-principal dn="CN=%HOST_DNS_ADDRESS%-instance,OU=GlassFish,O=Oracle Corporation,L=Santa Clara,ST=California,C=US"></secure-admin-principal>
+  </secure-admin>
+
+</domain>
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/install	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,1183 @@
+#!/usr/bin/perl
+
+use Getopt::Long;
+use Socket; 
+use File::Copy;
+
+my( %opts ) = ( );
+my( $rez ) = GetOptions( \%opts, "pg_only!");
+
+my $postgresonly = 0; 
+
+my @CONFIG_VARIABLES = (); 
+
+unless ($opts{pg_only})
+{
+    @CONFIG_VARIABLES = (       
+	    'HOST_DNS_ADDRESS',
+	    'GLASSFISH_DIRECTORY',
+	    'MAIL_SERVER',
+
+	    'POSTGRES_SERVER',
+	    'POSTGRES_PORT',
+	    'POSTGRES_DATABASE',
+	    'POSTGRES_USER',
+	    'POSTGRES_PASSWORD',
+
+	    'RSERVE_HOST',
+	    'RSERVE_PORT',
+	    'RSERVE_USER',
+	    'RSERVE_PASSWORD'
+
+	    ); 
+}
+else 
+{
+    @CONFIG_VARIABLES = (       
+	    'POSTGRES_SERVER',
+	    'POSTGRES_PORT',
+	    'POSTGRES_DATABASE',
+	    'POSTGRES_USER',
+	    'POSTGRES_PASSWORD'
+	    ); 
+
+    $postgresonly = 1; 
+}
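+# Usage sketch (derived from the GetOptions spec above, plus the root check
+# performed below):
+#
+#   sudo ./install             # full interactive DVN setup
+#   sudo ./install --pg_only   # configure the local Postgres database only
+#
+# (the "pg_only!" spec makes the flag negatable, so --nopg_only also parses)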
+
+my %CONFIG_DEFAULTS = 
+    (       
+	    'HOST_DNS_ADDRESS', '',
+	    'GLASSFISH_DIRECTORY', '/home/glassfish/glassfish',
+	    'MAIL_SERVER',      'localhost',
+
+	    'POSTGRES_SERVER',  'localhost',
+	    'POSTGRES_PORT',    5432,
+	    'POSTGRES_DATABASE','dvnDb',
+	    'POSTGRES_USER',    'postgres',
+	    'POSTGRES_PASSWORD','admin',
+
+	    'RSERVE_HOST',      'localhost',
+	    'RSERVE_PORT',      6311,
+	    'RSERVE_USER',      'rserve',
+	    'RSERVE_PASSWORD',  'rserve'
+
+	    ); 
+
+
+my %CONFIG_PROMPTS = 
+    (       
+	    'HOST_DNS_ADDRESS', 'Internet Address of your host',
+	    'GLASSFISH_DIRECTORY', 'Glassfish Directory', 
+	    'MAIL_SERVER', 'SMTP (mail) server to relay notification messages',
+
+	    'POSTGRES_SERVER',  'Postgres Server',
+	    'POSTGRES_PORT',    'Postgres Server Port',
+	    'POSTGRES_DATABASE','Name of the Postgres Database',
+	    'POSTGRES_USER',    'Name of the Postgres User',
+	    'POSTGRES_PASSWORD','Postgres user password',
+
+	    'RSERVE_HOST',      'Rserve Server',
+	    'RSERVE_PORT',      'Rserve Server Port',
+	    'RSERVE_USER',      'Rserve User Name',
+	    'RSERVE_PASSWORD',  'Rserve User Password'
+
+	    ); 
+
+# Supported Postgres JDBC drivers:
+# (have to be configured explicitly, so that Perl "taint" (security) mode
+# doesn't get paranoid)
+
+my $POSTGRES_DRIVER_8_3 = "postgresql-8.3-603.jdbc4.jar";  
+#my $POSTGRES_DRIVER_8_4 = "postgresql-8.4-703.jdbc4.jar";
+my $POSTGRES_DRIVER_8_4 = "postgresql-8.3-603.jdbc4.jar";  
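+# (note: the 8.3 jar above is apparently reused for Postgres 8.4 as well;
+# the dedicated 8.4 driver line is kept commented out)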
+my $POSTGRES_DRIVER_9_0 = "postgresql-9.0-802.jdbc4.jar";
+my $POSTGRES_DRIVER_9_1 = "postgresql-9.1-902.jdbc4.jar";
+
+
+# A few preliminary checks: 
+
+# user -- must be root: 
+
+$user_real = `who am i`; 
+chop $user_real; 
+$user_real =~s/ .*$//; 
+
+if ( $< != 0 ) 
+{
+    print STDERR "\nERROR: You must be logged in as root to run the installer.\n\n";
+    exit 1; 
+}
+
+# OS: 
+
+my $uname_out = `uname -a`; 
+
+# hostname: 
+
+my $hostname_from_cmdline = `hostname`; 
+chop $hostname_from_cmdline; 
+
+$CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} = $hostname_from_cmdline;
+
+
+print "\nWelcome to the DVN installer.\n";
+unless ($opts{pg_only}) 
+{
+    print "You will be guided through the process of setting up a NEW\n";
+    print "instance of the DVN application\n";
+} 
+else 
+{
+    print "You will be guided through the process of configuring the\n";
+    print "LOCAL instance of PostgreSQL database for use by the DVN\n";
+    print "application.\n";
+}
+
+my @uname_tokens = split (" ", $uname_out); 
+
+if ( $uname_tokens[0] eq "Darwin" )
+{
+    print "\nThis appears to be a MacOS X system; good.\n";
+    # TODO: check the OS version
+
+    $WORKING_OS = "MacOSX"; 
+}
+elsif ( $uname_tokens[0] eq "Linux" )
+{
+    if ( -f "/etc/redhat-release" )
+    {
+	print "\nThis appears to be a RedHat system; good.\n";
+	$WORKING_OS = "RedHat"; 
+	# TODO: check the distro version
+    }
+    else 
+    {
+	print "\nThis appears to be a non-RedHat Linux system;\n";
+	print "this installation *may* succeed; but we're not making any promises!\n";
+	$WORKING_OS = "Linux"; 
+    }
+} 
+else 
+{
+    print "\nWARNING: This appears to be neither a Linux or MacOS X system!\n";
+    print "This installer script will most likely fail. Please refer to the\n";
+    print "DVN Installers Guide for more information.\n\n";
+
+    $WORKING_OS = "Unknown";
+
+    print "Do you wish to continue?\n [y/n] ";
+
+
+    my $yesnocont = <>; chop $yesnocont;
+
+    while ( $yesnocont ne "y" && $yesnocont ne "n" )
+    {
+	print "Please enter 'y' or 'n'!\n";
+	print "(or ctrl-C to exit the installer)\n";
+	$yesnocont = <>; chop $yesnocont;
+    }
+
+    if ( $yesnocont eq "n" )
+    {
+	exit 0;
+    }
+
+}
+
+ ENTERCONFIG: 
+
+print "\n";
+print "Please enter the following configuration values:\n";
+print "(hit [RETURN] to accept the default value)\n";
+print "\n";
+
+for $ENTRY (@CONFIG_VARIABLES)
+{
+    print $CONFIG_PROMPTS{$ENTRY} . ": ";
+    print "[" . $CONFIG_DEFAULTS{$ENTRY} . "] ";
+
+    $user_entry = <>; 
+    chop $user_entry; 
+
+    if ($user_entry ne "")
+    {
+	$CONFIG_DEFAULTS{$ENTRY} = $user_entry;
+    }
+
+    print "\n";
+}
+
+# CONFIRM VALUES ENTERED: 
+
+
+print "\nOK, please confirm what you've entered:\n\n";
+
+for $ENTRY (@CONFIG_VARIABLES)
+{
+    print $CONFIG_PROMPTS{$ENTRY} . ": " . $CONFIG_DEFAULTS{$ENTRY} . "\n";
+}
+
+print "\nIs this correct? [y/n] ";
+
+
+my $yesno = <>; chop $yesno;  
+
+while ( $yesno ne "y" && $yesno ne "n" )
+{
+    print "Please enter 'y' or 'n'!\n";
+    print "(or ctrl-C to exit the installer)\n";
+    $yesno = <>; chop $yesno;  
+}
+
+if ( $yesno eq "n" )
+{
+    goto ENTERCONFIG; 
+}
+
+# VALIDATION/VERIFICATION OF THE CONFIGURATION VALUES:                                               
+# 1. VERIFY MAIL SERVER THEY CONFIGURED: 
+
+=pod
+
+unless ( $postgresonly )
+{
+
+    my ( $mail_server_iaddr, $mail_server__paddr, $mail_server_proto, $mail_server_status );
+
+    $mail_server_status = 1; 
+
+    unless ( $mail_server_iaddr = inet_aton($CONFIG_DEFAULTS{'MAIL_SERVER'}) )
+    {
+	print STDERR "Could not look up $CONFIG_DEFAULTS{'MAIL_SERVER'},\n";
+	print STDERR "the host you specified as your mail server.\n";
+	$mail_server_status = 0; 
+    }
+
+    if ($mail_server_status) {
+	$mail_server_paddr = sockaddr_in(25, $mail_server_iaddr);
+	$mail_server_proto = getprotobyname('tcp');
+
+	unless ( socket(SOCK, PF_INET, SOCK_STREAM, $mail_server_proto) &&
+		 connect(SOCK, $mail_server_paddr) ) 
+	{
+	    print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'MAIL_SERVER'},\n";
+	    print STDERR "the address you provided for your Mail server.\n";
+	    print STDERR "Please select a valid mail server, and try again.\n\n";
+
+	    $mail_server_status = 0;
+	}
+    
+    }
+
+    close (SOCK); 
+
+    unless ($mail_server_status) 
+    {
+	goto ENTERCONFIG; 
+    }
+}
+=cut
+
+# 2. CHECK IF THE WAR FILE IS AVAILABLE:                                                                
+unless ( -f "appdeploy/dist/DVN-web.war" )
+{
+    print "\nWARNING: Can't find the project .war file in appdeploy/dist/!\n";
+    print "\tAre you running the installer in the right directory?\n";
+    print "\tHave you built the war file?\n";
+    print "\t(if not, build the project and run the installer again)\n";
+
+    exit 0;
+}
+
+# check the working (installer) dir:
+my $cwd; 
+chomp($cwd = `pwd`);
+
+# 2b. CHECK IF THE SQL TEMPLATE IS IN PLACE AND CREATE THE SQL FILE 
+
+$SQL_REFERENCE_DATA = "referenceData.sql"; 
+$SQL_REFERENCE_TEMPLATE = "referenceData.sql.TEMPLATE"; 
+
+unless ( -f $SQL_REFERENCE_TEMPLATE  )
+{
+    print "\nWARNING: Can't find .sql data template!\n"; 
+    print "(are you running the installer in the right directory?)\n";
+    
+    exit 0; 
+}
+
+open (DATATEMPLATEIN, $SQL_REFERENCE_TEMPLATE) or die $!; 
+open (SQLDATAOUT, '>'.$SQL_REFERENCE_DATA) or die $!;
+
+while( <DATATEMPLATEIN> )
+{
+    s/%POSTGRES_USER%/$CONFIG_DEFAULTS{'POSTGRES_USER'}/g;
+    print SQLDATAOUT $_; 
+}
+
+
+close DATATEMPLATEIN; 
+close SQLDATAOUT; 
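+# (illustration of the substitution above, using a hypothetical template line:
+# with POSTGRES_USER=dvnApp,
+#    "ALTER TABLE vdcnetwork OWNER TO %POSTGRES_USER%;"
+# would be written out as
+#    "ALTER TABLE vdcnetwork OWNER TO dvnApp;")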
+
+# 3. CHECK POSTGRES AVAILABILITY: 
+
+my $pg_local_connection = 0; 
+
+if ( $CONFIG_DEFAULTS{'POSTGRES_SERVER'} eq 'localhost' )
+{
+    $pg_local_connection = 1; 
+
+    # 3a. CHECK FOR USER postgres:
+
+    print "\nChecking system user \"postgres\"... ";
+
+    $POSTGRES_SYS_NAME = "postgres";
+    $POSTGRES_SYS_UID = (getpwnam ("postgres"))[2]; 
+
+    unless (defined $POSTGRES_SYS_UID) {
+	print STDERR "\nERROR: I haven't been able to find user \"postgres\" on the system!\n";
+	print STDERR "(TODO: prompt the user instead to supply an alternative username, if\n";
+	print STDERR "available)\n";
+
+	exit 1; 
+    } 
+
+    print "OK.\n";
+
+    # 3b. LOCATE THE EXECUTABLE:
+
+    $sys_path = $ENV{'PATH'}; 
+    @sys_path_dirs = split ( ":", $sys_path ); 
+
+    $psql_exec = ""; 
+
+    for $sys_path_dir ( @sys_path_dirs )
+    {
+	if ( -x $sys_path_dir . "/psql" ) 
+	{
+	    $psql_exec = $sys_path_dir; 
+	    last; 
+	}
+    }
+
+    $pg_major_version = 0; 
+    $pg_minor_version = 0; 
+
+    if ( $psql_exec eq "" && $WORKING_OS eq "MacOSX" )
+    {
+	for $pg_minor_version ( "1", "0" )
+	{
+	    if ( -x "/Library/PostgreSQL/9." . $pg_minor_version . "/bin/psql" ) 
+	    {
+		$pg_major_version = 9;
+		$psql_exec = "/Library/PostgreSQL/9." . $pg_minor_version . "/bin";
+		last; 
+	    }
+	}
+	if (!$pg_major_version)
+	{
+	    for $pg_minor_version ( "4", "3" )
+	    {
+		if ( -x "/Library/PostgreSQL/8." . $pg_minor_version . "/bin/psql" ) 
+		{
+		    $pg_major_version = 8; 
+		    $psql_exec = "/Library/PostgreSQL/8." . $pg_minor_version . "/bin";
+		    last; 
+		}
+	    }
+	}
+    }
+
+    if ( $psql_exec eq "" )
+    {
+	print STDERR "\nERROR: I haven't been able to find the psql command in your PATH!\n";
+	print STDERR "Please make sure PostgreSQL is properly installed and try again.\n\n";
+
+	exit 1; 
+    }
+
+    
+
+    # 3c. CHECK POSTGRES VERSION: 
+
+    open (PSQLOUT, $psql_exec . "/psql --version|"); 
+
+    $psql_version_line = <PSQLOUT>; 
+    chop $psql_version_line; 
+    close PSQLOUT; 
+
+    my ($postgresName, $postgresNameLong, $postgresVersion) = split ( " ", $psql_version_line ); 
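+    # (the version line parsed here typically looks like "psql (PostgreSQL) 9.1.9",
+    # yielding $postgresName="psql" and $postgresVersion="9.1.9")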
+
+    unless ( $postgresName eq "psql" && $postgresVersion =~ /^[0-9][0-9\.]*$/ )
+    {
+	print STDERR "\nERROR: Unexpected output from psql command!\n";
+	print STDERR "Please make sure PostgreSQL is properly installed and try again.\n\n";
+
+	exit 1; 
+    }
+
+
+    my (@postgres_version_tokens) = split ( '\.', $postgresVersion ); 
+
+    unless ( ($postgres_version_tokens[0] == 8 && $postgres_version_tokens[1] >= 3) || ($postgres_version_tokens[0] >= 9) )
+    {
+	print STDERR "\nERROR: PostgreSQL version 8.3, or newer, is required!\n";
+	print STDERR "Found a copy of psql ($psql_exec/psql) that belongs to version $postgresVersion.\n\n";
+	print STDERR "Please make sure the right version of PostgreSQL is properly installed,\n";
+	print STDERR "and the right version of psql comes first in the PATH,\n";
+	print STDERR "then try again.\n";
+
+	exit 1; 
+    }
+
+    print "\n\nFound Postgres psql command, version $postgresVersion. Good.\n\n";
+
+    $pg_major_version = $postgres_version_tokens[0];
+    $pg_minor_version = $postgres_version_tokens[1];
+
+    # 4. CONFIGURE POSTGRES: 
+
+    print "\nConfiguring Postgres Database:\n";
+
+    
+
+    $< = $POSTGRES_SYS_UID; 
+    $> = $POSTGRES_SYS_UID; 
+
+    # 4a. CHECK IF POSTGRES IS RUNNING:
+    print "Checking if a local instance of Postgres is running and accessible...\n";
+
+    # (change to /tmp before executing the command below - 
+    # we are trying to do it as user postgres, and it may not have 
+    # access to the current, installer directory; the command would still 
+    # work, but there would be an error message from the shell init on screen 
+    # - potentially confusing)
+    chdir ("/tmp");
+
+    if (!system ($psql_exec . "/psql -c 'SELECT * FROM pg_roles' > /dev/null 2>&1"))
+    {
+	print "Yes, it is.\n";
+    }
+    else
+    {
+	print "Nope, I haven't been able to connect to the local instance of the\n";
+	print "PostgreSQL daemon. Is postgresql running? \n";
+	print "On a RedHat system, you can check the status of the daemon with\n\n";
+	print "   service postgresql status\n\n";
+	print "and, if it's not running, start the daemon with\n\n";
+	print "   service postgresql start\n\n";
+	print "On MacOSX, use Applications -> PostgreSQL -> Start Server.\n";
+	print "Also, please make sure that the daemon is listening to network connections,\n";
+	print "at least on the localhost interface. (See the \"Installing Postgres\" section\n";
+	print "of the installation manual).\n";
+	print "Finally, please make sure that the postgres user can make localhost \n";
+	print "connections without supplying a password. (That's controlled by the \n";
+	print "\"localhost ... ident\" line in pg_hba.conf; again, please consult the \n";
+	print "installation manual).\n";
+
+
+	exit 1; 
+    }
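+    # (for reference, an ident line of the kind mentioned above would look
+    # roughly like this in pg_hba.conf -- exact fields vary by Postgres version:
+    #    local   all   all   ident )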
+
+
+    # 4c. CHECK IF THIS DB ALREADY EXISTS:
+    
+    $psql_command_dbcheck = $psql_exec . "/psql -c \"\" -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " >/dev/null 2>&1"; 
+    if ( ($exitcode = system($psql_command_dbcheck)) == 0 )
+    {
+	# switch back to root uid:
+	$> = 0; 
+	$< = 0; 
+	chdir ($cwd); 
+
+	print "WARNING! Database " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " already exists!\n";
+	print "\nPlease note that you can only use this installer to create a blank, \n"; 
+	print "new and shiny DVN database. I.e., you cannot install on top of an \n";
+	print "existing database. Please enter a different name for the DVN database.\n";
+	print "\nPress any key to continue, or ctrl-C to exit the installer...\n\n";
+
+	system "stty cbreak </dev/tty >/dev/tty 2>&1";
+	my $key = getc(STDIN);
+	system "stty -cbreak </dev/tty >/dev/tty 2>&1";
+	print "\n";
+
+	goto ENTERCONFIG; 
+ 
+    }
+
+    # 4d. CHECK IF THIS USER ALREADY EXISTS:
+
+    $psql_command_rolecheck = $psql_exec . "/psql -c \"\" -d postgres " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " >/dev/null 2>&1";
+    if ( ($exitcode = system($psql_command_rolecheck)) == 0 )
+    {
+	print "User (role) " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " already exists;\n";
+	print "Proceeding.\n";
+    }
+    else 
+    {
+	# 4e. CREATE DVN DB USER:
+
+	print "\nCreating Postgres user (role) for the DVN:\n";
+
+	open TMPCMD, ">/tmp/pgcmd.$$.tmp";
+	
+	# with unencrypted password: 
+	#print TMPCMD "CREATE ROLE ".$CONFIG_DEFAULTS{'POSTGRES_USER'}." UNENCRYPTED PASSWORD '".$CONFIG_DEFAULTS{'POSTGRES_PASSWORD'}."' NOSUPERUSER CREATEDB CREATEROLE NOINHERIT LOGIN";
+	
+	# with md5-encrypted password:
+	$pg_password_md5 = &create_pg_hash ($CONFIG_DEFAULTS{'POSTGRES_USER'},$CONFIG_DEFAULTS{'POSTGRES_PASSWORD'}); 
+	my $sql_command = "CREATE ROLE \"".$CONFIG_DEFAULTS{'POSTGRES_USER'}."\" PASSWORD 'md5". $pg_password_md5 ."' NOSUPERUSER CREATEDB CREATEROLE INHERIT LOGIN";
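+	# (with the defaults above, the generated statement looks like:
+	#   CREATE ROLE "postgres" PASSWORD 'md5<32-hex-digest>'
+	#     NOSUPERUSER CREATEDB CREATEROLE INHERIT LOGIN
+	# -- the digest shown is just a placeholder)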
+	
+	print TMPCMD $sql_command; 
+	close TMPCMD; 
+
+	my $psql_commandline = $psql_exec . "/psql -f /tmp/pgcmd.$$.tmp";
+	
+	unless ( ($exitcode = system($psql_commandline)) == 0 )
+	{
+	    print STDERR "Could not create the DVN Postgres user role!\n";
+	    print STDERR "(SQL: " . $sql_command . ")\n";
+	    print STDERR "(psql exit code: " . $exitcode . ")\n";
+	    exit 1; 
+	}
+
+	unlink "/tmp/pgcmd.$$.tmp";
+	print "done.\n";
+    }
+    
+    # 4f. CREATE DVN DB: 
+
+    print "\nCreating Postgres database:\n";
+
+    $psql_command = $psql_exec . "/createdb ".$CONFIG_DEFAULTS{'POSTGRES_DATABASE'}." --owner=".$CONFIG_DEFAULTS{'POSTGRES_USER'};
+
+    unless ( ($exitcode = system("$psql_command")) == 0 ) 
+    {
+	print STDERR "Could not create Postgres database for the DVN app!\n";
+	print STDERR "(command: " . $psql_command . ")\n";
+	print STDERR "(psql exit code: " . $exitcode . ")\n";
+	print STDERR "\naborting the installation (sorry!)\n\n";
+	exit 1; 
+    }
+
+
+# Changing back to root UID: 
+
+    $> = 0; 
+    $< = 0; 
+     
+    chdir ($cwd); 
+
+}
+else 
+{
+    if (0) # DEV. INSTALLER ONLY: 
+    {
+    print "\nIt is strongly recommended that you use a local PostgresQL server,\n";
+    print "running on localhost, in your development environment!\n\n";
+
+    print "Do you wish to continue?\n [y/n] ";
+
+
+    my $yesnocont = <>; chop $yesnocont;
+
+    while ( $yesnocont ne "y" && $yesnocont ne "n" )
+    {
+	print "Please enter 'y' or 'n'!\n";
+	print "(or ctrl-C to exit the installer)\n";
+	$yesnocont = <>; chop $yesnocont;
+    }
+
+    if ( $yesnocont eq "n" )
+    {
+	print "(aborting the installation)\n";
+	exit 0;
+    }
+    }
+
+    if ( $opts{pg_only} )
+    {
+        print "In --pg_only mode, this script must be run locally,\n";
+        print "i.e., on the server where PostgreSQL is running.\n";
+
+        exit 1; 
+    }
+    
+    print "In order to use a PostgresQL database running on a remote server,\n";
+    print "Please run this installer on that host with the \"--pg_only\" option:\n\n";
+    print "./install --pg_only\n\n";
+
+    print "Press any key to continue the installation process once that has been\n";
+    print "done. Or press ctrl-C to exit the installer.\n\n";
+
+    chdir ("/tmp");
+    system "stty cbreak </dev/tty >/dev/tty 2>&1";
+    my $key = getc(STDIN);
+    system "stty -cbreak </dev/tty >/dev/tty 2>&1";
+    print "\n";
+    chdir ($cwd);
+
+    # Check if the role and database have been created on the remote server:
+    # -- TODO; 
+
+    # Find out what Postgres version is running remotely:
+
+    $pg_major_version = 9;
+    $pg_minor_version = 1;
+
+    print "What version of PostgresQL is installed on the remote server?\n [" . $pg_major_version . "." . $pg_minor_version . "] ";
+
+
+    my $postgresVersion = <>; chop $postgresVersion;
+
+    while ( $postgresVersion ne "" && !($postgresVersion =~/^[0-9]+\.[0-9]+$/) )
+    {
+	print "Please enter a valid Postgres version!\n";
+	print "(or ctrl-C to exit the installer)\n";
+	$postgresVersion = <>; chop $postgresVersion;
+    }
+
+    unless ( $postgresVersion eq "" )
+    {
+	my (@postgres_version_tokens) = split ( '\.', $postgresVersion ); 
+
+	unless ( ($postgres_version_tokens[0] == 8 && $postgres_version_tokens[1] >= 3) || ($postgres_version_tokens[0] >= 9) )
+	{
+	    print STDERR "\nERROR: PostgreSQL version 8.3, or newer, is required!\n";
+	    print STDERR "Please make sure the right version of PostgreSQL is properly installed\n";
+	    print STDERR "on the remote server, then try again.\n";
+	    
+	    exit 1; 
+	}
+
+	$pg_major_version = $postgres_version_tokens[0];
+	$pg_minor_version = $postgres_version_tokens[1];
+    }
+
+}
+
+
+if ( $postgresonly )
+{
+    print "\nOK, done.\n";
+    print "You can now resume the installation on the main DVN host.\n\n";
+    
+    exit 0; 
+}
+
+
+# 5. CONFIGURE GLASSFISH
+
+print "\nProceeding with the Glassfish setup.\n";
+print "\nChecking your Glassfish installation..."; 
+
+my $glassfish_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; 
+
+# 5a. CHECK IF GLASSFISH DIR LOOKS OK:
+
+print " (" . $glassfish_dir . "/glassfish/domains/domain1) ";
+
+unless ( -d $glassfish_dir."/glassfish/domains/domain1" )
+{
+    # TODO: need better check than this
+
+    while ( ! ( -d $glassfish_dir."/glassfish/domains/domain1" ) )
+    {
+	print "\nInvalid Glassfish directory " . $glassfish_dir . "!\n";
+	print "Enter the root directory of your Glassfish installation:\n";
+	print "(Or ctrl-C to exit the installer): "; 
+
+	$glassfish_dir = <>; 
+	chop $glassfish_dir; 
+    }
+}
+
+print "OK!\n";
+
+# 5b. DETERMINE HOW MUCH MEMORY TO GIVE TO GLASSFISH AS HEAP:
+
+$gf_heap_default = "2048m"; 
+$sys_mem_total = 0; 
+
+if ( -e "/proc/meminfo" && open MEMINFO, "/proc/meminfo" ) 
+{
+    # Linux 
+
+    while ( $mline = <MEMINFO> )
+    {
+	if ( $mline =~ /MemTotal:[ \t]*([0-9]*) kB/ )
+	{
+	    $sys_mem_total = $1; 
+	}
+    }
+
+    close MEMINFO; 
+
+} 
+elsif ( -x "/usr/sbin/sysctl" ) 
+{
+    # MacOS X, probably...
+
+    $sys_mem_total = `/usr/sbin/sysctl -n hw.memsize`; 
+    chop $sys_mem_total;
+    if ($sys_mem_total > 0) 
+    {
+	$sys_mem_total = int ($sys_mem_total / 1024); 
+	# size in kb
+    }
+}
+
+if ( $sys_mem_total > 0 )
+{
+    # setting the default heap size limit to 3/8 of the available 
+    # amount of memory: 
+    $gf_heap_default = ( int ($sys_mem_total / (8 / 3 * 1024) ) ); 
+
+    print "\nSetting the heap limit for Glassfish to " . $gf_heap_default . "MB. \n"; 
+    print "You may need to adjust this setting to better suit \n";
+    print "your system.\n\n";
+
+    $gf_heap_default .= "m";
+
+}
+else 
+{
+    print "\nCould not determine the amount of memory on your system.\n";
+    print "Setting the heap limit for Glassfish to 2GB. You may need \n"; 
+    print "to  adjust the value to better suit your system.\n\n";
+}
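+# (worked example of the 3/8 rule above: on a 16GB host, /proc/meminfo reports
+# a MemTotal of 16777216 kB, and int(16777216 / (8/3 * 1024)) = 6144 --
+# i.e. a "6144m" heap, exactly 3/8 of 16384MB)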
+
+push @CONFIG_VARIABLES, "DEF_MEM_SIZE"; 
+$CONFIG_DEFAULTS{"DEF_MEM_SIZE"} = $gf_heap_default; 
+
+print "\nPress any key to continue...\n\n";
+
+system "stty cbreak </dev/tty >/dev/tty 2>&1";
+my $key = getc(STDIN);
+system "stty -cbreak </dev/tty >/dev/tty 2>&1";
+print "\n";
+
+# 5c. GENERATE GLASSFISH CONFIGURATION FILE:
+
+print "\nWriting glassfish configuration file (domain.xml)... ";
+
+# 5cc. FIND THE "special-admin-indicator" IN THE ORIGINAL GLASSFISH CONFIG:
+
+open ( GFCNFG, $glassfish_dir."/glassfish/domains/domain1/config/domain.xml") or die $!; 
+
+while ( <GFCNFG> )
+{
+    if (/<secure-admin special-admin-indicator=\"([^\"]*)\"/)
+    {
+	$CONFIG_DEFAULTS{'GF_SPEC_INDICATOR'} = $1;
+    }
+}
+
+# (is it really a problem if we haven't found it?)
+
+close GFCNFG;
+
+open (TEMPLATEIN, 'domain.xml.TEMPLATE') or die $!; 
+open (CONFIGOUT, '>domain.xml') or die $!;
+
+while( <TEMPLATEIN> )
+{
+    for $ENTRY (@CONFIG_VARIABLES)
+    {
+	$patin = '%' . $ENTRY . '%'; 
+	$patout = $CONFIG_DEFAULTS{$ENTRY}; 
+	
+	s/$patin/$patout/g;
+    }
+
+    print CONFIGOUT $_; 
+
+}
+
+close TEMPLATEIN; 
+close CONFIGOUT; 
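+# (e.g. a hypothetical template line "-Xmx%DEF_MEM_SIZE%" would come out as
+# "-Xmx2048m" under the 2048m default heap size)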
+
+print "done.\n";
+
+system ("/bin/cp -f domain.xml ".$glassfish_dir."/glassfish/domains/domain1/config"); 
+#diagnostics needed!
+
+# check if the supplied config files are in the right place: 
+
+unless ( -f "config/logging.properties" )
+{
+    print "\nERROR! Configuration files not found in config dir!\n";
+    print "(are you running the installer in the right directory?\n";
+    print "Aborting...\n";
+    exit 1; 
+}
+
+print "\nCopying additional configuration files... ";
+
+system ( "/bin/cp -Rf config/* ".$glassfish_dir."/glassfish/domains/domain1/config"); 
+#diagnostics needed!
+
+# install pre-configured robots.txt blocking bot crawlers:
+system ( "/bin/cp -f robots.txt ".$glassfish_dir."/glassfish/domains/domain1/docroot"); 
+
+# install the DVN guides (HTML) into the application docroot: 
+system ( "/bin/cp -Rf doc/guides/* ".$glassfish_dir."/glassfish/domains/domain1/docroot/guides"); 
+
+
+print "done!\n";
+
+print "\nInstalling the Glassfish PostgresQL driver... ";
+
+my $install_driver_jar = "";
+
+if ( $pg_major_version == 8 ) 
+{
+    if ( $pg_minor_version == 3 ) 
+    {
+	$install_driver_jar = $POSTGRES_DRIVER_8_3;
+    }
+    elsif ( $pg_minor_version == 4 ) 
+    {
+	$install_driver_jar = $POSTGRES_DRIVER_8_4;
+    }
+}
+elsif ( $pg_major_version == 9 )
+{
+    if ( $pg_minor_version == 0 ) 
+    {
+	$install_driver_jar = $POSTGRES_DRIVER_9_0;
+    }
+    elsif ( $pg_minor_version == 1 ) 
+    {
+	$install_driver_jar = $POSTGRES_DRIVER_9_1;
+    }
+} 
+
+=pod
+unless ( $install_driver_jar ) 
+{
+    die "Installer could not find POSTGRES JDBC driver for your version of PostgreSQL!\n";
+
+} 
+=cut
+
+system ( "/bin/cp", "pgdriver/" . $install_driver_jar, $glassfish_dir."/glassfish/lib"); 
+#diagnostics needed!
+
+print "done!\n";
+
+# 5d. STOP GLASSFISH (OK IF NOT RUNNING):
+print "\nStopping glassfish...\n";
+
+unless ( ($exit_code=system ($glassfish_dir."/bin/asadmin stop-domain domain1")) == 0 )
+{
+    print STDERR "(that's OK!)\n";
+}
+
+# 5dd. INSTALL PATCHED WEBCORE GLASSFISH MODULE: 
+
+$gf_webcore_jar = $glassfish_dir."/glassfish/modules/web-core.jar";
+
+system ("/bin/mv -f ".$gf_webcore_jar . " " . $gf_webcore_jar.".PRESERVED");
+system ("/bin/cp web-core.jar ".$gf_webcore_jar); 
+
+# 5ddd. DELETE EJB TIMER APP LOCK FILE, if exists (just in case!): 
+
+system ( "/bin/rm -f ".$glassfish_dir."/glassfish/domains/domain1/generated/ejb-timer-service-app" ); 
+
+# 5e. START GLASSFISH:
+print "\nStarting glassfish.\n";
+
+unless ( ($exit_code=system ($glassfish_dir."/bin/asadmin start-domain domain1")) == 0 )
+{
+	print STDERR "Could not start glassfish!\n";
+	print STDERR "(exit code: " . $exit_code . ")\n";
+	exit 1; 
+}
+
+
+# check if glassfish is running: 
+# TODO. 
+
+# 6. DEPLOY APPLICATION:
+# 6a. DO WE HAVE ANT? 
+#  (we are no longer using ant to deploy -- L.A.)
+#
+#$sys_path = $ENV{'PATH'}; 
+#@sys_path_dirs = split ( ":", $sys_path ); 
+
+#$ant_exec = ""; 
+#
+#for $sys_path_dir ( @sys_path_dirs )
+#{
+#    if ( -x $sys_path_dir . "/ant" ) 
+#    {
+#	$ant_exec = $sys_path_dir . "/ant"; 
+#	last; 
+#    }
+#}
+#
+#if ( $ant_exec eq "" )
+#{
+#    print STDERR "\nERROR: I haven't been able to find ant command in your PATH!\n";
+#    print STDERR "Please make sure and is installed and in your PATH; then try again.\n\n";
+#
+#    exit 1; 
+#}
+# 6b. TRY TO DEPLOY:
+
+print "\nAttempting to deploy the application:\n\n";
+
+$CONFIG_DEFAULTS{'GLASSFISH_ADMIN_PASSWORD'} = 'adminadmin'; 
+# TODO: ask for password! -- in case they have already changed it
+# (update: chances are we don't even need the password anymore, as 
+# long as we are deploying locally (?))
+
+my $glassfish_password = $CONFIG_DEFAULTS{'GLASSFISH_ADMIN_PASSWORD'}; 
+
+# create deployment properties files:
+#   (these properties files are no longer used, because we are no longer
+#   using ant to deploy the app. -- L.A.)
+
+#for $prop_file ('AS', 'glassfish') 
+#{
+#    open ( TEMPLIN, "appdeploy/" . $prop_file . ".properties.TEMPLATE" ) 
+#	|| die "failed to open appdeploy/" . $prop_file . ".properties.TEMPLATE";
+#    open ( PROPOUT, ">appdeploy/" . $prop_file . ".properties" ) 
+#	|| die "failed to open appdeploy/" . $prop_file . ".properties for writing";
+#
+#    while( <TEMPLIN> )
+#    {
+#	s/%GF_ADMIN_PASSWORD%/$glassfish_password/g;
+#	s/%GF_ROOT_DIR%/$glassfish_dir/g;
+#	print PROPOUT $_; 
+#    }
+#
+#    close TEMPLIN; 
+#    close PROPOUT; 
+#}
+
+# Create the .asadminpass file, or replace it, if exists:
+
+$asadminpass_file = $ENV{'HOME'} . "/.asadminpass";
+
+if ( -e $asadminpass_file )
+{
+    system ("/bin/mv -f " . $asadminpass_file . " " . $asadminpass_file . ".PRESERVED");
+}
+
+system ("echo 'asadmin://admin@localhost:4848 ' > " . $asadminpass_file); 
+
+$deploy_command = $glassfish_dir."/bin/asadmin deploy --force=true --name=DVN-web dist/DVN-web.war";
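+# (asadmin's --force=true redeploys over any application already deployed
+# under the same name, so re-running this step should be safe)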
+
+unless ( ($exit_code = system ("cd appdeploy; " . $deploy_command)) == 0 )
+{
+	print STDERR "Could not deploy DVN application!\n";
+	print STDERR "(exit code: " . $exit_code . ")\n";
+	exit 1; 
+}
+
+if ( $pg_local_connection )
+{
+    print "\nOK; now we are going to stop glassfish and populate the database with\n";
+    print "some initial content, then start glassfish again.\n";
+}
+else
+{
+    print "\nOK; stopping glasfish.\n";
+}
+
+
+# 6c. SHUT DOWN:
+
+$gf_stop_command = $glassfish_dir."/bin/asadmin stop-domain domain1"; 
+
+unless ( ($exit_code = system ($gf_stop_command)) == 0 )
+{
+	print STDERR "Could not stop glassfish!\n";
+	print STDERR "(command line: " . $gf_stop_command . ")\n";
+	print STDERR "(exit code: " . $exit_code . ")\n";
+	print STDERR "\nPlease finish the installation process manually: \n";
+	print STDERR "stop/kill glassfish; then populate the database with \n";
+	print STDERR "the supplied initial content, by executing the following \n";
+	print STDERR "command, *as Unix user postgres*: \n\n";
+
+	$psql_command = $psql_exec . "/psql -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f " . $SQL_REFERENCE_DATA;
+
+	print STDERR $psql_command . "\n\n";
+	print STDERR "Then start glassfish again... Voila, you should then have \n";
+	print STDERR "a running DVN instance at the following URL:\n\n";
+	print STDERR "  http://" . $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} . "/dvn\n";
+
+	print STDERR "\naborting the installer... (sorry!)\n";
+
+	exit 1; 
+}
+
+# 7. POPULATE DATABASE:
+
+if ( $pg_local_connection )
+{
+    # 7a. POPULATE LOCALLY:
+    print "\nPopulating the database (local PostgresQL instance):\n\n";
+
+    # Copy the SQL file to /tmp, where user postgres will definitely 
+    # have read access to it: 
+
+    copy("referenceData.sql","/tmp") or die "Could not copy referenceData.sql to /tmp: $!";
+
+    $< = $POSTGRES_SYS_UID; 
+    $> = $POSTGRES_SYS_UID; 
+    chdir ("/tmp"); 
+    $psql_command = $psql_exec . "/psql -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f referenceData.sql";
+
+    unless ( ($exitcode = system("$psql_command")) == 0 ) 
+    {
+	print STDERR "Could not populate Postgres database for the DVN app!\n";
+	print STDERR "(command: " . $psql_command . ")\n";
+	print STDERR "(psql exit code: " . $exitcode . ")\n";
+	print STDERR "\nYou must populate the database before you can use your new\n";
+	print STDERR "DVN instance. Please consult the installation manual and/or\n";
+	print STDERR "seek support from the DVN team.\n\n";
+	exit 1; 
+	
+    }
+
+    chdir ($cwd); 
+    print "\nOK, done!\n";
+
+}
+else 
+{
+    # 7b. INSTRUCT THE USER TO POPULATE THE DB ON THE REMOTE SERVER:
+    # NOT SUPPORTED YET -- TODO
+    print "The database needs to be populated with some intial content \n"; 
+    print "before we restart the DVN one last time. \n";
+    print "However, populating a database on a remote PostgresQL server "; 
+    print "is not supported yet!\n";
+    print "Please copy the file referenceData.sql (found in this directory)\n";
+    print "onto the remote server and populate the database manually,\n";
+    print "as user postgres, with the following command:\n\n";
+    print "   psql -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f referenceData.sql\n";
+    print "then start glassfish again on this server with \n\n";
+    print "   " . $glassfish_dir."/bin/asadmin start-domain domain1\n\n";
+
+    $> = 0; 
+    $< = 0; 
+
+    exit 0; 
+    
+}
+
+# back to root:
+
+$> = 0; 
+$< = 0; 
+
+# 8. START GLASSFISH AGAIN:
+print "\nStarting glassfish, again:\n\n";
+
+$gf_start_command = $glassfish_dir."/bin/asadmin start-domain domain1"; 
+
+# delete the EJB TIMER app lock file, if exists (just in case!): 
+system ( "/bin/rm -f ".$glassfish_dir."/glassfish/domains/domain1/generated/ejb-timer-service-app" ); 
+
+unless ( ($exit_code = system ($gf_start_command)) == 0 )
+{
+	print STDERR "Could not start glassfish!\n";
+	print STDERR "(command line: " . $gf_start_command . ")\n";
+	print STDERR "(exit code: " . $exit_code . ")\n";
+	exit 1; 
+}
+
+
+print "\nYou should now have a running DVN instance;\n";
+print "Please go to the application at the following URL:\n\n";
+print "  http://" . $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} . "/dvn\n";
+print "\nand log in by using \"networkAdmin\" as both the user name\n";
+print "and password. Click the \"networkAdmin\" link on the right side\n";
+print "Of the main screen, then click \"Update Account\". Change this\n";
+print "default password and default e-mail address.\n";
+
+# 9. FINALLY, CHECK IF RSERVE IS RUNNING: 
+print "\n\nFinally, checking if Rserve is running and accessible...\n";
+
+unless ( $CONFIG_DEFAULTS{'RSERVE_PORT'}=~/^[0-9][0-9]*$/ )
+{
+    print $CONFIG_DEFAULTS{'RSERVE_PORT'} . " does not look like a valid port number,\n";
+    print "defaulting to 6311.\n\n";
+
+    $CONFIG_DEFAULTS{'RSERVE_PORT'} = 6311; 
+}
+    
+my ( $rserve_iaddr, $rserve_paddr, $rserve_proto );
+
+unless ( $rserve_iaddr = inet_aton($CONFIG_DEFAULTS{'RSERVE_HOST'}) )
+{
+    print STDERR "Could not look up $CONFIG_DEFAULTS{'RSERVE_HOST'},\n";
+    print STDERR "the host you specified as your R server.\n";
+    print STDERR "\nDVN can function without a working R server, but\n";
+    print STDERR "much of the functionality concerning running statistics\n";
+    print STDERR "and analysis on quantitative data will not be available.\n";
+    print STDERR "Please consult the Installers guide for more info.\n";
+
+    exit 0;
+}
+
+$rserve_paddr = sockaddr_in($CONFIG_DEFAULTS{'RSERVE_PORT'}, $rserve_iaddr);
+$rserve_proto = getprotobyname('tcp');
+
+unless ( socket(SOCK, PF_INET, SOCK_STREAM, $rserve_proto) &&
+	connect(SOCK, $rserve_paddr) ) 
+{
+    print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'RSERVE_HOST'}\n";
+    print STDERR "on port $CONFIG_DEFAULTS{'RSERVE_PORT'}, the address you provided\n";
+    print STDERR "for your R server.\n";
+    print STDERR "DVN can function without a working R server, but\n";
+    print STDERR "much of the functionality concerning running statistics\n";
+    print STDERR "and analysis on quantitative data will not be available.\n";
+    print STDERR "Please consult the \"Installing R\" section in the Installers guide\n";
+    print STDERR "for more info.\n";
+
+    exit 0;
+    
+}
+
+close (SOCK); 
+print "\nOK!\n";
+
+exit 0; 
+
+
+sub create_pg_hash {
+    my $pg_username = shift @_; 
+    my $pg_password = shift @_; 
+
+    $encode_line = $pg_password . $pg_username; 
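+    # (a pure-Perl alternative sketch, assuming the core Digest::MD5 module
+    # is available, would avoid shelling out to md5/md5sum entirely:
+    #    use Digest::MD5 qw(md5_hex);
+    #    return md5_hex($pg_password . $pg_username);
+    # this installer keeps the shell-based approach below)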
+
+    # for Redhat: 
+
+    ##print STDERR "executing /bin/echo -n $encode_line | md5sum\n"; 
+
+    if ( $WORKING_OS eq "MacOSX" )
+    {
+	$hash = `/bin/echo -n $encode_line | md5`; 
+    }
+    else 
+    {
+	$hash = `/bin/echo -n $encode_line | md5sum`; 
+    }
+
+    chop $hash; 
+
+    $hash =~s/  \-$//; 
+
+    if ( (length($hash) != 32) || ($hash !~ /^[0-9a-f]*$/) ) 
+    {
+	print STDERR "Failed to generate a MD5-encrypted password hash for the Postgres database.\n";
+	exit 1; 
+    }
+
+
+    return $hash;
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/install~	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,1183 @@
+#!/usr/bin/perl
+
+use Getopt::Long;
+use Socket; 
+use File::Copy;
+
+my( %opts ) = ( );
+my( $rez ) = GetOptions( \%opts, "pg_only!");
+
+my $postgresonly = 0; 
+
+my @CONFIG_VARIABLES = (); 
+
+unless ($opts{pg_only})
+{
+    @CONFIG_VARIABLES = (       
+	    'HOST_DNS_ADDRESS',
+	    'GLASSFISH_DIRECTORY',
+	    'MAIL_SERVER',
+
+	    'POSTGRES_SERVER',
+	    'POSTGRES_PORT',
+	    'POSTGRES_DATABASE',
+	    'POSTGRES_USER',
+	    'POSTGRES_PASSWORD',
+
+	    'RSERVE_HOST',
+	    'RSERVE_PORT',
+	    'RSERVE_USER',
+	    'RSERVE_PASSWORD'
+
+	    ); 
+}
+else 
+{
+    @CONFIG_VARIABLES = (       
+	    'POSTGRES_SERVER',
+	    'POSTGRES_PORT',
+	    'POSTGRES_DATABASE',
+	    'POSTGRES_USER',
+	    'POSTGRES_PASSWORD'
+	    ); 
+
+    $postgresonly = 1; 
+}
+
+my %CONFIG_DEFAULTS = 
+    (       
+	    'HOST_DNS_ADDRESS', '',
+	    'GLASSFISH_DIRECTORY', '/home/glassfish/glassfish',
+	    'MAIL_SERVER',      'localhost',
+
+	    'POSTGRES_SERVER',  'localhost',
+	    'POSTGRES_PORT',    5432,
+	    'POSTGRES_DATABASE','dvnDb',
+	    'POSTGRES_USER',    'postgres',
+	    'POSTGRES_PASSWORD','admin',
+
+	    'RSERVE_HOST',      'localhost',
+	    'RSERVE_PORT',      6311,
+	    'RSERVE_USER',      'rserve',
+	    'RSERVE_PASSWORD',  'rserve'
+
+	    ); 
+
+
+my %CONFIG_PROMPTS = 
+    (       
+	    'HOST_DNS_ADDRESS', 'Internet Address of your host',
+	    'GLASSFISH_DIRECTORY', 'Glassfish Directory', 
+	    'MAIL_SERVER', 'SMTP (mail) server to relay notification messages',
+
+	    'POSTGRES_SERVER',  'Postgres Server',
+	    'POSTGRES_PORT',    'Postgres Server Port',
+	    'POSTGRES_DATABASE','Name of the Postgres Database',
+	    'POSTGRES_USER',    'Name of the Postgres User',
+	    'POSTGRES_PASSWORD','Postgres user password',
+
+	    'RSERVE_HOST',      'Rserve Server',
+	    'RSERVE_PORT',      'Rserve Server Port',
+	    'RSERVE_USER',      'Rserve User Name',
+	    'RSERVE_PASSWORD',  'Rserve User Password'
+
+	    ); 
+
+# Supported Posstgres JDBC drivers: 
+# (have to be configured explicitely, so that Perl "taint" (security) mode 
+# doesn't get paranoid)
+
+my $POSTGRES_DRIVER_8_3 = "postgresql-8.3-603.jdbc4.jar";  
+#my $POSTGRES_DRIVER_8_4 = "postgresql-8.4-703.jdbc4.jar";
+my $POSTGRES_DRIVER_8_4 = "postgresql-8.3-603.jdbc4.jar";  
+my $POSTGRES_DRIVER_9_0 = "postgresql-9.0-802.jdbc4.jar";
+my $POSTGRES_DRIVER_9_1 = "postgresql-9.1-902.jdbc4.jar";
+
+
+# A few preliminary checks: 
+
+# user -- must be root: 
+
+$user_real = `who am i`; 
+chop $user_real; 
+$user_real =~s/ .*$//; 
+
+if ( $< != 0 ) 
+{
+    print STDERR "\nERROR: You must be logged in as root to run the installer.\n\n";
+    exit 1; 
+}
+
+# OS: 
+
+my $uname_out = `uname -a`; 
+
+# hostname: 
+
+my $hostname_from_cmdline = `hostname`; 
+chop $hostname_from_cmdline; 
+
+$CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} = $hostname_from_cmdline;
+
+
+print "\nWelcome to the DVN installer.\n";
+unless ($opts{pg_only}) 
+{
+    print "You will be guided through the process of setting up a NEW\n";
+    print "instance of the DVN application\n";
+} 
+else 
+{
+    print "You will be guided through the process of configuring the\n";
+    print "LOCAL instance of PostgreSQL database for use by the DVN\n";
+    print "application.\n";
+}
+
+my @uname_tokens = split (" ", $uname_out); 
+
+if ( $uname_tokens[0] eq "Darwin" )
+{
+    print "\nThis appears to be a MacOS X system; good.\n";
+    # TODO: check the OS version
+
+    $WORKING_OS = "MacOSX"; 
+}
+elsif ( $uname_tokens[0] eq "Linux" )
+{
+    if ( -f "/etc/redhat-release" )
+    {
+	print "\nThis appears to be a RedHat system; good.\n";
+	$WORKING_OS = "RedHat"; 
+	# TODO: check the distro version
+    }
+    else 
+    {
+	print "\nThis appears to be a non-RedHat Linux system;\n";
+	print "this installation *may* succeed; but we're not making any promises!\n";
+	$WORKING_OS = "Linux"; 
+    }
+} 
+else 
+{
+    print "\nWARNING: This appears to be neither a Linux or MacOS X system!\n";
+    print "This installer script will most likely fail. Please refer to the\n";
+    print "DVN Installers Guide for more information.\n\n";
+
+    $WORKING_OS = "Unknown";
+
+    print "Do you wish to continue?\n [y/n] ";
+
+
+    my $yesnocont = <>; chop $yesnocont;
+
+    while ( $yesnocont ne "y" && $yesnocont ne "n" )
+    {
+	print "Please enter 'y' or 'n'!\n";
+	print "(or ctrl-C to exit the installer)\n";
+	$yesnocont = <>; chop $yesnocont;
+    }
+
+    if ( $yesnocont eq "n" )
+    {
+	exit 0;
+    }
+
+}
+
+ ENTERCONFIG: 
+
+print "\n";
+print "Please enter the following configuration values:\n";
+print "(hit [RETURN] to accept the default value)\n";
+print "\n";
+
+for $ENTRY (@CONFIG_VARIABLES)
+{
+    print $CONFIG_PROMPTS{$ENTRY} . ": ";
+    print "[" . $CONFIG_DEFAULTS{$ENTRY} . "] ";
+
+    $user_entry = <>; 
+    chop $user_entry; 
+
+    if ($user_entry ne "")
+    {
+	$CONFIG_DEFAULTS{$ENTRY} = $user_entry;
+    }
+
+    print "\n";
+}
+
+# CONFIRM VALUES ENTERED: 
+
+
+print "\nOK, please confirm what you've entered:\n\n";
+
+for $ENTRY (@CONFIG_VARIABLES)
+{
+    print $CONFIG_PROMPTS{$ENTRY} . ": " . $CONFIG_DEFAULTS{$ENTRY} . "\n";
+}
+
+print "\nIs this correct? [y/n] ";
+
+
+my $yesno = <>; chop $yesno;  
+
+while ( $yesno ne "y" && $yesno ne "n" )
+{
+    print "Please enter 'y' or 'n'!\n";
+    print "(or ctrl-C to exit the installer)\n";
+    $yesno = <>; chop $yesno;  
+}
+
+if ( $yesno eq "n" )
+{
+    goto ENTERCONFIG; 
+}
+
+# VALIDATION/VERIFICATION OF THE CONFIGURATION VALUES:                                               
+# 1. VERIFY MAIL SERVER THEY CONFIGURED: 
+
+=pod
+
+unless ( $postgresonly )
+{
+
+    my ( $mail_server_iaddr, $mail_server__paddr, $mail_server_proto, $mail_server_status );
+
+    $mail_server_status = 1; 
+
+    unless ( $mail_server_iaddr = inet_aton($CONFIG_DEFAULTS{'MAIL_SERVER'}) )
+    {
+	print STDERR "Could not look up $CONFIG_DEFAULTS{'MAIL_SERVER'},\n";
+	print STDERR "the host you specified as your mail server.\n";
+	$mail_server_status = 0; 
+    }
+
+    if ($mail_server_status) {
+	$mail_server_paddr = sockaddr_in(25, $mail_server_iaddr);
+	$mail_server_proto = getprotobyname('tcp');
+
+	unless ( socket(SOCK, PF_INET, SOCK_STREAM, $mail_server_proto) &&
+		 connect(SOCK, $mail_server_paddr) ) 
+	{
+	    print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'MAIL_SERVER'},\n";
+	    print STDERR "the address you provided for your Mail server.\n";
+	    print STDERR "Please select a valid mail server, and try again.\n\n";
+
+	    $mail_server_status = 0;
+	}
+    
+    }
+
+    close (SOCK); 
+
+    unless ($mail_server_status) 
+    {
+	goto ENTERCONFIG; 
+    }
+}
+=cut
+
+# 2. CHECK IF THE WAR FILE IS AVAILABLE:                                                                
+unless ( -f "appdeploy/dist/DVN-web.war" )
+{
+    print "\nWARNING: Can't find the project .war file in appdeploy/dist/!\n";
+    print "\tAre you running the installer in the right directory?\n";
+    print "\tHave you built the war file?\n";
+    print "\t(if not, build the project and run the installer again)\n";
+
+    exit 0;
+}
+
+# check the working (installer) dir:
+my $cwd; 
+chomp($cwd = `pwd`);
+
+# 2b. CHECK IF THE SQL TEMPLATE IS IN PLACE AND CREATE THE SQL FILE 
+
+$SQL_REFERENCE_DATA = "referenceData.sql"; 
+$SQL_REFERENCE_TEMPLATE = "referenceData.sql.TEMPLATE"; 
+
+unless ( -f $SQL_REFERENCE_TEMPLATE  )
+{
+    print "\nWARNING: Can't find .sql data template!\n"; 
+    print "(are you running the installer in the right directory?)\n";
+    
+    exit 0; 
+}
+
+open DATATEMPLATEIN, $SQL_REFERENCE_TEMPLATE || die $@; 
+open SQLDATAOUT, '>'.$SQL_REFERENCE_DATA || die $@;
+
+while( <DATATEMPLATEIN> )
+{
+    s/%POSTGRES_USER%/$CONFIG_DEFAULTS{'POSTGRES_USER'}/g;
+    print SQLDATAOUT $_; 
+}
+
+
+close DATATEMPLATEIN; 
+close SQLDATAOUT; 
+
+# 3. CHECK POSTGRES AVAILABILITY: 
+
+my $pg_local_connection = 0; 
+
+if ( $CONFIG_DEFAULTS{'POSTGRES_SERVER'} eq 'localhost' )
+{
+    $pg_local_connection = 1; 
+
+    # 3a. CHECK FOR USER postgres:
+
+    print "\nChecking system user \"postgres\"... ";
+
+    $POSTGRES_SYS_NAME = "postgres";
+    $POSTGRES_SYS_UID = (getpwnam ("postgres"))[2]; 
+
+    if ($POSTGRES_SYS_UID == undef) {
+	print STDERR "\nERROR: I haven't been able to find user \"postgres\" on the system!\n";
+	print STDERR "(TODO: prompt the user instead to supply an alternative username, if\n";
+	print STDERR "available)\n";
+
+	exit 1; 
+    } 
+
+    print "OK.\n";
+
+    # 3b. LOCATE THE EXECUTABLE:
+
+    $sys_path = $ENV{'PATH'}; 
+    @sys_path_dirs = split ( ":", $sys_path ); 
+
+    $psql_exec = ""; 
+
+    for $sys_path_dir ( @sys_path_dirs )
+    {
+	if ( -x $sys_path_dir . "/psql" ) 
+	{
+	    $psql_exec = $sys_path_dir; 
+	    last; 
+	}
+    }
+
+    $pg_major_version = 0; 
+    $pg_minor_version = 0; 
+
+    if ( $psql_exec eq "" && $WORKING_OS eq "MacOSX" )
+    {
+	for $pg_minor_version ( "1", "0" )
+	{
+	    if ( -x "/Library/PostgreSQL/9." . $pg_minor_version . "/bin/psql" ) 
+	    {
+		$pg_major_version = 9;
+		$psql_exec = "/Library/PostgreSQL/9." . $pg_minor_version . "/bin";
+		last; 
+	    }
+	}
+	if (!$pg_major_version)
+	{
+	    for $pg_minor_version ( "4", "3" )
+	    {
+		if ( -x "/Library/PostgreSQL/8." . $pg_minor_version . "/bin/psql" ) 
+		{
+		    $pg_major_version = 8; 
+		    $psql_exec = "/Library/PostgreSQL/8." . $pg_minor_version . "/bin";
+		    last; 
+		}
+	    }
+	}
+    }
+
+    if ( $psql_exec eq "" )
+    {
+	print STDERR "\nERROR: I haven't been able to find the psql command in your PATH!\n";
+	print STDERR "Please make sure PostgresQL is properly installed and try again.\n\n";
+
+	exit 1; 
+    }
+
+    
+
+    # 3c. CHECK POSTGRES VERSION: 
+
+    open (PSQLOUT, $psql_exec . "/psql --version|"); 
+
+    $psql_version_line = <PSQLOUT>; 
+    chop $psql_version_line; 
+    close PSQLOUT; 
+
+    my ($postgresName, $postgresNameLong, $postgresVersion) = split ( " ", $psql_version_line ); 
+
+    unless ( $postgresName eq "psql" && $postgresVersion =~ /^[0-9][0-9\.]*$/ )
+    {
+	print STDERR "\nERROR: Unexpected output from psql command!\n";
+	print STDERR "Please make sure PostgresQL is properly installed and try again.\n\n";
+
+	exit 1; 
+    }
+
+
+    my (@postgres_version_tokens) = split ( '\.', $postgresVersion ); 
+
+    unless ( ($postgres_version_tokens[0] == 8 && $postgres_version_tokens[1] >= 3) || ($postgres_version_tokens[0] >= 9) )
+    {
+	print STDERR "\nERROR: PostgresQL version 8.3, or newer, is required!\n";
+	print STDERR "Found a copy of psql ($psql_exec/psql) that belongs to version $postgresVersion.\n\n";
+	print STDERR "Please make sure the right version of PostgresQL is properly installed,\n";
+	print STDERR "and the right version of psql comes first in the PATH,\n";
+	print STDERR "then try again.\n";
+
+	exit 1; 
+    }
+
+    print "\n\nFound Postgres psql command, version $postgresVersion. Good.\n\n";
+
+    $pg_major_version = $postgres_version_tokens[0];
+    $pg_minor_version = $postgres_version_tokens[1];
+
+    # 4. CONFIGURE POSTGRES: 
+
+    print "\nConfiguring Postgres Database:\n";
+
+    
+
+    $< = $POSTGRES_SYS_UID; 
+    $> = $POSTGRES_SYS_UID; 
+
+    # 4a. CHECK IF POSTGRES IS RUNNING:
+    print "Checking if a local instance of Postgres is running and accessible...\n";
+
+    # (change to /tmp before executing the command below - 
+    # we are trying to do it as user postgres, and it may not have 
+    # access to the current, installer directory; the command would still 
+    # work, but there would be an error message from the shell init on screen 
+    # - potentially confusing)
+    chdir ("/tmp");
+
+    if (!system ($psql_exec . "/psql -c 'SELECT * FROM pg_roles' > /dev/null 2>&1"))
+    {
+	print "Yes, it is.\n";
+    }
+    else
+    {
+	print "Nope, I haven't been able to connect to the local instance of PostgresQL.\n";
+	print "daemon. Is postgresql running? \n";
+	print "On a RedHat system, you can check the status of the daemon with\n\n";
+	print "   service postgresql status\n\n";
+	print "and, if it's not running, start the daemon with\n\n";
+	print "   service postgresql start\n\n";
+	print "On MacOSX, use Applications -> PostgresQL -> Start Server.\n";
+	print "Also, please make sure that the daemon is listening to network connections,\n";
+	print "at leaset on the localhost interface. (See \"Installing Postgres\" section\n";
+	print "of the installation manual).\n";
+	print "Finally, please make sure that the postgres user can make localhost \n";
+	print "connections without supplying a password. (That's controlled by the \n";
+	print "\"localhost ... ident\" line in pg_hba.conf; again, please consult the \n";
+	print "installation manual).\n";
+
+
+	exit 1; 
+    }
+
+
+    # 4c. CHECK IF THIS DB ALREADY EXISTS:
+    
+    $psql_command_dbcheck = $psql_exec . "/psql -c \"\" -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ">/dev/null 2>&1"; 
+    if ( ($exitcode = system($psql_command_dbcheck)) == 0 )
+    {
+	# switch back to root uid:
+	$> = 0; 
+	$< = 0; 
+	chdir ($cwd); 
+
+	print "WARNING! Database " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " already exists!\n";
+	print "\nPlease note that you can only use this installer to create a blank, \n"; 
+	print "new and shiny DVN database. I.e., you cannot install on top of an \n";
+	print "existing database. Please enter a different name for the DVN database.\n";
+	print "\nPress any key to continue, or ctrl-C to exit the installer...\n\n";
+
+	system "stty cbreak </dev/tty >/dev/tty 2>&1";
+	my $key = getc(STDIN);
+	system "stty -cbreak </dev/tty >/dev/tty 2>&1";
+	print "\n";
+
+	goto ENTERCONFIG; 
+ 
+    }
+
+    # 4d. CHECK IF THIS USER ALREADY EXISTS:
+
+    $psql_command_rolecheck = $psql_exec . "/psql -c \"\" -d postgres " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " >/dev/null 2>&1";
+    if ( ($exitcode = system($psql_command_rolecheck)) == 0 )
+    {
+	print "User (role) . " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " already exists;\n";
+	print "Proceeding.";
+    }
+    else 
+    {
+	# 4e. CREATE DVN DB USER:
+
+	print "\nCreating Postgres user (role) for the DVN:\n";
+
+	open TMPCMD, ">/tmp/pgcmd.$$.tmp";
+	
+	# with unencrypted password: 
+	#print TMPCMD "CREATE ROLE ".$CONFIG_DEFAULTS{'POSTGRES_USER'}." UNENCRYPTED PASSWORD '".$CONFIG_DEFAULTS{'POSTGRES_PASSWORD'}."' NOSUPERUSER CREATEDB CREATEROLE NOINHERIT LOGIN";
+	
+	# with md5-encrypted password:
+	$pg_password_md5 = &create_pg_hash ($CONFIG_DEFAULTS{'POSTGRES_USER'},$CONFIG_DEFAULTS{'POSTGRES_PASSWORD'}); 
+	my $sql_command = "CREATE ROLE \"".$CONFIG_DEFAULTS{'POSTGRES_USER'}."\" PASSWORD 'md5". $pg_password_md5 ."' NOSUPERUSER CREATEDB CREATEROLE INHERIT LOGIN";
+	
+	print TMPCMD $sql_command; 
+	close TMPCMD; 
+
+	my $psql_commandline = $psql_exec . "/psql -f /tmp/pgcmd.$$.tmp";
+	
+	unless ( ($exitcode = system($psql_commandline)) == 0 )
+	{
+	    print STDERR "Could not create the DVN Postgres user role!\n";
+	    print STDERR "(SQL: " . $sql_command . ")\n";
+	    print STDERR "(psql exit code: " . $exitcode . ")\n";
+	    exit 1; 
+	}
+
+	unlink "/tmp/pgcmd.$$.tmp";
+	print "done.\n";
+    }
+    
+    # 4f. CREATE DVN DB: 
+
+    print "\nCreating Postgres database:\n";
+
+    $psql_command = $psql_exec . "/createdb ".$CONFIG_DEFAULTS{'POSTGRES_DATABASE'}." --owner=".$CONFIG_DEFAULTS{'POSTGRES_USER'};
+
+    unless ( ($exitcode = system("$psql_command")) == 0 ) 
+    {
+	print STDERR "Could not create Postgres database for the DVN app!\n";
+	print STDERR "(command: " . $psql_command . ")\n";
+	print STDERR "(psql exit code: " . $exitcode . ")\n";
+	print STDERR "\naborting the installation (sorry!)\n\n";
+	exit 1; 
+    }
+
+
+# Changing back to root UID: 
+
+    $> = 0; 
+    $< = 0; 
+     
+    chdir ($cwd); 
+
+}
+else 
+{
+    if (0) # DEV. INSTALLER ONLY: 
+    {
+    print "\nIt is strongly recommended that you use a local PostgresQL server,\n";
+    print "running on localhost, in your development environment!\n\n";
+
+    print "Do you wish to continue?\n [y/n] ";
+
+
+    my $yesnocont = <>; chop $yesnocont;
+
+    while ( $yesnocont ne "y" && $yesnocont ne "n" )
+    {
+	print "Please enter 'y' or 'n'!\n";
+	print "(or ctrl-C to exit the installer)\n";
+	$yesnocont = <>; chop $yesnocont;
+    }
+
+    if ( $yesnocont eq "n" )
+    {
+	print "(aborting the installation)\n".
+	exit 0;
+    }
+    }
+
+    if ( $opts{$pg_only} )
+    {
+        print "The script must be run in the --pg_only mode ONLY locally,\n";
+        print "i.e., on the server where PostgresQL is running.\n";
+
+        exit 1; 
+    }
+    
+    print "In order to use a PostgresQL database running on a remote server,\n";
+    print "Please run this installer on that host with the \"--pg_only\" option:\n\n";
+    print "./install --pg_only\n\n";
+
+    print "Press any key to continue the installation process once that has been\n";
+    print "done. Or press ctrl-C to exit the installer.\n\n";
+
+    chdir ("/tmp");
+    system "stty cbreak </dev/tty >/dev/tty 2>&1";
+    my $key = getc(STDIN);
+    system "stty -cbreak </dev/tty >/dev/tty 2>&1";
+    print "\n";
+    chdir ($cwd);
+
+    # Check if the role and database have been created on the remote server:
+    # -- TODO; 
+
+    # Find out what Postgres version is running remotely:
+
+    $pg_major_version = 9;
+    $pg_minor_version = 1;
+
+    print "What version of PostgresQL is installed on the remote server?\n [" . $pg_major_version . "." . $pg_minor_version . "] ";
+
+
+    my $postgresVersion = <>; chop $postgresVersion;
+
+    while ( $postgresVersion ne "" && !($postgresVersion =~/^[0-9]+\.[0-9]+$/) )
+    {
+	print "Please enter valid Postgres version!\n";
+	print "(or ctrl-C to exit the installer)\n";
+	$postgresVersion = <>; chop $postgresVersion;
+    }
+
+    unless ( $postgresVersion eq "" )
+    {
+	my (@postgres_version_tokens) = split ( '\.', $postgresVersion ); 
+
+	unless ( ($postgres_version_tokens[0] == 8 && $postgres_version_tokens[1] >= 3) || ($postgres_version_tokens[0] >= 9) )
+	{
+	    print STDERR "\nERROR: PostgresQL version 8.3, or newer, is required!\n";
+	    print STDERR "Please make sure the right version of PostgresQL is properly installed\n";
+	    print STDERR "on the remote server, then try again.\n";
+	    
+	    exit 1; 
+	}
+
+	$pg_major_version = $postgres_version_tokens[0];
+	$pg_minor_version = $postgres_version_tokens[1];
+    }
+
+}
+
+
+if ( $postgresonly )
+{
+    print "\nOK, done.\n";
+    print "You can now resume the installation on the main DVN host.\n\n";
+    
+    exit 0; 
+}
+
+
+# 5. CONFIGURE GLASSFISH
+
+print "\nProceeding with the Glassfish setup.\n";
+print "\nChecking your Glassfish installation..."; 
+
+my $glassfish_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; 
+
+# 5a. CHECK IF GLASSFISH DIR LOOKS OK:
+
+print $glassfish_dir."/glassfish/domains/domain1";
+
+unless ( -d $glassfish_dir."/glassfish/domains/domain1" )
+{
+    # TODO: need better check than this
+
+    while ( ! ( -d $glassfish_dir."/glassfish/domains/domain1" ) )
+    {
+	print "\nInvalid Glassfish directory " . $glassfish_dir . "!\n";
+	print "Enter the root directory of your Glassfish installation:\n";
+	print "(Or ctrl-C to exit the installer): "; 
+
+	$glassfish_dir = <>; 
+	chop $glassfish_dir; 
+    }
+}
+
+print "OK!\n";
+
+# 5b. DETERMINE HOW MUCH MEMORY TO GIVE TO GLASSFISH AS HEAP:
+
+$gf_heap_default = "2048m"; 
+$sys_mem_total = 0; 
+
+if ( -e "/proc/meminfo" && open MEMINFO, "/proc/meminfo" ) 
+{
+    # Linux 
+
+    while ( $mline = <MEMINFO> )
+    {
+	if ( $mline =~ /MemTotal:[ \t]*([0-9]*) kB/ )
+	{
+	    $sys_mem_total = $1; 
+	}
+    }
+
+    close MEMINFO; 
+
+} 
+elsif ( -x "/usr/sbin/sysctl" ) 
+{
+    # MacOS X, probably...
+
+    $sys_mem_total = `/usr/sbin/sysctl -n hw.memsize`; 
+    chop $sys_mem_total;
+    if ($sys_mem_total > 0) 
+    {
+	$sys_mem_total = int ($sys_mem_total / 1024); 
+	# size in kb
+    }
+}
+
+if ( $sys_mem_total > 0 )
+{
+    # setting the default heap size limit to 3/8 of the available 
+    # amount of memory: 
+    $gf_heap_default = ( int ($sys_mem_total / (8 / 3 * 1024) ) ); 
+
+    print "\nSetting the heap limit for Glassfish to " . $gf_heap_default . "MB. \n"; 
+    print "You may need to adjust this setting to better suit \n";
+    print "your system.\n\n";
+
+    $gf_heap_default .= "m";
+
+}
+else 
+{
+    print "\nCould not determine the amount of memory on your system.\n";
+    print "Setting the heap limit for Glassfish to 2GB. You may need \n"; 
+    print "to  adjust the value to better suit your system.\n\n";
+}
+
+push @CONFIG_VARIABLES, "DEF_MEM_SIZE"; 
+$CONFIG_DEFAULTS{"DEF_MEM_SIZE"} = $gf_heap_default; 
+
+print "\nPress any key to continue...\n\n";
+
+system "stty cbreak </dev/tty >/dev/tty 2>&1";
+	my $key = getc(STDIN);
+	system "stty -cbreak </dev/tty >/dev/tty 2>&1";
+	print "\n";
+
+# 5c. GENERATE GLASSFISH CONFIGURATION FILE:
+
+print "\nWriting glassfish configuration file (domain.xml)... ";
+
+# 5cc. FIND THE "special-admin-indicator" IN THE ORIGINAL GLASSFISH CONFIG:
+
+open ( GFCNFG, $glassfish_dir."/glassfish/domains/domain1/config/domain.xml") || die $@; 
+
+while ( <GFCNFG> )
+{
+    if (/<secure-admin special-admin-indicator=\"([^\"]*)\"/)
+    {
+	$CONFIG_DEFAULTS{'GF_SPEC_INDICATOR'} = $1;
+    }
+}
+
+# (is it really a problem if we haven't found it?)
+
+close GFCNFG;
+
+open TEMPLATEIN, 'domain.xml.TEMPLATE'; 
+open CONFIGOUT, '>domain.xml';
+
+while( <TEMPLATEIN> )
+{
+    for $ENTRY (@CONFIG_VARIABLES)
+    {
+	$patin = '%' . $ENTRY . '%'; 
+	$patout = $CONFIG_DEFAULTS{$ENTRY}; 
+	
+	s/$patin/$patout/g;
+    }
+
+    print CONFIGOUT $_; 
+
+}
+
+close TEMPLATEIN; 
+close CONFIGOUT; 
+
+print "done.\n";
+
+system ("/bin/cp -f domain.xml ".$glassfish_dir."/glassfish/domains/domain1/config"); 
+#diagnostics needed!
+
+# check if the supllied config files are in the right place: 
+
+unless ( -f "config/logging.properties" )
+{
+    print "\nERROR! Configuration files not found in config dir!\n";
+    print "(are you running the installer in the right directory?\n";
+    print "Aborting...\n";
+    exit 1; 
+}
+
+print "\nCopying additional configuration files... ";
+
+system ( "/bin/cp -Rf config/* ".$glassfish_dir."/glassfish/domains/domain1/config"); 
+#diagnostics needed!
+
+# install pre-configured robots.txt blocking bot crawlers:
+system ( "/bin/cp -f robots.txt ".$glassfish_dir."/glassfish/domains/domain1/docroot"); 
+
+# install the DVN guides (HTML) into the application docroot: 
+system ( "/bin/cp -Rf doc/guides/* ".$glassfish_dir."/glassfish/domains/domain1/docroot/guides"); 
+
+
+print "done!\n";
+
+print "\nInstalling the Glassfish PostgresQL driver... ";
+
+my $install_driver_jar = "";
+
+if ( $pg_major_version == 8 ) 
+{
+    if ( $pg_minor_version == 3 ) 
+    {
+	$install_driver_jar = $POSTGRES_DRIVER_8_3;
+    }
+    elsif ( $pg_minor_version == 4 ) 
+    {
+	$install_driver_jar = $POSTGRES_DRIVER_8_4;
+    }
+}
+elsif ( $pg_major_version == 9 )
+{
+    if ( $pg_minor_version == 0 ) 
+    {
+	$install_driver_jar = $POSTGRES_DRIVER_9_0;
+    }
+    elsif ( $pg_minor_version == 1 ) 
+    {
+	$install_driver_jar = $POSTGRES_DRIVER_9_1;
+    }
+} 
+
+=poc
+unless ( $install_driver_jar ) 
+{
+    die "Installer could not find POSTGRES JDBC driver for your version of PostgresQL!\n";
+
+} 
+=cut
+
+system ( "/bin/cp", "pgdriver/" . $install_driver_jar, $glassfish_dir."/glassfish/lib"); 
+#diagnostics needed!
+
+print "done!\n";
+
+# 5d. STOP GLASSFISH (OK IF NOT RUNNING):
+print "\nStopping glassfish...\n";
+
+unless ( ($exit_code=system ($glassfish_dir."/bin/asadmin stop-domain domain1")) == 0 )
+{
+    print STDERR "(that's OK!)\n";
+}
+
+# 5dd. INSTALL PATCHED WEBCORE GLASSFISH MODULE: 
+
+$gf_webcore_jar = $glassfish_dir."/glassfish/modules/web-core.jar";
+
+system ("/bin/mv -f ".$gf_webcore_jar . " " . $gf_webcore_jar.".PRESERVED");
+system ("/bin/cp web-core.jar ".$gf_webcore_jar); 
+
+# 5ddd. DELETE EJB TIMER APP LOCK FILE, if exists (just in case!): 
+
+system ( "/bin/rm -f ".$glassfish_dir."/glassfish/domains/domain1/generated/ejb-timer-service-app" ); 
+
+# 5e. START GLASSFISH:
+print "\nStarting glassfish.\n";
+
+unless ( ($exit_code=system ($glassfish_dir."/bin/asadmin start-domain domain1")) == 0 )
+{
+	print STDERR "Could not start glassfish!\n";
+	print STDERR "(exit code: " . $exitcode . ")\n";
+	exit 1; 
+}
+
+
+# check if glassfish is running: 
+# TODO. 
+
+# 6. DEPLOY APPLICATION:
+# 6a. DO WE HAVE ANT? 
+#  (we are no longer using ant to deply -- L.A.)
+#
+#$sys_path = $ENV{'PATH'}; 
+#@sys_path_dirs = split ( ":", $sys_path ); 
+
+#$ant_exec = ""; 
+#
+#for $sys_path_dir ( @sys_path_dirs )
+#{
+#    if ( -x $sys_path_dir . "/ant" ) 
+#    {
+#	$ant_exec = $sys_path_dir . "/ant"; 
+#	last; 
+#    }
+#}
+#
+#if ( $ant_exec eq "" )
+#{
+#    print STDERR "\nERROR: I haven't been able to find ant command in your PATH!\n";
+#    print STDERR "Please make sure and is installed and in your PATH; then try again.\n\n";
+#
+#    exit 1; 
+#}
+# 6b. TRY TO DEPLOY:
+
+print "\nAttempting to deploy the application:\n\n";
+
+$CONFIG_DEFAULTS{'GLASSFISH_ADMIN_PASSWORD'} = 'adminadmin'; 
+# TODO: ask for password! -- in case they have already changed it
+# (update: chances are we don't even need the password anymore, as 
+# long as we are deploying locally (?))
+
+my $glassfish_password = $CONFIG_DEFAULTS{'GLASSFISH_ADMIN_PASSWORD'}; 
+
+# create deployment properties files:
+#   (these properties files are no longer used, because we are no longer
+#   using ant to deploy the app. -- L.A.)
+
+#for $prop_file ('AS', 'glassfish') 
+#{
+#    open ( TEMPLIN, "appdeploy/" . $prop_file . ".properties.TEMPLATE" ) 
+#	|| die "failed to open appdeploy/" . $prop_file . ".properties.TEMPLATE";
+#    open ( PROPOUT, ">appdeploy/" . $prop_file . ".properties" ) 
+#	|| die "failed to open appdeploy/" . $prop_file . ".properties for writing";
+#
+#    while( <TEMPLIN> )
+#    {
+#	s/%GF_ADMIN_PASSWORD%/$glassfish_password/g;
+#	s/%GF_ROOT_DIR%/$glassfish_dir/g;
+#	print PROPOUT $_; 
+#    }
+#
+#    close TEMPLIN; 
+#    close PROPOUT; 
+#}
+
+# Create the .asadminpass file, or replace it, if exists:
+
+$asadminpass_file = $ENV{'HOME'} . "/.asadminpass";
+
+if ( -e $asadminpass_file )
+{
+    system ("/bin/mv -f " . $asadminpass_file . " " . $asadminpass_file . ".PRESERVED");
+}
+
+system ("echo 'asadmin://admin@localhost:4848 ' > " . $asadminpass_file); 
+
+$deploy_command = $glassfish_dir."/bin/asadmin deploy --force=true --name=DVN-web dist/DVN-web.war";
+
+unless ( ($exit_code = system ("cd appdeploy; " . $deploy_command)) == 0 )
+{
+	print STDERR "Could not deploy DVN application!\n";
+	print STDERR "(exit code: " . $exitcode . ")\n";
+	exit 1; 
+}
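+# (As a quick manual sanity check -- if the deploy succeeded, DVN-web
+#  should now be listed by:
+#      $glassfish_dir/bin/asadmin list-applications )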
+
+if ( $pg_local_connection )
+{
+    print "\nOK; now we are going to stop glassfish and populate the database with\n";
+    print "some initial content, then start glassfish again.\n";
+}
+else
+{
+    print "\nOK; stopping glasfish.\n";
+}
+
+
+# 6c. SHUT DOWN:
+
+$gf_stop_command = $glassfish_dir."/bin/asadmin stop-domain domain1"; 
+
+unless ( ($exit_code = system ($gf_stop_command)) == 0 )
+{
+	print STDERR "Could not stop glassfish!\n";
+	print STDERR "(command line: " . $gf_stop_command . ")\n";
+	print STDERR "(exit code: " . $exitcode . ")\n";
+	print STDERR "\nPlease finish the installation process manually: \n";
+	print STDERR "stop/kill glassfish; then populate the database with \n";
+	print STDERR "the supplied initial content, by executing the following \n";
+	print STDERR "command, *as Unix user postgres*: \n\n";
+
+	$psql_command = $psql_exec . "/psql -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f " . $SQL_REFERENCE_DATA;
+
+	print STDERR $psql_command . "\n\n";
+	print STDERR "Then start glassfish again... Voila, you should then have \n";
+	print STDERR "a running DVN instance at the following URL:\n\n";
+	print STDERR "  http://" . $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} . "/dvn\n";
+
+	print STDERR "\naborting the installer... (sorry!)\n";
+
+	exit 1; 
+}
+
+# 7. POPULATE DATABASE:
+
+if ( $pg_local_connection )
+{
+    # 7a. POPULATE LOCALLY:
+    print "\nPopulating the database (local PostgresQL instance):\n\n";
+
+    # Copy the SQL file to /tmp, where user postgres will definitely 
+    # have read access to it: 
+
+    copy("referenceData.sql","/tmp") or die "Could not copy referenceData.sql to /tmp: $!";
+
+    $< = $POSTGRES_SYS_UID; 
+    $> = $POSTGRES_SYS_UID; 
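+    # (Setting $< and $> changes the real and effective UIDs to the
+    #  postgres system user, so that the psql below can authenticate
+    #  locally as "postgres" via ident/peer authentication.)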
+    chdir ("/tmp"); 
+    $psql_command = $psql_exec . "/psql -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f referenceData.sql";
+
+    unless ( ($exit_code = system("$psql_command")) == 0 ) 
+    {
+	print STDERR "Could not populate Postgres database for the DVN app!\n";
+	print STDERR "(command: " . $psql_command . ")\n";
+	print STDERR "(psql exit code: " . $exitcode . ")\n";
+	print STDERR "\nYou must populate the database before you can use your new\n";
+	print STDERR "DVN instance. Please consult the installation manual and/or\n";
+	print STDERR "seek support from the DVN team.\n\n";
+	exit 1; 
+	
+    }
+
+    chdir ($cwd); 
+    print "\nOK, done!\n";
+
+}
+else 
+{
+    # 7b. INSTRUCT THE USER TO POPULATE THE DB ON THE REMOTE SERVER:
+    # NOT SUPPORTED YET -- TODO
+    print "The database needs to be populated with some intial content \n"; 
+    print "before we restart the DVN one last time. \n";
+    print "However, populating a database on a remote PostgresQL server "; 
+    print "is not supported yet!\n";
+    print "Please copy the file referenceData.sql (found in this directory)\n";
+    print "onto the remote server and populate the database manually,\n";
+    print "as user postgres, with the following command:\n\n";
+    print "   psql -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f referenceData.sql\n";
+    print "then start glassfish again on this server with \n\n";
+    print "   " . $glassfish_dir."/bin/asadmin start-domain domain1\n\n";
+
+    $> = 0; 
+    $< = 0; 
+
+    exit 0; 
+    
+}
+
+# back to root:
+
+$> = 0; 
+$< = 0; 
+
+# 8. START GLASSFISH AGAIN:
+print "\nStarting glassfish, again:\n\n";
+
+$gf_start_command = $glassfish_dir."/bin/asadmin start-domain domain1"; 
+
+# delete the EJB TIMER app lock file, if exists (just in case!): 
+system ( "/bin/rm -f ".$glassfish_dir."/glassfish/domains/domain1/generated/ejb-timer-service-app" ); 
+
+unless ( ($exit_code = system ($gf_start_command)) == 0 )
+{
+	print STDERR "Could not start glassfish!\n";
+	print STDERR "(command line: " . $gf_start_command . ")\n";
+	print STDERR "(exit code: " . $exit_code . ")\n";
+	exit 1; 
+}
+
+
+print "\nYou should now have a running DVN instance;\n";
+print "Please go to the application at the following URL:\n\n";
+print "  http://" . $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} . "/dvn\n";
+print "\nand log in by using \"networkAdmin\" as both the user name\n";
+print "and password. Click the \"networkAdmin\" link on the right side\n";
+print "Of the main screen, then click \"Update Account\". Change this\n";
+print "default password and default e-mail address.\n";
+
+# 9. FINALLY, CHECK IF RSERVE IS RUNNING: 
+print "\n\nFinally, checking if Rserve is running and accessible...\n";
+
+unless ( $CONFIG_DEFAULTS{'RSERVE_PORT'}=~/^[0-9][0-9]*$/ )
+{
+    print $CONFIG_DEFAULTS{'RSERVE_PORT'} . " does not look like a valid port number,\n";
+    print "defaulting to 6311.\n\n";
+
+    $CONFIG_DEFAULTS{'RSERVE_PORT'} = 6311; 
+}
+    
+my ( $rserve_iaddr, $rserve_paddr, $rserve_proto );
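+# (inet_aton, sockaddr_in, PF_INET and SOCK_STREAM come from Perl's core
+#  Socket module, presumably use'd at the top of this script;
+#  getprotobyname is a Perl built-in.)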
+
+unless ( $rserve_iaddr = inet_aton($CONFIG_DEFAULTS{'RSERVE_HOST'}) )
+{
+    print STDERR "Could not look up $CONFIG_DEFAULTS{'RSERVE_HOST'},\n";
+    print STDERR "the host you specified as your R server.\n";
+    print STDERR "\nDVN can function without a working R server, but\n";
+    print STDERR "much of the functionality concerning running statistics\n";
+    print STDERR "and analysis on quantitative data will not be available.\n";
+    print STDERR "Please consult the Installers guide for more info.\n";
+
+    exit 0;
+}
+
+$rserve_paddr = sockaddr_in($CONFIG_DEFAULTS{'RSERVE_PORT'}, $rserve_iaddr);
+$rserve_proto = getprotobyname('tcp');
+
+unless ( socket(SOCK, PF_INET, SOCK_STREAM, $rserve_proto) &&
+	connect(SOCK, $rserve_paddr) ) 
+{
+    print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'RSERVE_HOST'}\n";
+    print STDERR "on port $CONFIG_DEFAULTS{'RSERVE_PORT'}, the address you provided\n";
+    print STDERR "for your R server.\n";
+    print STDERR "DVN can function without a working R server, but\n";
+    print STDERR "much of the functionality concerning running statistics\n";
+    print STDERR "and analysis on quantitative data will not be available.\n";
+    print STDERR "Please consult the \"Installing R\" section in the Installers guide\n";
+    print STDERR "for more info.\n";
+
+    exit 0;
+    
+}
+
+close (SOCK); 
+print "\nOK!\n";
+
+exit 0; 
+
+
+sub create_pg_hash {
+    my $pg_username = shift @_; 
+    my $pg_password = shift @_; 
+
+    # PostgreSQL's md5 scheme hashes the password with the username
+    # appended:
+    my $encode_line = $pg_password . $pg_username; 
+
+    ##print STDERR "executing /bin/echo -n $encode_line | md5sum\n"; 
+
+    my $hash;
+
+    if ( $WORKING_OS eq "MacOSX" )
+    {
+	# MacOS X ships md5 rather than md5sum:
+	$hash = `/bin/echo -n $encode_line | md5`; 
+    }
+    else 
+    {
+	# Linux (e.g., RedHat):
+	$hash = `/bin/echo -n $encode_line | md5sum`; 
+    }
+
+    chomp $hash; 
+
+    # strip the "  -" suffix that md5sum appends when reading from stdin:
+    $hash =~ s/  \-$//; 
+
+    if ( (length($hash) != 32) || ($hash !~ /^[0-9a-f]*$/) ) 
+    {
+	print STDERR "Failed to generate an MD5 password hash for the Postgres database.\n";
+	exit 1; 
+    }
+
+    return $hash;
+}
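+# (A minimal, more portable sketch for the hashing above, using the core
+#  Digest::MD5 module instead of shelling out to md5/md5sum:
+#
+#      use Digest::MD5 qw(md5_hex);
+#      my $hash = md5_hex($pg_password . $pg_username);
+#
+#  -- same 32-character hex digest, no external binaries needed.)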
Binary file DVN-web/installer/dvninstall/pgdriver/postgresql-8.3-603.jdbc4.jar has changed
Binary file DVN-web/installer/dvninstall/pgdriver/postgresql-8.4-703.jdbc4.jar has changed
Binary file DVN-web/installer/dvninstall/pgdriver/postgresql-9.0-802.jdbc4.jar has changed
Binary file DVN-web/installer/dvninstall/pgdriver/postgresql-9.1-902.jdbc4.jar has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/referenceData.sql	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,1223 @@
+--
+-- PostgreSQL database dump
+--
+
+-- Started on 2006-09-19 16:05:05 Eastern Standard Time
+
+SET client_encoding = 'UTF8';
+SET check_function_bodies = false;
+SET client_min_messages = warning;
+
+SET search_path = public, pg_catalog;
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('metadata', 'id'), 10, false);
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('template', 'id'), 10, false);
+
+
+--
+-- TOC entry 1840 (class 0 OID 0)
+-- Dependencies: 1304
+-- Name: datatable_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('pagedef', 'id'), 500, false);
+
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('datatable', 'id'), 1, false);
+
+
+--
+-- TOC entry 1841 (class 0 OID 0)
+-- Dependencies: 1291
+-- Name: datavariable_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('datavariable', 'id'), 1, false);
+
+
+--
+-- TOC entry 1842 (class 0 OID 0)
+-- Dependencies: 1297
+-- Name: fieldinputlevel_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('fieldinputlevel', 'id'), 10, false);
+
+
+
+
+
+--
+-- TOC entry 1844 (class 0 OID 0)
+-- Dependencies: 1287
+-- Name: logindomain_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('logindomain', 'id'), 1, false);
+
+
+
+
+--
+-- TOC entry 1846 (class 0 OID 0)
+-- Dependencies: 1312
+-- Name: role_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('"role"', 'id'), 10, false);
+
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('"networkrole"', 'id'), 10, false);
+
+--
+-- TOC entry 1848 (class 0 OID 0)
+-- Dependencies: 1272
+-- Name: studyfield_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('studyfield', 'id'), 150, true);
+
+
+
+
+
+--
+-- TOC entry 1851 (class 0 OID 0)
+-- Dependencies: 1270
+-- Name: studyfile_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('studyfile', 'id'), 1, false);
+
+
+
+
+
+--
+-- TOC entry 1856 (class 0 OID 0)
+-- Dependencies: 1302
+-- Name: usergroup_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('usergroup', 'id'), 1, false);
+
+
+
+--
+-- TOC entry 1859 (class 0 OID 0)
+-- Dependencies: 1299
+-- Name: vdcgroup_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('vdcgroup', 'id'), 1, false);
+
+
+--
+-- TOC entry 1860 (class 0 OID 0)
+-- Dependencies: 1289
+-- Name: vdcnetwork_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('vdcnetwork', 'id'), 1, false);
+
+
+--
+-- TOC entry 1861 (class 0 OID 0)
+-- Dependencies: 1294
+-- Name: vdcuser_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('vdcuser', 'id'), 10, false);
+
+
+
+--
+-- TOC entry 1813 (class 0 OID 113837)
+-- Dependencies: 1274
+-- Data for Name: coll_adv_search_fields; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE coll_adv_search_fields DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE coll_adv_search_fields ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1818 (class 0 OID 113863)
+-- Dependencies: 1281
+-- Data for Name: coll_any_search_fields; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE coll_any_search_fields DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE coll_any_search_fields ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1804 (class 0 OID 113774)
+-- Dependencies: 1259
+-- Data for Name: coll_search_result_fields; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE coll_search_result_fields DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE coll_search_result_fields ENABLE TRIGGER ALL;
+
+
+
+
+
+ALTER TABLE datatable ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1825 (class 0 OID 113902)
+-- Dependencies: 1292
+-- Data for Name: datavariable; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE datavariable DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE datavariable ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1829 (class 0 OID 113927)
+-- Dependencies: 1298
+-- Data for Name: fieldinputlevel; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE fieldinputlevel DISABLE TRIGGER ALL;
+
+INSERT INTO fieldinputlevel (id, name) VALUES (1, 'required');
+INSERT INTO fieldinputlevel (id, name) VALUES (2, 'recommended');
+INSERT INTO fieldinputlevel (id, name) VALUES (3, 'optional');
+
+
+
+ALTER TABLE fieldinputlevel ENABLE TRIGGER ALL;
+
+
+
+--
+-- TOC entry 1823 (class 0 OID 113888)
+-- Dependencies: 1288
+-- Data for Name: logindomain; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE logindomain DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE logindomain ENABLE TRIGGER ALL;
+
+
+
+--
+-- TOC entry 1838 (class 0 OID 113987)
+-- Dependencies: 1313
+-- Data for Name: role; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE "role" DISABLE TRIGGER ALL;
+INSERT INTO "role" (id, name) VALUES (1, 'contributor');
+INSERT INTO "role" (id, name) VALUES (2, 'curator');
+INSERT INTO "role" (id, name) VALUES (3, 'admin');
+INSERT INTO "role" (id, name) VALUES (4, 'privileged viewer');
+ALTER TABLE "role" ENABLE TRIGGER ALL;
+
+ALTER TABLE "networkrole" DISABLE TRIGGER ALL;
+INSERT INTO networkrole (id, name) VALUES (1, 'Creator');
+INSERT INTO networkrole (id, name) VALUES (2, 'Admin');
+ALTER TABLE "networkrole" ENABLE TRIGGER ALL;
+
+ALTER TABLE pagedef DISABLE TRIGGER ALL;
+
+
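+-- (Note: role_id maps to the ids inserted into "role" above --
+--  1=contributor, 2=curator, 3=admin, 4=privileged viewer -- and
+--  networkrole_id maps to "networkrole" -- 1=Creator, 2=Admin.
+--  NULL means the page requires no such role.)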
+-- Pages that don't require role authorization
+INSERT INTO pagedef (name, path, role_id, networkrole_id ) VALUES  ( 'StudyPage', '/study/StudyPage.xhtml', null,null );
+INSERT INTO pagedef (name, path, role_id, networkrole_id ) VALUES  ( 'SubsettingPage', '/subsetting/SubsettingPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ExploreDataPage','/viz/ExploreDataPage.xhtml',null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ErrorPage', '/ErrorPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'HomePage', '/HomePage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'UnauthorizedPage', '/login/UnauthorizedPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'StudyLockedPage', '/login/StudyLockedPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'LogoutPage', '/login/LogoutPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AddAccountPage', '/login/AddAccountPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditAccountPage', '/login/EditAccountPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AccountOptionsPage', '/login/AccountOptionsPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AccountPage', '/login/AccountPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'LoginPage', '/login/LoginPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ForgotPasswordPage', '/login/ForgotPasswordPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ContributorRequestPage', '/login/ContributorRequestPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ContributorRequestInfoPage', '/login/ContributorRequestInfoPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'CreatorRequestPage','/login/CreatorRequestPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'CreatorRequestInfoPage','/login/CreatorRequestInfoPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'TermsOfUsePage','/login/TermsOfUsePage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AccountTermsOfUsePage','/login/AccountTermsOfUsePage.xhtml', null,null );
+INSERT INTO pagedef (name, path, role_id, networkrole_id ) VALUES  ( 'StudyVersionDifferencesPage', '/study/StudyVersionDifferencesPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'OptionsPage','/admin/OptionsPage.xhtml',null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageStudiesPage','/study/ManageStudiesPage.xhtml',null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManifestPage', '/ManifestPage.xhtml', null,null );
+
+-- Pages that require VDC Role authorization:
+-- Contributor Role 
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditStudyPage','/study/EditStudyPage.xhtml',1,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditStudyFilesPage','/study/EditStudyFilesPage.xhtml',1,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AddFilesPage','/study/AddFilesPage.xhtml',1,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'SetUpDataExplorationPage','/study/SetUpDataExplorationPage.xhtml',1,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'DeleteStudyPage','/study/DeleteStudyPage.xhtml',1,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'MyDataversePage','/networkAdmin/MyDataversePage.xhtml',null,null );
+
+-- Curator Role
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditCollectionPage','/collection/EditCollectionPage.xhtml',2,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageCollectionsPage','/collection/ManageCollectionsPage.xhtml',2,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'StudyPermissionsPage','/study/StudyPermissionsPage.xhtml',2,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'DeaccessionStudyPage', '/study/DeaccessionStudyPage.xhtml', 2, null );
+
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageTemplatesPage', '/admin/ManageTemplatesPage.xhtml', 2,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'TemplateFormPage','/study/TemplateFormPage.xhtml',2,2 );
+
+-- Admin Role
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditSitePage', '/site/EditSitePage.xhtml', 3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditBannerFooterPage','/admin/EditBannerFooterPage.xhtml',3,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditContactUsPage','/admin/EditContactUsPage.xhtml',3,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditHomePanelsPage','/admin/EditHomePanelsPage.xhtml',3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditStudyCommentsPage', '/admin/EditStudyCommentsPage.xhtml', 3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditUserTermsPage','/admin/EditUseTermsPage.xhtml',3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditDepositUseTermsPage','/admin/EditDepositUseTermsPage.xhtml',3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'PrivilegedUsersPage','/admin/PrivilegedUsersPage.xhtml',3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'SearchFieldsPage','/admin/SearchFieldsPage.xhtml',3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'PromotionalLinkSearchBoxPage','/admin/PromotionalLinkSearchBoxPage.xhtml',3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditLockssConfigPage','/admin/EditLockssConfigPage.xhtml',3,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditGuestbookQuestionnairePage', '/admin/EditGuestbookQuestionnairePage.xhtml', 3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'GuestBookResponseDataPage', '/admin/GuestBookResponseDataPage.xhtml', 3,2 );
+-- Pages that require Network Role authorization
+-- Creator Role 
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AddSitePage', '/site/AddSitePage.xhtml', null,1 );
+-- Admin Role
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'NetworkOptionsPage', '/networkAdmin/NetworkOptionsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'HarvestSitesPage', '/site/HarvestSitesPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AddClassificationsPage', '/networkAdmin/AddClassificationsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageClassificationsPage', '/networkAdmin/ManageClassificationsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageControlledVocabularyPage', '/admin/ManageControlledVocabularyPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'CommentReviewPage', '/networkAdmin/CommentReviewPage.xhtml', null, 2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageDataversesPage', '/networkAdmin/ManageDataversesPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditHarvestSitePage', '/site/EditHarvestSitePage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkNamePage', '/networkAdmin/EditNetworkNamePage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'NetworkPrivilegedUsersPage', '/networkAdmin/NetworkPrivilegedUsersPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AllUsersPage', '/networkAdmin/AllUsersPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkAnnouncementsPage', '/networkAdmin/EditNetworkAnnouncementsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkBannerFooterPage', '/networkAdmin/EditNetworkBannerFooterPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditExportSchedulePage', '/networkAdmin/EditExportSchedulePage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditOAISetPage', '/networkAdmin/EditOAISetPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkDownloadUseTermsPage', '/networkAdmin/EditNetworkDownloadUseTermsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkDepositUseTermsPage', '/networkAdmin/EditNetworkDepositUseTermsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditAccountUseTermsPage', '/networkAdmin/EditAccountUseTermsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditUserGroupPage', '/networkAdmin/EditUserGroupPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'UserGroupsPage', '/networkAdmin/UserGroupsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ImportStudyPage', '/networkAdmin/ImportStudyPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'UtilitiesPage', '/networkAdmin/UtilitiesPage.xhtml', null, 2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditSubnetworkPage', '/networkAdmin/EditSubnetworkPage.xhtml', null, 2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageSubnetworksPage', '/networkAdmin/ManageSubnetworksPage.xhtml', null, 2 );
+
+ALTER TABLE pagedef ENABLE TRIGGER ALL;
+
+
+--
+-- TOC entry 1821 (class 0 OID 113878)
+-- Dependencies: 1285
+-- Data for Name: search_result_fields; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE search_result_fields DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE search_result_fields ENABLE TRIGGER ALL;
+
+
+--
+-- TOC entry 1826 (class 0 OID 113907)
+-- Dependencies: 1293
+-- Data for Name: study_studyfield; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE study_studyfield DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE study_studyfield ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1817 (class 0 OID 113859)
+-- Dependencies: 1280
+-- Data for Name: study_usergroup; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE study_usergroup DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE study_usergroup ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1812 (class 0 OID 113829)
+-- Dependencies: 1273
+-- Data for Name: studyfield; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE studyfield DISABLE TRIGGER ALL;
+
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (1, 'Title', 'Title', 'title', TRUE, TRUE, TRUE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (2, 'Study ID', 'Study ID', 'studyId', TRUE, TRUE, TRUE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (3, 'Author', 'Author', 'author', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (4, 'Author Affiliation', 'Author Affiliation', 'authorAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (5, 'Producer', 'Producer', 'producer', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (6, 'Producer URL', 'Producer URL', 'producerURL', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (7, 'URL to Producer Logo', 'URL to Producer Logo', 'producerLogo', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (8, 'Producer Name Abbreviation', 'Producer Name Abbreviation', 'producerAbbreviation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (9, 'Production Date', 'Production Date', 'productionDate', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (10, 'Software', 'Software', 'software', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (11, 'Software Version', 'Software Version', 'softwareVersion', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (12, 'Funding Agency', 'Funding Agency', 'fundingAgency', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (13, 'Grant Number', 'Grant Number', 'grantNumber', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (14, 'Grant Number Agency', 'Grant Number Agency', 'grantNumberAgency', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (15, '', '', 'distributor', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (16, '', '', 'distributorURL', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (17, '', '', 'distributorLogo', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (18, '', '', 'distributionDate', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (19, '', '', 'distributorContact', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (20, '', '', 'distributorContactAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (21, '', '', 'distributorContactEmail', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (22, '', '', 'depositor', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (23, '', '', 'dateOfDeposit', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (24, '', '', 'series', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (25, '', '', 'seriesInformation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (26, '', '', 'studyVersion', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (27, '', '', 'keyword', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (28, '', '', 'keywordVocab', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (29, '', '', 'keywordVocabURI', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (30, '', '', 'topicClassification', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (31, '', '', 'topicClassVocab', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (32, '', '', 'topicClassVocabURI', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (33, '', '', 'description', FALSE, TRUE, TRUE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (34, '', '', 'descriptionDate', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (35, '', '', 'timePeriodCoveredStart', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (36, '', '', 'timePeriodCoveredEnd', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (37, '', '', 'dateOfCollectionStart', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (38, '', '', 'dateOfCollectionEnd', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (39, '', '', 'country', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (40, '', '', 'geographicCoverage', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (41, '', '', 'geographicUnit', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (42, '', '', 'unitOfAnalysis', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (43, '', '', 'universe', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (44, '', '', 'kindOfData', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (45, '', '', 'timeMethod', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (46, '', '', 'dataCollector', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (47, '', '', 'frequencyOfDataCollection', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (48, '', '', 'samplingProcedure', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (49, '', '', 'deviationsFromSampleDesign', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (50, '', '', 'collectionMode', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (51, '', '', 'researchInstrument', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (52, '', '', 'dataSources', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (53, '', '', 'originOfSources', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (54, '', '', 'characteristicOfSources', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (55, '', '', 'accessToSources', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (56, '', '', 'dataCollectionSituation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (57, '', '', 'actionsToMinimizeLoss', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (58, '', '', 'controlOperations', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (59, '', '', 'weighting', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (60, '', '', 'cleaningOperations', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (61, '', '', 'studyLevelErrorNotes', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (62, '', '', 'responseRate', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (63, '', '', 'samplingErrorEstimates', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (64, '', '', 'otherDataAppraisal', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (65, '', '', 'placeOfAccess', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (66, '', '', 'originalArchive', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (67, '', '', 'availabilityStatus', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (68, '', '', 'collectionSize', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (69, '', '', 'studyCompletion', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (70, '', '', 'confidentialityDeclaration', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (71, '', '', 'specialPermissions', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (72, '', '', 'restrictions', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (73, '', '', 'contact', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (74, '', '', 'citationRequirements', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (75, '', '', 'depositorRequirements', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (76, '', '', 'conditions', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (77, '', '', 'disclaimer', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (78, '', '', 'relatedMaterial', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (79, '', '', 'publication', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (80, '', '', 'relatedStudies', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (81, '', '', 'otherReferences', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (82, '', '', 'notesText', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (83, '', '', 'note', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (84, '', '', 'notesInformationSubject', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (85, '', '', 'otherId', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (86, '', '', 'otherIdAgency', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (87, '', '', 'productionPlace', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (88, '', '', 'numberOfFiles', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (89, '', '', 'publicationReplicationData', FALSE, TRUE, FALSE, FALSE, FALSE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (90, '', '', 'subTitle', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (91, '', '', 'versionDate', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (92, '', '', 'geographicBoundingBox', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (93, '', '', 'eastLongitude', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (94, '', '', 'northLatitude', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (95, '', '', 'southLatitude', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (96, '', '', 'producerAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (97, '', '', 'distributorAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (98, '', '', 'distributorAbbreviation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (99, 'Author', 'Author', 'authorName', TRUE, TRUE, TRUE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (100, '', '', 'producerName', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (101, '', '', 'distributorName', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (102, '', '', 'distributorContactName', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (103, '', '', 'descriptionText', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (104, '', '', 'keywordValue', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (105, '', '', 'topicClassValue', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (106, '', '', 'otherIdValue', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (107, '', '', 'softwareName', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (108, '', '', 'grantNumberValue', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (109, '', '', 'seriesName', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (110, '', '', 'studyVersionValue', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (111, '', '', 'westLongitude', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (112, '', '', 'noteInformationType', FALSE, FALSE, FALSE, FALSE, TRUE );
+
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (113, '', '', 'publicationCitation', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (114, '', '', 'publicationIDType', FALSE, FALSE, FALSE, FALSE, FALSE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (115, '', '', 'publicationIDNumber', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (116, '', '', 'publicationURL', FALSE, FALSE, FALSE, FALSE, TRUE );
+
+-- set the parent-child relationships
+update studyfield set parentstudyfield_id = 3 where id = 99;
+update studyfield set parentstudyfield_id = 3 where id = 4;
+
+update studyfield set parentstudyfield_id = 5 where id = 100;
+update studyfield set parentstudyfield_id = 5 where id = 6;
+update studyfield set parentstudyfield_id = 5 where id = 7;
+update studyfield set parentstudyfield_id = 5 where id = 8;
+update studyfield set parentstudyfield_id = 5 where id = 96;
+
+update studyfield set parentstudyfield_id = 15 where id = 101;
+update studyfield set parentstudyfield_id = 15 where id = 16;
+update studyfield set parentstudyfield_id = 15 where id = 17;
+update studyfield set parentstudyfield_id = 15 where id = 97;
+update studyfield set parentstudyfield_id = 15 where id = 98;
+
+update studyfield set parentstudyfield_id = 19 where id = 102;
+update studyfield set parentstudyfield_id = 19 where id = 20;
+update studyfield set parentstudyfield_id = 19 where id = 21;
+
+update studyfield set parentstudyfield_id = 33 where id = 103;
+update studyfield set parentstudyfield_id = 33 where id = 34;
+
+update studyfield set parentstudyfield_id = 27 where id = 104;
+update studyfield set parentstudyfield_id = 27 where id = 28;
+update studyfield set parentstudyfield_id = 27 where id = 29;
+
+update studyfield set parentstudyfield_id = 30 where id = 105;
+update studyfield set parentstudyfield_id = 30 where id = 31;
+update studyfield set parentstudyfield_id = 30 where id = 32;
+
+update studyfield set parentstudyfield_id = 85 where id = 106;
+update studyfield set parentstudyfield_id = 85 where id = 86;
+
+update studyfield set parentstudyfield_id = 10 where id = 107;
+update studyfield set parentstudyfield_id = 10 where id = 11;
+
+update studyfield set parentstudyfield_id = 13 where id = 108;
+update studyfield set parentstudyfield_id = 13 where id = 14;
+
+update studyfield set parentstudyfield_id = 24 where id = 109;
+update studyfield set parentstudyfield_id = 24 where id = 25;
+
+update studyfield set parentstudyfield_id = 26 where id = 110;
+update studyfield set parentstudyfield_id = 26 where id = 91;
+
+update studyfield set parentstudyfield_id = 92 where id = 111;
+update studyfield set parentstudyfield_id = 92 where id = 93;
+update studyfield set parentstudyfield_id = 92 where id = 94;
+update studyfield set parentstudyfield_id = 92 where id = 95;
+
+update studyfield set parentstudyfield_id = 83 where id = 112;
+update studyfield set parentstudyfield_id = 83 where id = 82;
+update studyfield set parentstudyfield_id = 83 where id = 84;
+
+update studyfield set parentstudyfield_id = 79 where id = 113;
+update studyfield set parentstudyfield_id = 79 where id = 114;
+update studyfield set parentstudyfield_id = 79 where id = 115;
+update studyfield set parentstudyfield_id = 79 where id = 116;
+update studyfield set parentstudyfield_id = 79 where id = 89;
+
+update studyfield set displayorder = 0 where name = 'authorName';
+update studyfield set displayorder = 2 where name = 'authorAffiliation';
+update studyfield set displayorder = 2 where name = 'producerAbbreviation';
+update studyfield set displayorder = 1 where name = 'producerName';
+update studyfield set displayorder = 3 where name = 'producerAffiliation';
+update studyfield set displayorder = 4 where name = 'producerURL';
+update studyfield set displayorder = 5 where name = 'producerLogo';
+update studyfield set displayorder = 2 where name = 'softwareVersion';
+update studyfield set displayorder = 1 where name = 'softwareName';
+update studyfield set displayorder = 1 where name = 'grantNumberValue';
+update studyfield set displayorder = 2 where name = 'grantNumberAgency';
+update studyfield set displayorder = 1 where name = 'distributorName';
+update studyfield set displayorder = 4 where name = 'distributorURL';
+update studyfield set displayorder = 5 where name = 'distributorLogo';
+update studyfield set displayorder = 3 where name = 'distributorAffiliation';
+update studyfield set displayorder = 2 where name = 'distributorAbbreviation';
+update studyfield set displayorder = 1 where name = 'distributorContactName';
+update studyfield set displayorder = 2 where name = 'distributorContactAffiliation';
+update studyfield set displayorder = 3 where name = 'distributorContactEmail';
+update studyfield set displayorder = 2 where name = 'seriesInformation';
+update studyfield set displayorder = 1 where name = 'seriesName';
+update studyfield set displayorder = 1 where name = 'studyVersionValue';
+update studyfield set displayorder = 2 where name = 'versionDate';
+update studyfield set displayorder = 1 where name = 'keywordValue';
+update studyfield set displayorder = 3 where name = 'keywordVocabURI';
+update studyfield set displayorder = 2 where name = 'keywordVocab';
+update studyfield set displayorder = 1 where name = 'topicClassValue';
+update studyfield set displayorder = 2 where name = 'topicClassVocab';
+update studyfield set displayorder = 3 where name = 'topicClassVocabURI';
+update studyfield set displayorder = 1 where name = 'descriptionText';
+update studyfield set displayorder = 2 where name = 'descriptionDate';
+update studyfield set displayorder = 1 where name = 'publicationCitation';
+update studyfield set displayorder = 2 where name = 'publicationIDNumber';
+update studyfield set displayorder = 3 where name = 'publicationURL';
+update studyfield set displayorder = 3 where name = 'notesText';
+update studyfield set displayorder = 1 where name = 'noteInformationType';
+update studyfield set displayorder = 2 where name = 'notesInformationSubject';
+update studyfield set displayorder = 2 where name = 'otherIdAgency';
+update studyfield set displayorder = 1 where name = 'otherIdValue';
+
+
+update studyfield set fieldtype = 'date' where id in (9, 18, 23, 34, 35, 36, 37, 38, 91);
+update studyfield set fieldtype = 'email' where id = 21;
+update studyfield set fieldtype = 'textBox' where id in (
+    4, 8, 11, 12, 13, 14, 19, 20, 22, 24, 25, 26, 27, 28, 30, 31, 33,
+    39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
+    56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,
+    73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89,
+    92, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109,
+    110, 112, 113, 115);
+update studyfield set fieldtype = 'url' where id in (6, 7, 16, 17, 29, 32, 116);
+
+ALTER TABLE studyfield ENABLE TRIGGER ALL;
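+
+-- Note: every bulk-load section in this script is bracketed by DISABLE/ENABLE
+-- TRIGGER ALL, which suspends user triggers and (when run as a superuser) the
+-- internal foreign-key constraint triggers while rows are loaded out of
+-- dependency order. The idiom, with a placeholder table name:
+--   ALTER TABLE sometable DISABLE TRIGGER ALL;
+--   -- bulk INSERT / UPDATE statements ...
+--   ALTER TABLE sometable ENABLE TRIGGER ALL;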
+
+
+
+
+
+--
+-- TOC entry 1811 (class 0 OID 113819)
+-- Dependencies: 1271
+-- Data for Name: studyfile; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE studyfile DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE studyfile ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1810 (class 0 OID 113813)
+-- Dependencies: 1269
+-- Data for Name: studyfile_usergroup; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE studyfile_usergroup DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE studyfile_usergroup ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1835 (class 0 OID 113964)
+-- Dependencies: 1307
+-- Data for Name: summary_fields; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE summary_fields DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE summary_fields ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1809 (class 0 OID 113808)
+-- Dependencies: 1268
+-- Data for Name: template; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE "template" DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE "template" ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1807 (class 0 OID 113797)
+-- Dependencies: 1265
+-- Data for Name: templatefield; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE templatefield DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE templatefield ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1832 (class 0 OID 113945)
+-- Dependencies: 1303
+-- Data for Name: usergroup; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE usergroup DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE usergroup ENABLE TRIGGER ALL;
+
+
+--
+-- TOC entry 1831 (class 0 OID 113939)
+-- Dependencies: 1301
+-- Data for Name: vdc_adv_search_fields; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE vdc_adv_search_fields DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE vdc_adv_search_fields ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1801 (class 0 OID 113756)
+-- Dependencies: 1255
+-- Data for Name: vdc_any_search_fields; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE vdc_any_search_fields DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE vdc_any_search_fields ENABLE TRIGGER ALL;
+
+
+--
+-- TOC entry 1808 (class 0 OID 113802)
+-- Dependencies: 1266
+-- Data for Name: vdc_usergroup; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE vdc_usergroup DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE vdc_usergroup ENABLE TRIGGER ALL;
+
+
+--
+-- TOC entry 1830 (class 0 OID 113934)
+-- Dependencies: 1300
+-- Data for Name: vdcgroup; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE vdcgroup DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE vdcgroup ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1828 (class 0 OID 113921)
+-- Dependencies: 1296
+-- Data for Name: vdcgrouprelationship; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE vdcgrouprelationship DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE vdcgrouprelationship ENABLE TRIGGER ALL;
+
+
+
+
+
+--
+-- TOC entry 1827 (class 0 OID 113913)
+-- Dependencies: 1295
+-- Data for Name: vdcuser; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE vdcuser DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE vdcuser ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1834 (class 0 OID 113960)
+-- Dependencies: 1306
+-- Data for Name: vdcuser_usergroup; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE vdcuser_usergroup DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE vdcuser_usergroup ENABLE TRIGGER ALL;
+
+
+ALTER TABLE variableintervaltype DISABLE TRIGGER ALL;
+
+INSERT INTO variableintervaltype (id, name) VALUES (1, 'discrete');
+INSERT INTO variableintervaltype (id, name) VALUES (2, 'continuous');
+INSERT INTO variableintervaltype (id, name) VALUES (3, 'nominal');
+INSERT INTO variableintervaltype (id, name) VALUES (4, 'dichotomous');
+
+ALTER TABLE variableintervaltype ENABLE TRIGGER ALL;
+
+
+ALTER TABLE variableformattype DISABLE TRIGGER ALL;
+
+INSERT INTO variableformattype (id, name) VALUES (1, 'numeric');
+INSERT INTO variableformattype (id, name) VALUES (2, 'character');
+
+ALTER TABLE variableformattype ENABLE TRIGGER ALL;
+
+
+ALTER TABLE variablerangetype DISABLE TRIGGER ALL;
+
+INSERT INTO variablerangetype (id, name) VALUES (1, 'min');
+INSERT INTO variablerangetype (id, name) VALUES (2, 'max');
+INSERT INTO variablerangetype (id, name) VALUES (3, 'min exclusive');
+INSERT INTO variablerangetype (id, name) VALUES (4, 'max exclusive');
+INSERT INTO variablerangetype (id, name) VALUES (5, 'point');
+
+ALTER TABLE variablerangetype ENABLE TRIGGER ALL;
+
+ALTER TABLE summarystatistictype DISABLE TRIGGER ALL;
+
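+-- DDI sumStat codes: medn = median, vald = valid cases, invd = invalid/missing cases.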
+INSERT INTO summarystatistictype (id, name) VALUES (1, 'mean');
+INSERT INTO summarystatistictype (id, name) VALUES (2, 'medn');
+INSERT INTO summarystatistictype (id, name) VALUES (3, 'mode');
+INSERT INTO summarystatistictype (id, name) VALUES (4, 'min');
+INSERT INTO summarystatistictype (id, name) VALUES (5, 'max');
+INSERT INTO summarystatistictype (id, name) VALUES (6, 'stdev');
+INSERT INTO summarystatistictype (id, name) VALUES (7, 'vald');
+INSERT INTO summarystatistictype (id, name) VALUES (8, 'invd');
+
+ALTER TABLE summarystatistictype ENABLE TRIGGER ALL;
+
+ALTER TABLE vdcuser DISABLE TRIGGER ALL;
+
+INSERT INTO vdcuser (id, version, email, firstname, lastname, username, encryptedpassword, networkrole_id, active, agreedtermsofuse) VALUES (1, 1, 'dataverse@lists.hmdc.harvard.edu', 'Network', 'Admin', 'networkAdmin', 'tf0bLmzOFx5JrBhe2EIraS5GBnI=', 2, true, true);
+
+ALTER TABLE vdcuser ENABLE TRIGGER ALL;
+
+--
+-- Data for Name: metadata, template (default rows); Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE "metadata" DISABLE TRIGGER ALL;
+-- Default metadata - contains no metadata values
+INSERT INTO metadata( id, version ) VALUES ( 1, 1);
+
+ALTER TABLE "metadata" ENABLE TRIGGER ALL;
+
+
+ALTER TABLE "template" DISABLE TRIGGER ALL;
+
+INSERT INTO template( id, version, vdcnetwork_id, name,metadata_id,enabled) VALUES (1, 1, 0, 'Dataverse Network Default Template',1,true);
+
+ALTER TABLE "template" ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1824 (class 0 OID 113895)
+-- Dependencies: 1290
+-- Data for Name: vdcnetwork; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE vdcnetwork DISABLE TRIGGER ALL;
+
+INSERT INTO vdcnetwork (id, version, name, networkpageheader, networkpagefooter, announcements, displayannouncements, aboutthisdataversenetwork, contactemail, systememail, defaultvdcheader, defaultvdcfooter, defaultvdcabouttext, defaultvdcannouncements, displayvdcannouncements, displayvdcrecentstudies, defaulttemplate_id, allowcreaterequest, defaultnetworkadmin_id,protocol,authority,handleregistration,termsofuseenabled, deposittermsofuseenabled, downloadtermsofuseenabled, defaultdisplaynumber, exportperiod, exporthourofday) VALUES (0, 1, '[Your]', ' ', ' ', 'A description of your Dataverse Network or announcements may be added here. Use Network Options to edit or remove this text.', TRUE, 'This About page is not used anymore in the DVN application.', 'dataverse@lists.hmdc.harvard.edu','dataverse@lists.hmdc.harvard.edu', ' ', ' ', 'This About page is not used anymore in the DVN application.', '', TRUE, TRUE, 1, FALSE,1,'hdl','TEST',false,false,false,false,16,'daily',3);
+
+update vdcnetwork set defaultvdcheader='<style type="text/css">
+body {margin:0; padding:0;}
+</style>
+<div style="width:100%; height:40px; background: url(/dvn/resources/images/customizationpattern.png) repeat-x left -35px #698DA2;"></div>
+<div style="margin:0 auto; max-width:1000px;">';
+
+update vdcnetwork set defaultvdcfooter='</div>';
+
+
+
+update vdcnetwork set  requireDVDescription = false,
+ requireDVaffiliation = false,
+ requireDVclassification = false,
+ requireDVstudiesforrelease = false;
+
+ALTER TABLE vdcnetwork ENABLE TRIGGER ALL;
+
+
+--
+-- TOC entry 1807 (class 0 OID 113797)
+-- Dependencies: 1265
+-- Data for Name: templatefield; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE templatefield DISABLE TRIGGER ALL;
+
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(1,1,1,1,'required',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(2,1,1,2,'required',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(3,1,1,3,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(4,1,1,4,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(5,1,1,5,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(6,1,1,6,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(7,1,1,7,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(8,1,1,8,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(9,1,1,9,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(10,1,1,10,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(11,1,1,11,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(12,1,1,12,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(13,1,1,13,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(14,1,1,14,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(15,1,1,15,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(16,1,1,16,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(17,1,1,17,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(18,1,1,18,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(19,1,1,19,'recommended',-1);
+
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(20,1,1,20,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(21,1,1,21,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(22,1,1,22,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(23,1,1,23,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(24,1,1,24,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(25,1,1,25,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(26,1,1,26,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(27,1,1,27,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(28,1,1,28,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(29,1,1,29,'optional',-1);
+
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(30,1,1,30,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(31,1,1,31,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(32,1,1,32,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(33,1,1,33,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(34,1,1,34,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(35,1,1,35,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(36,1,1,36,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(37,1,1,37,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(38,1,1,38,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(39,1,1,39,'recommended',-1);
+
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(40,1,1,40,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(41,1,1,41,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(42,1,1,42,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(43,1,1,43,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(44,1,1,44,'recommended',-1);
+
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(45,1,1,45,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(46,1,1,46,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(47,1,1,47,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(48,1,1,48,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(49,1,1,49,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(50,1,1,50,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(51,1,1,51,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(52,1,1,52,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(53,1,1,53,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(54,1,1,54,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(55,1,1,55,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(56,1,1,56,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(57,1,1,57,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(58,1,1,58,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(59,1,1,59,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(60,1,1,60,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(61,1,1,61,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(62,1,1,62,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(63,1,1,63,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(64,1,1,64,'optional',-1);
+
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(65,1,1,65,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(66,1,1,66,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(67,1,1,67,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(68,1,1,68,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(69,1,1,69,'optional',-1);
+
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(70,1,1,70,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(71,1,1,71,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(72,1,1,72,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(73,1,1,73,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(74,1,1,74,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(75,1,1,75,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(76,1,1,76,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(77,1,1,77,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(78,1,1,78,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(79,1,1,79,'recommended',-1);
+
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(80,1,1,80,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(81,1,1,81,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(82,1,1,82,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(83,1,1,83,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(84,1,1,84,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(85,1,1,85,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(86,1,1,86,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(87,1,1,87,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(88,1,1,88,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(89,1,1,89,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(90,1,1,90,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(91,1,1,91,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(92,1,1,92,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(93,1,1,93,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(94,1,1,94,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(95,1,1,95,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(96,1,1,96,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(97,1,1,97,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(98,1,1,98,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(99,1,1,99,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(100,1,1,100,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(101,1,1,101,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(102,1,1,102,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(103,1,1,103,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(104,1,1,104,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(105,1,1,105,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(106,1,1,106,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(107,1,1,107,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(108,1,1,108,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(109,1,1,109,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(110,1,1,110,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(111,1,1,111,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(112,1,1,112,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(113,1,1,113,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(114,1,1,114,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(115,1,1,115,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(116,1,1,116,'optional',-1);
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('templatefield', 'id'), 150, false);
+
+
+ALTER TABLE templatefield ENABLE TRIGGER ALL;
+
+
+
+--
+-- TOC entry 1814 (class 0 OID 113843)
+-- Dependencies: 1276
+-- Data for Name: templatefilecategory; Type: TABLE DATA; Schema: public; Owner: postgres
+--
+
+ALTER TABLE templatefilecategory DISABLE TRIGGER ALL;
+
+INSERT INTO templatefilecategory(id, template_id, name, displayorder) VALUES(1,1,'Documentation',1);
+INSERT INTO templatefilecategory(id, template_id, name, displayorder) VALUES(2,1,'Data Files',2);
+
+ALTER TABLE templatefilecategory ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1855 (class 0 OID 0)
+-- Dependencies: 1275
+-- Name: templatefilecategory_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('templatefilecategory', 'id'), 5, false);
+
+
+-- Completed on 2006-09-19 16:05:06 Eastern Standard Time
+
+--
+-- PostgreSQL database dump complete
+--
+
+
+-- Sequence: studyid_seq
+
+-- DROP SEQUENCE studyid_seq;
+
+CREATE SEQUENCE studyid_seq
+  INCREMENT 1
+  MINVALUE 1
+  MAXVALUE 9223372036854775807
+  START 10000
+  CACHE 1;
+ALTER TABLE studyid_seq OWNER TO "postgres";
+
+-- Sequence: filesystemname_seq
+
+-- DROP SEQUENCE filesystemname_seq;
+
+CREATE SEQUENCE filesystemname_seq
+  INCREMENT 1
+  MINVALUE 1
+  MAXVALUE 9223372036854775807
+  START 2
+  CACHE 1;
+ALTER TABLE filesystemname_seq OWNER TO "postgres";
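+
+-- Both sequences hand out ids via nextval(); illustrative calls (each consumes
+-- a value if actually run):
+--   SELECT nextval('studyid_seq');        -- first call returns 10000 (START 10000)
+--   SELECT nextval('filesystemname_seq'); -- first call returns 2 (START 2)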
+
+
+INSERT INTO DataFileFormatType (id, value, name, mimeType) VALUES (1, 'D02', 'Splus', 'text/plain');
+INSERT INTO DataFileFormatType (id, value, name, mimeType) VALUES (2, 'D03', 'Stata', 'application/x-stata');
+INSERT INTO DataFileFormatType (id, value, name, mimeType) VALUES (3, 'D04', 'R', 'application/x-rlang-transport');
+
+INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (0, 'ddi', 'DDI', null);
+INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (1, 'oai_etdms', 'MIF', 'mif2ddi.xsl');
+INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (2, 'oai_dc', 'DC', 'oai_dc2ddi.xsl');
+INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (3, 'oai_fgdc', 'FGDC', 'fgdc2ddi.xsl');
+INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (4, 'dcmi_terms', 'DCMI_terms', 'dcmi_terms2ddi.xsl');
+
+create index datavariable_id_index on datavariable (id);
+create index summarystatistic_id_index on summarystatistic (id);
+create index summarystatistic_datavariable_id_index on summarystatistic (datavariable_id);
+create index variablecategory_id_index on variablecategory (id);
+create index variablecategory_datavariable_id_index on variablecategory (datavariable_id);
+create index variablerange_id_index on variablerange (id);
+create index study_id_index on study (id);
+create index study_owner_id_index on study (owner_id);
+create index weightedvarrelationship_id_index on weightedvarrelationship (weighted_variable_id, variable_id);
+create index studyfile_id_index on studyfile (id);
+create index datavariable_datatable_id_index on datavariable (datatable_id);
+create index variablerange_datavariable_id_index on variablerange (datavariable_id);
+create index metadata_id_index on metadata (id);
+create index studyabstract_metadata_id_index on studyabstract (metadata_id);
+create index studyauthor_metadata_id_index on studyauthor (metadata_id);
+create index studydistributor_metadata_id_index on studydistributor (metadata_id);
+create index studygeobounding_metadata_id_index on studygeobounding (metadata_id);
+create index studygrant_metadata_id_index on studygrant (metadata_id);
+create index studykeyword_metadata_id_index on studykeyword (metadata_id);
+create index studynote_metadata_id_index on studynote (metadata_id);
+create index studyotherid_metadata_id_index on studyotherid (metadata_id);
+create index studyotherref_metadata_id_index on studyotherref (metadata_id);
+create index studyproducer_metadata_id_index on studyproducer (metadata_id);
+create index studyrelmaterial_metadata_id_index on studyrelmaterial (metadata_id);
+create index studyrelpublication_metadata_id_index on studyrelpublication (metadata_id);
+create index studyrelstudy_metadata_id_index on studyrelstudy (metadata_id);
+create index studysoftware_metadata_id_index on studysoftware (metadata_id);
+create index studytopicclass_metadata_id_index on studytopicclass (metadata_id);
+create index template_metadata_id_index on template (metadata_id);
+create index studyfileactivity_id_index on studyfileactivity (id);
+create index studyfileactivity_studyfile_id_index on studyfileactivity (studyfile_id);
+create index studyfileactivity_study_id_index on studyfileactivity (study_id);
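+
+-- These indexes cover the id and foreign-key lookups (metadata_id joins in
+-- particular) that the application issues most often. Illustrative way to
+-- confirm the planner can use one once data is loaded:
+--   EXPLAIN SELECT * FROM studyauthor WHERE metadata_id = 1;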
+
+
+
+
+
+INSERT INTO vdcnetworkstats (id,vdcnetwork_id,downloadcount,studycount,filecount) values (0,0,0,0,0);
+
+insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 1, 'cc by', 'CC Attribution (cc by)', 'http://creativecommons.org/licenses/by/3.0/', 'http://creativecommons.org/licenses/by/3.0/rdf', 'http://i.creativecommons.org/l/by/3.0/88x31.png' );
+-- removed until we support cc0
+--insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 2, 'cc0','CC Zero (cc0)','http://creativecommons.org/publicdomain/zero/1.0/','http://creativecommons.org/publicdomain/zero/1.0/rdf','http://i.creativecommons.org/l/zero/1.0/88x31.png');
+insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 3, 'cc by-sa','CC Attribution Share Alike (cc by-sa)','http://creativecommons.org/licenses/by-sa/3.0/', 'http://creativecommons.org/licenses/by-sa/3.0/rdf', 'http://i.creativecommons.org/l/by-sa/3.0/88x31.png' );
+insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 4, 'cc by-nd','CC Attribution No Derivatives (cc by-nd)','http://creativecommons.org/licenses/by-nd/3.0/', 'http://creativecommons.org/licenses/by-nd/3.0/rdf', 'http://i.creativecommons.org/l/by-nd/3.0/88x31.png' );
+insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 5, 'cc by-nc','CC Attribution Non-Commercial (cc by-nc)','http://creativecommons.org/licenses/by-nc/3.0/', 'http://creativecommons.org/licenses/by-nc/3.0/rdf', 'http://i.creativecommons.org/l/by-nc/3.0/88x31.png' );
+insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 6, 'cc by-nc-sa','CC Attribution Non-Commercial Share Alike (cc by-nc-sa)','http://creativecommons.org/licenses/by-nc-sa/3.0/', 'http://creativecommons.org/licenses/by-nc-sa/3.0/rdf', 'http://i.creativecommons.org/l/by-nc-sa/3.0/88x31.png' );
+insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 7, 'cc by-nc-nd','CC Attribution Non-Commercial No Derivatives (cc by-nc-nd)','http://creativecommons.org/licenses/by-nc-nd/3.0/', 'http://creativecommons.org/licenses/by-nc-nd/3.0/rdf', 'http://i.creativecommons.org/l/by-nc-nd/3.0/88x31.png' );
+
+INSERT INTO metadataformattype (id, name, mimetype, namespace, formatschema, partialexcludesupported, partialselectsupported) VALUES (1, 'ddi', 'application/xml', 'http://www.icpsr.umich.edu/DDI', 'http://www.icpsr.umich.edu/DDI/Version2-0.xsd', true, true);
+INSERT INTO metadataformattype (id, name, mimetype, namespace, formatschema, partialexcludesupported, partialselectsupported) VALUES (2, 'oai_dc', 'application/xml', 'http://www.openarchives.org/OAI/2.0/oai_dc/', 'http://www.openarchives.org/OAI/2.0/oai_dc.xsd', false, false);
+INSERT INTO metadataformattype (id, name, mimetype, namespace, formatschema, partialexcludesupported, partialselectsupported) VALUES (3, 'marc', 'application/octet-stream', 'http://www.loc.gov/marc/', 'MARC 21', false, false);
+
+-- Create the network-wide guest book (vdc_id = null).
+
+INSERT INTO guestbookquestionnaire(enabled,firstnamerequired, lastnamerequired, emailrequired, institutionrequired,  positionrequired, vdc_id) VALUES (true, true, true, true, false, false, null);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/referenceData.sql.TEMPLATE	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,1223 @@
+--
+-- PostgreSQL database dump
+--
+
+-- Started on 2006-09-19 16:05:05 Eastern Standard Time
+
+SET client_encoding = 'UTF8';
+SET check_function_bodies = false;
+SET client_min_messages = warning;
+
+SET search_path = public, pg_catalog;
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('metadata', 'id'), 10, false);
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('template', 'id'), 10, false);
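+
+-- Note on setval(): with is_called = false, the next nextval() on the sequence
+-- returns exactly the value set above (10 for metadata and template), not 11.
+-- Illustrative check (consumes the value if actually run):
+--   SELECT nextval(pg_catalog.pg_get_serial_sequence('metadata', 'id'));  -- -> 10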
+
+
+--
+-- TOC entry 1840 (class 0 OID 0)
+-- Dependencies: 1304
+-- Name: datatable_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER%
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('pagedef', 'id'), 500, false);
+
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('datatable', 'id'), 1, false);
+
+
+--
+-- TOC entry 1841 (class 0 OID 0)
+-- Dependencies: 1291
+-- Name: datavariable_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER%
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('datavariable', 'id'), 1, false);
+
+
+--
+-- TOC entry 1842 (class 0 OID 0)
+-- Dependencies: 1297
+-- Name: fieldinputlevel_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER%
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('fieldinputlevel', 'id'), 10, false);
+
+
+
+
+
+--
+-- TOC entry 1844 (class 0 OID 0)
+-- Dependencies: 1287
+-- Name: logindomain_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER%
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('logindomain', 'id'), 1, false);
+
+
+
+
+--
+-- TOC entry 1846 (class 0 OID 0)
+-- Dependencies: 1312
+-- Name: role_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER%
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('"role"', 'id'), 10, false);
+
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('"networkrole"', 'id'), 10, false);
+
+--
+-- TOC entry 1848 (class 0 OID 0)
+-- Dependencies: 1272
+-- Name: studyfield_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER%
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('studyfield', 'id'), 150, true);
+
+
+
+
+
+--
+-- TOC entry 1851 (class 0 OID 0)
+-- Dependencies: 1270
+-- Name: studyfile_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER%
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('studyfile', 'id'), 1, false);
+
+
+
+
+
+--
+-- TOC entry 1856 (class 0 OID 0)
+-- Dependencies: 1302
+-- Name: usergroup_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER%
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('usergroup', 'id'), 1, false);
+
+
+
+--
+-- TOC entry 1859 (class 0 OID 0)
+-- Dependencies: 1299
+-- Name: vdcgroup_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER%
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('vdcgroup', 'id'), 1, false);
+
+
+--
+-- TOC entry 1860 (class 0 OID 0)
+-- Dependencies: 1289
+-- Name: vdcnetwork_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER%
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('vdcnetwork', 'id'), 1, false);
+
+
+--
+-- TOC entry 1861 (class 0 OID 0)
+-- Dependencies: 1294
+-- Name: vdcuser_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER%
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('vdcuser', 'id'), 10, false);
+
+
+
+--
+-- TOC entry 1813 (class 0 OID 113837)
+-- Dependencies: 1274
+-- Data for Name: coll_adv_search_fields; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE coll_adv_search_fields DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE coll_adv_search_fields ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1818 (class 0 OID 113863)
+-- Dependencies: 1281
+-- Data for Name: coll_any_search_fields; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE coll_any_search_fields DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE coll_any_search_fields ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1804 (class 0 OID 113774)
+-- Dependencies: 1259
+-- Data for Name: coll_search_result_fields; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE coll_search_result_fields DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE coll_search_result_fields ENABLE TRIGGER ALL;
+
+
+--
+-- Data for Name: datatable; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE datatable DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE datatable ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1825 (class 0 OID 113902)
+-- Dependencies: 1292
+-- Data for Name: datavariable; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE datavariable DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE datavariable ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1829 (class 0 OID 113927)
+-- Dependencies: 1298
+-- Data for Name: fieldinputlevel; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE fieldinputlevel DISABLE TRIGGER ALL;
+
+INSERT INTO fieldinputlevel (id, name) VALUES (1, 'required');
+INSERT INTO fieldinputlevel (id, name) VALUES (2, 'recommended');
+INSERT INTO fieldinputlevel (id, name) VALUES (3, 'optional');
+
+
+
+ALTER TABLE fieldinputlevel ENABLE TRIGGER ALL;
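+
+-- templatefield (loaded elsewhere in this install) stores the level as a plain
+-- string in fieldinputlevelstring; illustrative orphan check against the names
+-- above:
+--   SELECT DISTINCT fieldinputlevelstring FROM templatefield
+--    WHERE fieldinputlevelstring NOT IN (SELECT name FROM fieldinputlevel);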
+
+
+
+--
+-- TOC entry 1823 (class 0 OID 113888)
+-- Dependencies: 1288
+-- Data for Name: logindomain; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE logindomain DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE logindomain ENABLE TRIGGER ALL;
+
+
+
+--
+-- TOC entry 1838 (class 0 OID 113987)
+-- Dependencies: 1313
+-- Data for Name: role; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE "role" DISABLE TRIGGER ALL;
+INSERT INTO "role" (id, name) VALUES (1, 'contributor');
+INSERT INTO "role" (id, name) VALUES (2, 'curator');
+INSERT INTO "role" (id, name) VALUES (3, 'admin');
+INSERT INTO "role" (id, name) VALUES (4, 'privileged viewer');
+ALTER TABLE "role" ENABLE TRIGGER ALL;
+
+ALTER TABLE "networkrole" DISABLE TRIGGER ALL;
+INSERT INTO "networkrole" (id, name) VALUES (1, 'Creator');
+INSERT INTO "networkrole" (id, name) VALUES (2, 'Admin');
+ALTER TABLE "networkrole" ENABLE TRIGGER ALL;
+
+ALTER TABLE pagedef DISABLE TRIGGER ALL;
+
+
+-- Pages that don't require role authorization
+INSERT INTO pagedef (name, path, role_id, networkrole_id ) VALUES  ( 'StudyPage', '/study/StudyPage.xhtml', null,null );
+INSERT INTO pagedef (name, path, role_id, networkrole_id ) VALUES  ( 'SubsettingPage', '/subsetting/SubsettingPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ExploreDataPage','/viz/ExploreDataPage.xhtml',null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ErrorPage', '/ErrorPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'HomePage', '/HomePage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'UnauthorizedPage', '/login/UnauthorizedPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'StudyLockedPage', '/login/StudyLockedPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'LogoutPage', '/login/LogoutPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AddAccountPage', '/login/AddAccountPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditAccountPage', '/login/EditAccountPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AccountOptionsPage', '/login/AccountOptionsPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AccountPage', '/login/AccountPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'LoginPage', '/login/LoginPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ForgotPasswordPage', '/login/ForgotPasswordPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ContributorRequestPage', '/login/ContributorRequestPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ContributorRequestInfoPage', '/login/ContributorRequestInfoPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'CreatorRequestPage','/login/CreatorRequestPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'CreatorRequestInfoPage','/login/CreatorRequestInfoPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'TermsOfUsePage','/login/TermsOfUsePage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AccountTermsOfUsePage','/login/AccountTermsOfUsePage.xhtml', null,null );
+INSERT INTO pagedef (name, path, role_id, networkrole_id ) VALUES  ( 'StudyVersionDifferencesPage', '/study/StudyVersionDifferencesPage.xhtml', null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'OptionsPage','/admin/OptionsPage.xhtml',null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageStudiesPage','/study/ManageStudiesPage.xhtml',null,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManifestPage', '/ManifestPage.xhtml', null,null );
+
+-- Pages that require VDC Role authorization:
+-- Contributor Role 
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditStudyPage','/study/EditStudyPage.xhtml',1,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditStudyFilesPage','/study/EditStudyFilesPage.xhtml',1,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AddFilesPage','/study/AddFilesPage.xhtml',1,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'SetUpDataExplorationPage','/study/SetUpDataExplorationPage.xhtml',1,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'DeleteStudyPage','/study/DeleteStudyPage.xhtml',1,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'MyDataversePage','/networkAdmin/MyDataversePage.xhtml',null,null );
+
+-- Curator Role
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditCollectionPage','/collection/EditCollectionPage.xhtml',2,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageCollectionsPage','/collection/ManageCollectionsPage.xhtml',2,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'StudyPermissionsPage','/study/StudyPermissionsPage.xhtml',2,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'DeaccessionStudyPage', '/study/DeaccessionStudyPage.xhtml', 2, null );
+
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageTemplatesPage', '/admin/ManageTemplatesPage.xhtml', 2,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'TemplateFormPage','/study/TemplateFormPage.xhtml',2,2 );
+
+-- Admin Role
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditSitePage', '/site/EditSitePage.xhtml', 3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditBannerFooterPage','/admin/EditBannerFooterPage.xhtml',3,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditContactUsPage','/admin/EditContactUsPage.xhtml',3,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditHomePanelsPage','/admin/EditHomePanelsPage.xhtml',3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditStudyCommentsPage', '/admin/EditStudyCommentsPage.xhtml', 3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditUserTermsPage','/admin/EditUseTermsPage.xhtml',3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditDepositUseTermsPage','/admin/EditDepositUseTermsPage.xhtml',3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'PrivilegedUsersPage','/admin/PrivilegedUsersPage.xhtml',3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'SearchFieldsPage','/admin/SearchFieldsPage.xhtml',3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'PromotionalLinkSearchBoxPage','/admin/PromotionalLinkSearchBoxPage.xhtml',3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditLockssConfigPage','/admin/EditLockssConfigPage.xhtml',3,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditGuestbookQuestionnairePage', '/admin/EditGuestbookQuestionnairePage.xhtml', 3,null );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'GuestBookResponseDataPage', '/admin/GuestBookResponseDataPage.xhtml', 3,2 );
+-- Pages that require Network Role authorization
+-- Creator Role 
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AddSitePage', '/site/AddSitePage.xhtml', null,1 );
+-- Admin Role
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'NetworkOptionsPage', '/networkAdmin/NetworkOptionsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'HarvestSitesPage', '/site/HarvestSitesPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AddClassificationsPage', '/networkAdmin/AddClassificationsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageClassificationsPage', '/networkAdmin/ManageClassificationsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageControlledVocabularyPage', '/admin/ManageControlledVocabularyPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'CommentReviewPage', '/networkAdmin/CommentReviewPage.xhtml', null, 2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageDataversesPage', '/networkAdmin/ManageDataversesPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditHarvestSitePage', '/site/EditHarvestSitePage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkNamePage', '/networkAdmin/EditNetworkNamePage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'NetworkPrivilegedUsersPage', '/networkAdmin/NetworkPrivilegedUsersPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AllUsersPage', '/networkAdmin/AllUsersPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkAnnouncementsPage', '/networkAdmin/EditNetworkAnnouncementsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkBannerFooterPage', '/networkAdmin/EditNetworkBannerFooterPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditExportSchedulePage', '/networkAdmin/EditExportSchedulePage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditOAISetPage', '/networkAdmin/EditOAISetPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkDownloadUseTermsPage', '/networkAdmin/EditNetworkDownloadUseTermsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkDepositUseTermsPage', '/networkAdmin/EditNetworkDepositUseTermsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditAccountUseTermsPage', '/networkAdmin/EditAccountUseTermsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditUserGroupPage', '/networkAdmin/EditUserGroupPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'UserGroupsPage', '/networkAdmin/UserGroupsPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ImportStudyPage', '/networkAdmin/ImportStudyPage.xhtml', null,2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'UtilitiesPage', '/networkAdmin/UtilitiesPage.xhtml', null, 2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditSubnetworkPage', '/networkAdmin/EditSubnetworkPage.xhtml', null, 2 );
+INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageSubnetworksPage', '/networkAdmin/ManageSubnetworksPage.xhtml', null, 2 );
+
+ALTER TABLE pagedef ENABLE TRIGGER ALL;
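+
+-- role_id points at a dataverse-level role and networkrole_id at a
+-- network-level role (1 = Creator, 2 = Admin, per the sections above); a null
+-- role_id means the page is gated by the network role alone. A quick sanity
+-- check after loading (hypothetical, to be run manually; not part of the install):
+--   SELECT name, path FROM pagedef WHERE networkrole_id = 2 ORDER BY name;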
+
+
+--
+-- TOC entry 1821 (class 0 OID 113878)
+-- Dependencies: 1285
+-- Data for Name: search_result_fields; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE search_result_fields DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE search_result_fields ENABLE TRIGGER ALL;
+
+
+--
+-- TOC entry 1826 (class 0 OID 113907)
+-- Dependencies: 1293
+-- Data for Name: study_studyfield; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE study_studyfield DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE study_studyfield ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1817 (class 0 OID 113859)
+-- Dependencies: 1280
+-- Data for Name: study_usergroup; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE study_usergroup DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE study_usergroup ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1812 (class 0 OID 113829)
+-- Dependencies: 1273
+-- Data for Name: studyfield; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE studyfield DISABLE TRIGGER ALL;
+
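+-- Core citation and study metadata fields. The boolean flags control where a
+-- field is exposed (basic search, advanced search, search results), whether it
+-- is a dataverse-defined custom field, and whether a controlled vocabulary may
+-- be attached to it. Note that most rows below leave title and description
+-- blank in this seed data.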
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (1, 'Title', 'Title', 'title', TRUE, TRUE, TRUE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (2, 'Study ID', 'Study ID', 'studyId', TRUE, TRUE, TRUE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (3, 'Author', 'Author', 'author', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (4, 'Author Affiliation', 'Author Affiliation', 'authorAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (5, 'Producer', 'Producer', 'producer', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (6, 'Producer URL', 'Producer URL', 'producerURL', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (7, 'URL to Producer Logo', 'URL to Producer Logo', 'producerLogo', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (8, 'Producer Name Abbreviation', 'Producer Name Abbreviation', 'producerAbbreviation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (9, 'Production Date', 'Production Date', 'productionDate', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (10, 'Software', 'Software', 'software', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (11, 'Software Version', 'Software Version', 'softwareVersion', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (12, 'Funding Agency', 'Funding Agency', 'fundingAgency', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (13, 'Grant Number', 'Grant Number', 'grantNumber', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (14, 'Grant Number Agency', 'Grant Number Agency', 'grantNumberAgency', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (15, '', '', 'distributor', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (16, '', '', 'distributorURL', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (17, '', '', 'distributorLogo', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (18, '', '', 'distributionDate', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (19, '', '', 'distributorContact', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (20, '', '', 'distributorContactAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (21, '', '', 'distributorContactEmail', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (22, '', '', 'depositor', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (23, '', '', 'dateOfDeposit', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (24, '', '', 'series', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (25, '', '', 'seriesInformation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (26, '', '', 'studyVersion', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (27, '', '', 'keyword', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (28, '', '', 'keywordVocab', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (29, '', '', 'keywordVocabURI', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (30, '', '', 'topicClassification', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (31, '', '', 'topicClassVocab', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (32, '', '', 'topicClassVocabURI', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (33, '', '', 'description', FALSE, TRUE, TRUE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (34, '', '', 'descriptionDate', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (35, '', '', 'timePeriodCoveredStart', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (36, '', '', 'timePeriodCoveredEnd', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (37, '', '', 'dateOfCollectionStart', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (38, '', '', 'dateOfCollectionEnd', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (39, '', '', 'country', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (40, '', '', 'geographicCoverage', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (41, '', '', 'geographicUnit', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (42, '', '', 'unitOfAnalysis', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (43, '', '', 'universe', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (44, '', '', 'kindOfData', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (45, '', '', 'timeMethod', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (46, '', '', 'dataCollector', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (47, '', '', 'frequencyOfDataCollection', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (48, '', '', 'samplingProcedure', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (49, '', '', 'deviationsFromSampleDesign', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (50, '', '', 'collectionMode', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (51, '', '', 'researchInstrument', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (52, '', '', 'dataSources', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (53, '', '', 'originOfSources', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (54, '', '', 'characteristicOfSources', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (55, '', '', 'accessToSources', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (56, '', '', 'dataCollectionSituation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (57, '', '', 'actionsToMinimizeLoss', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (58, '', '', 'controlOperations', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (59, '', '', 'weighting', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (60, '', '', 'cleaningOperations', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (61, '', '', 'studyLevelErrorNotes', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (62, '', '', 'responseRate', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (63, '', '', 'samplingErrorEstimates', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (64, '', '', 'otherDataAppraisal', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (65, '', '', 'placeOfAccess', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (66, '', '', 'originalArchive', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (67, '', '', 'availabilityStatus', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (68, '', '', 'collectionSize', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (69, '', '', 'studyCompletion', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (70, '', '', 'confidentialityDeclaration', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (71, '', '', 'specialPermissions', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (72, '', '', 'restrictions', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (73, '', '', 'contact', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (74, '', '', 'citationRequirements', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (75, '', '', 'depositorRequirements', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (76, '', '', 'conditions', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (77, '', '', 'disclaimer', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (78, '', '', 'relatedMaterial', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (79, '', '', 'publication', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (80, '', '', 'relatedStudies', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (81, '', '', 'otherReferences', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (82, '', '', 'notesText', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (83, '', '', 'note', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (84, '', '', 'notesInformationSubject', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (85, '', '', 'otherId', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (86, '', '', 'otherIdAgency', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (87, '', '', 'productionPlace', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (88, '', '', 'numberOfFiles', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (89, '', '', 'publicationReplicationData', FALSE, TRUE, FALSE, FALSE, FALSE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (90, '', '', 'subTitle', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (91, '', '', 'versionDate', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (92, '', '', 'geographicBoundingBox', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (93, '', '', 'eastLongitude', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (94, '', '', 'northLatitude', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (95, '', '', 'southLatitude', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (96, '', '', 'producerAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (97, '', '', 'distributorAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (98, '', '', 'distributorAbbreviation', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (99, 'Author', 'Author', 'authorName', TRUE, TRUE, TRUE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (100, '', '', 'producerName', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (101, '', '', 'distributorName', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (102, '', '', 'distributorContactName', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (103, '', '', 'descriptionText', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (104, '', '', 'keywordValue', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (105, '', '', 'topicClassValue', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (106, '', '', 'otherIdValue', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (107, '', '', 'softwareName', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (108, '', '', 'grantNumberValue', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (109, '', '', 'seriesName', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (110, '', '', 'studyVersionValue', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (111, '', '', 'westLongitude', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (112, '', '', 'noteInformationType', FALSE, FALSE, FALSE, FALSE, TRUE );
+
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (113, '', '', 'publicationCitation', FALSE, TRUE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (114, '', '', 'publicationIDType', FALSE, FALSE, FALSE, FALSE, FALSE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (115, '', '', 'publicationIDNumber', FALSE, FALSE, FALSE, FALSE, TRUE );
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (116, '', '', 'publicationURL', FALSE, FALSE, FALSE, FALSE, TRUE );
+
+-- Set the parent-child relationships
+update studyfield set parentstudyfield_id = 3 where id = 99;
+update studyfield set parentstudyfield_id = 3 where id = 4;
+
+update studyfield set parentstudyfield_id = 5 where id = 100;
+update studyfield set parentstudyfield_id = 5 where id = 6;
+update studyfield set parentstudyfield_id = 5 where id = 7;
+update studyfield set parentstudyfield_id = 5 where id = 8;
+update studyfield set parentstudyfield_id = 5 where id = 96;
+
+update studyfield set parentstudyfield_id = 15 where id = 101;
+update studyfield set parentstudyfield_id = 15 where id = 16;
+update studyfield set parentstudyfield_id = 15 where id = 17;
+update studyfield set parentstudyfield_id = 15 where id = 97;
+update studyfield set parentstudyfield_id = 15 where id = 98;
+
+update studyfield set parentstudyfield_id = 19 where id = 102;
+update studyfield set parentstudyfield_id = 19 where id = 20;
+update studyfield set parentstudyfield_id = 19 where id = 21;
+
+update studyfield set parentstudyfield_id = 33 where id = 103;
+update studyfield set parentstudyfield_id = 33 where id = 34;
+
+update studyfield set parentstudyfield_id = 27 where id = 104;
+update studyfield set parentstudyfield_id = 27 where id = 28;
+update studyfield set parentstudyfield_id = 27 where id = 29;
+
+update studyfield set parentstudyfield_id = 30 where id = 105;
+update studyfield set parentstudyfield_id = 30 where id = 31;
+update studyfield set parentstudyfield_id = 30 where id = 32;
+
+update studyfield set parentstudyfield_id = 85 where id = 106;
+update studyfield set parentstudyfield_id = 85 where id = 86;
+
+update studyfield set parentstudyfield_id = 10 where id = 107;
+update studyfield set parentstudyfield_id = 10 where id = 11;
+
+update studyfield set parentstudyfield_id = 13 where id = 108;
+update studyfield set parentstudyfield_id = 13 where id = 14;
+
+update studyfield set parentstudyfield_id = 24 where id = 109;
+update studyfield set parentstudyfield_id = 24 where id = 25;
+
+update studyfield set parentstudyfield_id = 26 where id = 110;
+update studyfield set parentstudyfield_id = 26 where id = 91;
+
+update studyfield set parentstudyfield_id = 92 where id = 111;
+update studyfield set parentstudyfield_id = 92 where id = 93;
+update studyfield set parentstudyfield_id = 92 where id = 94;
+update studyfield set parentstudyfield_id = 92 where id = 95;
+
+update studyfield set parentstudyfield_id = 83 where id = 112;
+update studyfield set parentstudyfield_id = 83 where id = 82;
+update studyfield set parentstudyfield_id = 83 where id = 84;
+
+update studyfield set parentstudyfield_id = 79 where id = 113;
+update studyfield set parentstudyfield_id = 79 where id = 114;
+update studyfield set parentstudyfield_id = 79 where id = 115;
+update studyfield set parentstudyfield_id = 79 where id = 116;
+update studyfield set parentstudyfield_id = 79 where id = 89;
+
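+-- The updates above attach child fields to their compound parents via
+-- parentstudyfield_id (e.g. authorName and authorAffiliation roll up under
+-- author, id 3). To inspect the resulting hierarchy after loading
+-- (hypothetical manual check, not part of the install):
+--   SELECT p.name AS parent, c.name AS child
+--   FROM studyfield c JOIN studyfield p ON c.parentstudyfield_id = p.id
+--   ORDER BY p.name, c.displayorder;
+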
+update studyfield set displayorder = 0 where name = 'authorName';
+update studyfield set displayorder = 2 where name = 'authorAffiliation';
+update studyfield set displayorder = 2 where name = 'producerAbbreviation';
+update studyfield set displayorder = 1 where name = 'producerName';
+update studyfield set displayorder = 3 where name = 'producerAffiliation';
+update studyfield set displayorder = 4 where name = 'producerURL';
+update studyfield set displayorder = 5 where name = 'producerLogo';
+update studyfield set displayorder = 2 where name = 'softwareVersion';
+update studyfield set displayorder = 1 where name = 'softwareName';
+update studyfield set displayorder = 1 where name = 'grantNumberValue';
+update studyfield set displayorder = 2 where name = 'grantNumberAgency';
+update studyfield set displayorder = 1 where name = 'distributorName';
+update studyfield set displayorder = 4 where name = 'distributorURL';
+update studyfield set displayorder = 5 where name = 'distributorLogo';
+update studyfield set displayorder = 3 where name = 'distributorAffiliation';
+update studyfield set displayorder = 2 where name = 'distributorAbbreviation';
+update studyfield set displayorder = 1 where name = 'distributorContactName';
+update studyfield set displayorder = 2 where name = 'distributorContactAffiliation';
+update studyfield set displayorder = 3 where name = 'distributorContactEmail';
+update studyfield set displayorder = 2 where name = 'seriesInformation';
+update studyfield set displayorder = 1 where name = 'seriesName';
+update studyfield set displayorder = 1 where name = 'studyVersionValue';
+update studyfield set displayorder = 2 where name = 'versionDate';
+update studyfield set displayorder = 1 where name = 'keywordValue';
+update studyfield set displayorder = 3 where name = 'keywordVocabURI';
+update studyfield set displayorder = 2 where name = 'keywordVocab';
+update studyfield set displayorder = 1 where name = 'topicClassValue';
+update studyfield set displayorder = 2 where name = 'topicClassVocab';
+update studyfield set displayorder = 3 where name = 'topicClassVocabURI';
+update studyfield set displayorder = 1 where name = 'descriptionText';
+update studyfield set displayorder = 2 where name = 'descriptionDate';
+update studyfield set displayorder = 1 where name = 'publicationCitation';
+update studyfield set displayorder = 2 where name = 'publicationIDNumber';
+update studyfield set displayorder = 3 where name = 'publicationURL';
+update studyfield set displayorder = 3 where name = 'notesText';
+update studyfield set displayorder = 1 where name = 'noteInformationType';
+update studyfield set displayorder = 2 where name = 'notesInformationSubject';
+update studyfield set displayorder = 2 where name = 'otherIdAgency';
+update studyfield set displayorder = 1 where name = 'otherIdValue';
+
+
+update studyfield set fieldtype = 'date' where id = 9;
+update studyfield set fieldtype = 'date' where id = 18;
+update studyfield set fieldtype = 'date' where id = 23;
+update studyfield set fieldtype = 'date' where id = 34;
+update studyfield set fieldtype = 'date' where id = 35;
+update studyfield set fieldtype = 'date' where id = 36;
+update studyfield set fieldtype = 'date' where id = 37;
+update studyfield set fieldtype = 'date' where id = 38;
+update studyfield set fieldtype = 'date' where id = 91;
+update studyfield set fieldtype = 'email' where id = 21;
+update studyfield set fieldtype = 'textBox' where id = 4;
+update studyfield set fieldtype = 'textBox' where id = 8;
+update studyfield set fieldtype = 'textBox' where id = 11;
+update studyfield set fieldtype = 'textBox' where id = 12;
+update studyfield set fieldtype = 'textBox' where id = 13;
+update studyfield set fieldtype = 'textBox' where id = 14;
+update studyfield set fieldtype = 'textBox' where id = 19;
+update studyfield set fieldtype = 'textBox' where id = 20;
+update studyfield set fieldtype = 'textBox' where id = 22;
+update studyfield set fieldtype = 'textBox' where id = 24;
+update studyfield set fieldtype = 'textBox' where id = 25;
+update studyfield set fieldtype = 'textBox' where id = 26;
+update studyfield set fieldtype = 'textBox' where id = 27;
+update studyfield set fieldtype = 'textBox' where id = 28;
+update studyfield set fieldtype = 'textBox' where id = 30;
+update studyfield set fieldtype = 'textBox' where id = 31;
+update studyfield set fieldtype = 'textBox' where id = 33;
+update studyfield set fieldtype = 'textBox' where id = 39;
+update studyfield set fieldtype = 'textBox' where id = 40;
+update studyfield set fieldtype = 'textBox' where id = 41;
+update studyfield set fieldtype = 'textBox' where id = 42;
+update studyfield set fieldtype = 'textBox' where id = 43;
+update studyfield set fieldtype = 'textBox' where id = 44;
+update studyfield set fieldtype = 'textBox' where id = 45;
+update studyfield set fieldtype = 'textBox' where id = 46;
+update studyfield set fieldtype = 'textBox' where id = 47;
+update studyfield set fieldtype = 'textBox' where id = 48;
+update studyfield set fieldtype = 'textBox' where id = 49;
+update studyfield set fieldtype = 'textBox' where id = 50;
+update studyfield set fieldtype = 'textBox' where id = 51;
+update studyfield set fieldtype = 'textBox' where id = 52;
+update studyfield set fieldtype = 'textBox' where id = 53;
+update studyfield set fieldtype = 'textBox' where id = 54;
+update studyfield set fieldtype = 'textBox' where id = 55;
+update studyfield set fieldtype = 'textBox' where id = 56;
+update studyfield set fieldtype = 'textBox' where id = 57;
+update studyfield set fieldtype = 'textBox' where id = 58;
+update studyfield set fieldtype = 'textBox' where id = 59;
+update studyfield set fieldtype = 'textBox' where id = 60;
+update studyfield set fieldtype = 'textBox' where id = 61;
+update studyfield set fieldtype = 'textBox' where id = 62;
+update studyfield set fieldtype = 'textBox' where id = 63;
+update studyfield set fieldtype = 'textBox' where id = 64;
+update studyfield set fieldtype = 'textBox' where id = 65;
+update studyfield set fieldtype = 'textBox' where id = 66;
+update studyfield set fieldtype = 'textBox' where id = 67;
+update studyfield set fieldtype = 'textBox' where id = 68;
+update studyfield set fieldtype = 'textBox' where id = 69;
+update studyfield set fieldtype = 'textBox' where id = 70;
+update studyfield set fieldtype = 'textBox' where id = 71;
+update studyfield set fieldtype = 'textBox' where id = 72;
+update studyfield set fieldtype = 'textBox' where id = 73;
+update studyfield set fieldtype = 'textBox' where id = 74;
+update studyfield set fieldtype = 'textBox' where id = 75;
+update studyfield set fieldtype = 'textBox' where id = 76;
+update studyfield set fieldtype = 'textBox' where id = 77;
+update studyfield set fieldtype = 'textBox' where id = 78;
+update studyfield set fieldtype = 'textBox' where id = 79;
+update studyfield set fieldtype = 'textBox' where id = 80;
+update studyfield set fieldtype = 'textBox' where id = 81;
+update studyfield set fieldtype = 'textBox' where id = 82;
+update studyfield set fieldtype = 'textBox' where id = 83;
+update studyfield set fieldtype = 'textBox' where id = 84;
+update studyfield set fieldtype = 'textBox' where id = 85;
+update studyfield set fieldtype = 'textBox' where id = 86;
+update studyfield set fieldtype = 'textBox' where id = 87;
+update studyfield set fieldtype = 'textBox' where id = 88;
+update studyfield set fieldtype = 'textBox' where id = 89;
+update studyfield set fieldtype = 'textBox' where id = 92;
+update studyfield set fieldtype = 'textBox' where id = 96;
+update studyfield set fieldtype = 'textBox' where id = 97;
+update studyfield set fieldtype = 'textBox' where id = 98;
+update studyfield set fieldtype = 'textBox' where id = 99;
+update studyfield set fieldtype = 'textBox' where id = 100;
+update studyfield set fieldtype = 'textBox' where id = 101;
+update studyfield set fieldtype = 'textBox' where id = 102;
+update studyfield set fieldtype = 'textBox' where id = 103;
+update studyfield set fieldtype = 'textBox' where id = 104;
+update studyfield set fieldtype = 'textBox' where id = 105;
+update studyfield set fieldtype = 'textBox' where id = 106;
+update studyfield set fieldtype = 'textBox' where id = 107;
+update studyfield set fieldtype = 'textBox' where id = 108;
+update studyfield set fieldtype = 'textBox' where id = 109;
+update studyfield set fieldtype = 'textBox' where id = 110;
+update studyfield set fieldtype = 'textBox' where id = 112;
+update studyfield set fieldtype = 'textBox' where id = 113;
+update studyfield set fieldtype = 'textBox' where id = 115;
+update studyfield set fieldtype = 'url' where id = 6;
+update studyfield set fieldtype = 'url' where id = 7;
+update studyfield set fieldtype = 'url' where id = 16;
+update studyfield set fieldtype = 'url' where id = 17;
+update studyfield set fieldtype = 'url' where id = 29;
+update studyfield set fieldtype = 'url' where id = 32;
+update studyfield set fieldtype = 'url' where id = 116;
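+
+-- fieldtype selects the edit widget used for each field ('date', 'email',
+-- 'textBox', 'url'); fields left untouched presumably fall back to the
+-- application default, a single-line text input. Distribution check
+-- (hypothetical, manual):
+--   SELECT coalesce(fieldtype, 'default') AS fieldtype, count(*)
+--   FROM studyfield GROUP BY 1 ORDER BY 2 DESC;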
+
+ALTER TABLE studyfield ENABLE TRIGGER ALL;
+
+
+--
+-- TOC entry 1811 (class 0 OID 113819)
+-- Dependencies: 1271
+-- Data for Name: studyfile; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE studyfile DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE studyfile ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1810 (class 0 OID 113813)
+-- Dependencies: 1269
+-- Data for Name: studyfile_usergroup; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE studyfile_usergroup DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE studyfile_usergroup ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1835 (class 0 OID 113964)
+-- Dependencies: 1307
+-- Data for Name: summary_fields; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE summary_fields DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE summary_fields ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1809 (class 0 OID 113808)
+-- Dependencies: 1268
+-- Data for Name: template; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE "template" DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE "template" ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1807 (class 0 OID 113797)
+-- Dependencies: 1265
+-- Data for Name: templatefield; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE templatefield DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE templatefield ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1832 (class 0 OID 113945)
+-- Dependencies: 1303
+-- Data for Name: usergroup; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE usergroup DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE usergroup ENABLE TRIGGER ALL;
+
+
+--
+-- TOC entry 1831 (class 0 OID 113939)
+-- Dependencies: 1301
+-- Data for Name: vdc_adv_search_fields; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdc_adv_search_fields DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE vdc_adv_search_fields ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1801 (class 0 OID 113756)
+-- Dependencies: 1255
+-- Data for Name: vdc_any_search_fields; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdc_any_search_fields DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE vdc_any_search_fields ENABLE TRIGGER ALL;
+
+
+--
+-- TOC entry 1808 (class 0 OID 113802)
+-- Dependencies: 1266
+-- Data for Name: vdc_usergroup; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdc_usergroup DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE vdc_usergroup ENABLE TRIGGER ALL;
+
+
+--
+-- TOC entry 1830 (class 0 OID 113934)
+-- Dependencies: 1300
+-- Data for Name: vdcgroup; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdcgroup DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE vdcgroup ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1828 (class 0 OID 113921)
+-- Dependencies: 1296
+-- Data for Name: vdcgrouprelationship; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdcgrouprelationship DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE vdcgrouprelationship ENABLE TRIGGER ALL;
+
+
+
+--
+-- TOC entry 1827 (class 0 OID 113913)
+-- Dependencies: 1295
+-- Data for Name: vdcuser; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdcuser DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE vdcuser ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1834 (class 0 OID 113960)
+-- Dependencies: 1306
+-- Data for Name: vdcuser_usergroup; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdcuser_usergroup DISABLE TRIGGER ALL;
+
+
+
+ALTER TABLE vdcuser_usergroup ENABLE TRIGGER ALL;
+
+
+ALTER TABLE variableintervaltype DISABLE TRIGGER ALL;
+
+INSERT INTO variableintervaltype (id, name) VALUES (1, 'discrete');
+INSERT INTO variableintervaltype (id, name) VALUES (2, 'continuous');
+INSERT INTO variableintervaltype (id, name) VALUES (3, 'nominal');
+INSERT INTO variableintervaltype (id, name) VALUES (4, 'dichotomous');
+
+ALTER TABLE variableintervaltype ENABLE TRIGGER ALL;
+
+
+ALTER TABLE variableformattype DISABLE TRIGGER ALL;
+
+INSERT INTO variableformattype (id, name) VALUES (1, 'numeric');
+INSERT INTO variableformattype (id, name) VALUES (2, 'character');
+
+ALTER TABLE variableformattype ENABLE TRIGGER ALL;
+
+
+ALTER TABLE variablerangetype DISABLE TRIGGER ALL;
+
+INSERT INTO variablerangetype (id, name) VALUES (1, 'min');
+INSERT INTO variablerangetype (id, name) VALUES (2, 'max');
+INSERT INTO variablerangetype (id, name) VALUES (3, 'min exclusive');
+INSERT INTO variablerangetype (id, name) VALUES (4, 'max exclusive');
+INSERT INTO variablerangetype (id, name) VALUES (5, 'point');
+
+ALTER TABLE variablerangetype ENABLE TRIGGER ALL;
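+
+-- variableintervaltype, variableformattype and variablerangetype are fixed
+-- lookup tables used during tabular data ingest; the names roughly mirror the
+-- DDI variable attributes for interval type, format type and value range.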
+
+ALTER TABLE summarystatistictype DISABLE TRIGGER ALL;
+
+INSERT INTO summarystatistictype (id, name) VALUES (1, 'mean');
+INSERT INTO summarystatistictype (id, name) VALUES (2, 'medn');
+INSERT INTO summarystatistictype (id, name) VALUES (3, 'mode');
+INSERT INTO summarystatistictype (id, name) VALUES (4, 'min');
+INSERT INTO summarystatistictype (id, name) VALUES (5, 'max');
+INSERT INTO summarystatistictype (id, name) VALUES (6, 'stdev');
+INSERT INTO summarystatistictype (id, name) VALUES (7, 'vald');
+INSERT INTO summarystatistictype (id, name) VALUES (8, 'invd');
+
+ALTER TABLE summarystatistictype ENABLE TRIGGER ALL;
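+
+-- The statistic names follow the DDI sumStat type abbreviations:
+-- medn = median, stdev = standard deviation, vald = valid cases,
+-- invd = invalid (missing) cases.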
+
+ALTER TABLE vdcuser DISABLE TRIGGER ALL;
+
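+-- Bootstrap network administrator account (username 'networkAdmin'). The
+-- encryptedpassword value is presumably a Base64-encoded SHA-1 digest of the
+-- installer's default password; it should be changed after the first login.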
+INSERT INTO vdcuser (id, version, email, firstname, lastname, username, encryptedpassword, networkrole_id, active, agreedtermsofuse) VALUES (1, 1, 'dataverse@lists.hmdc.harvard.edu', 'Network', 'Admin', 'networkAdmin', 'tf0bLmzOFx5JrBhe2EIraS5GBnI=', 2, true, true);
+
+ALTER TABLE vdcuser ENABLE TRIGGER ALL;
+
+--
+-- Data for Name: metadata, template; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE "metadata" DISABLE TRIGGER ALL;
+-- Default metadata - contains no metadata values
+INSERT INTO metadata( id, version ) VALUES ( 1, 1);
+
+ALTER TABLE "metadata" ENABLE TRIGGER ALL;
+
+
+ALTER TABLE "template" DISABLE TRIGGER ALL;
+
+INSERT INTO template( id, version, vdcnetwork_id, name,metadata_id,enabled) VALUES (1, 1, 0, 'Dataverse Network Default Template',1,true);
+
+ALTER TABLE "template" ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1824 (class 0 OID 113895)
+-- Dependencies: 1290
+-- Data for Name: vdcnetwork; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdcnetwork DISABLE TRIGGER ALL;
+
+INSERT INTO vdcnetwork (id, version, name, networkpageheader, networkpagefooter, announcements, displayannouncements, aboutthisdataversenetwork, contactemail, systememail, defaultvdcheader, defaultvdcfooter, defaultvdcabouttext, defaultvdcannouncements, displayvdcannouncements, displayvdcrecentstudies, defaulttemplate_id, allowcreaterequest, defaultnetworkadmin_id,protocol,authority,handleregistration,termsofuseenabled, deposittermsofuseenabled, downloadtermsofuseenabled, defaultdisplaynumber, exportperiod, exporthourofday) VALUES (0, 1, '[Your]', ' ', ' ', 'A description of your Dataverse Network or announcements may be added here. Use Network Options to edit or remove this text.', TRUE, 'This About page is not used anymore in the DVN application.', 'dataverse@lists.hmdc.harvard.edu','dataverse@lists.hmdc.harvard.edu', ' ', ' ', 'This About page is not used anymore in the DVN application.', '', TRUE, TRUE, 1, FALSE,1,'hdl','TEST',false,false,false,false,16,'daily',3);
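+
+-- protocol 'hdl' and authority 'TEST' are persistent-identifier placeholders;
+-- handleregistration stays false until a real Handle.Net authority is
+-- configured for the network.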
+
+update vdcnetwork set defaultvdcheader='<style type="text/css">
+body {margin:0; padding:0;}
+</style>
+<div style="width:100%; height:40px; background: url(/dvn/resources/images/customizationpattern.png) repeat-x left -35px #698DA2;"></div>
+<div style="margin:0 auto; max-width:1000px;">';
+
+update vdcnetwork set defaultvdcfooter='</div>';
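+
+-- The default header opens a centered wrapper <div> (max-width 1000px) and the
+-- footer closes it, so the two must stay balanced if either is customized.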
+
+
+
+update vdcnetwork set requireDVDescription = false,
+ requireDVaffiliation = false,
+ requireDVclassification = false,
+ requireDVstudiesforrelease = false;
+
+ALTER TABLE vdcnetwork ENABLE TRIGGER ALL;
+
+
+--
+-- TOC entry 1807 (class 0 OID 113797)
+-- Dependencies: 1265
+-- Data for Name: templatefield; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE templatefield DISABLE TRIGGER ALL;
+
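+-- fieldinputlevelstring sets how the default template treats each studyfield
+-- ('required', 'recommended' or 'optional'); displayorder -1 presumably defers
+-- to the studyfield's own display order.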
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(1,1,1,1,'required',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(2,1,1,2,'required',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(3,1,1,3,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(4,1,1,4,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(5,1,1,5,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(6,1,1,6,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(7,1,1,7,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(8,1,1,8,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(9,1,1,9,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(10,1,1,10,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(11,1,1,11,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(12,1,1,12,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(13,1,1,13,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(14,1,1,14,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(15,1,1,15,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(16,1,1,16,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(17,1,1,17,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(18,1,1,18,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(19,1,1,19,'recommended',-1);
+
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(20,1,1,20,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(21,1,1,21,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(22,1,1,22,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(23,1,1,23,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(24,1,1,24,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(25,1,1,25,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(26,1,1,26,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(27,1,1,27,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(28,1,1,28,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(29,1,1,29,'optional',-1);
+
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(30,1,1,30,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(31,1,1,31,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(32,1,1,32,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(33,1,1,33,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(34,1,1,34,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(35,1,1,35,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(36,1,1,36,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(37,1,1,37,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(38,1,1,38,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(39,1,1,39,'recommended',-1);
+
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(40,1,1,40,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(41,1,1,41,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(42,1,1,42,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(43,1,1,43,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(44,1,1,44,'recommended',-1);
+
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(45,1,1,45,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(46,1,1,46,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(47,1,1,47,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(48,1,1,48,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(49,1,1,49,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(50,1,1,50,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(51,1,1,51,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(52,1,1,52,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(53,1,1,53,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(54,1,1,54,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(55,1,1,55,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(56,1,1,56,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(57,1,1,57,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(58,1,1,58,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(59,1,1,59,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(60,1,1,60,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(61,1,1,61,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(62,1,1,62,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(63,1,1,63,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(64,1,1,64,'optional',-1);
+
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(65,1,1,65,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(66,1,1,66,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(67,1,1,67,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(68,1,1,68,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(69,1,1,69,'optional',-1);
+
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(70,1,1,70,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(71,1,1,71,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(72,1,1,72,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(73,1,1,73,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(74,1,1,74,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(75,1,1,75,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(76,1,1,76,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(77,1,1,77,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(78,1,1,78,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(79,1,1,79,'recommended',-1);
+
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(80,1,1,80,'recommended',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(81,1,1,81,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(82,1,1,82,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(83,1,1,83,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(84,1,1,84,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(85,1,1,85,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(86,1,1,86,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(87,1,1,87,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(88,1,1,88,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(89,1,1,89,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(90,1,1,90,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(91,1,1,91,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(92,1,1,92,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(93,1,1,93,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(94,1,1,94,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(95,1,1,95,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(96,1,1,96,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(97,1,1,97,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(98,1,1,98,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(99,1,1,99,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(100,1,1,100,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(101,1,1,101,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(102,1,1,102,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(103,1,1,103,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(104,1,1,104,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(105,1,1,105,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(106,1,1,106,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(107,1,1,107,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(108,1,1,108,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(109,1,1,109,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(110,1,1,110,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(111,1,1,111,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(112,1,1,112,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(113,1,1,113,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(114,1,1,114,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(115,1,1,115,'optional',-1);
+INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(116,1,1,116,'optional',-1);
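+-- The field input level marks each template field above as 'recommended' or
+-- 'optional' (presumably 'required' is also a valid level); a displayorder
+-- of -1 apparently defers to the study field's default ordering.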
+
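+-- Advance the templatefield id sequence past the rows loaded above (max id
+-- 116); with is_called = false, the next nextval() call returns 150 itself.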
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('templatefield', 'id'), 150, false);
+
+
+ALTER TABLE templatefield ENABLE TRIGGER ALL;
+
+
+
+--
+-- TOC entry 1814 (class 0 OID 113843)
+-- Dependencies: 1276
+-- Data for Name: templatefilecategory; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE templatefilecategory DISABLE TRIGGER ALL;
+
+INSERT INTO templatefilecategory(id, template_id, name, displayorder) VALUES(1,1,'Documentation',1);
+INSERT INTO templatefilecategory(id, template_id, name, displayorder) VALUES(2,1,'Data Files',2);
+
+ALTER TABLE templatefilecategory ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1855 (class 0 OID 0)
+-- Dependencies: 1275
+-- Name: templatefilecategory_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER%
+--
+
+SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('templatefilecategory', 'id'), 5, false);
+
+
+-- Completed on 2006-09-19 16:05:06 Eastern Standard Time
+
+--
+-- PostgreSQL database dump complete
+--
+
+
+-- Sequence: studyid_seq
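+-- Generates ids for new studies; START 10000 presumably reserves the lower
+-- range for pre-loaded or migrated records.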
+
+-- DROP SEQUENCE studyid_seq;
+
+CREATE SEQUENCE studyid_seq
+  INCREMENT 1
+  MINVALUE 1
+  MAXVALUE 9223372036854775807
+  START 10000
+  CACHE 1;
+ALTER TABLE studyid_seq OWNER TO "%POSTGRES_USER%";
+
+-- Sequence: filesystemname_seq
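+-- Generates unique on-disk names for stored study files, presumably so
+-- uploads never collide regardless of their original file names.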
+
+-- DROP SEQUENCE filesystemname_seq;
+
+CREATE SEQUENCE filesystemname_seq
+  INCREMENT 1
+  MINVALUE 1
+  MAXVALUE 9223372036854775807
+  START 2
+  CACHE 1;
+ALTER TABLE filesystemname_seq OWNER TO "%POSTGRES_USER%";
+
+
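+-- Formats offered for tabular data files: S-Plus (plain text), Stata, and an
+-- R transport format, presumably used when converting subsettable files for
+-- download.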
+INSERT INTO DataFileFormatType (id, value, name, mimeType) VALUES (1, 'D02', 'Splus', 'text/plain');
+INSERT INTO DataFileFormatType (id, value, name, mimeType) VALUES (2, 'D03', 'Stata', 'application/x-stata');
+INSERT INTO DataFileFormatType (id, value, name, mimeType) VALUES (3, 'D04', 'R', 'application/x-rlang-transport');
+
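+-- OAI harvesting formats: each metadataPrefix maps to an XSLT (shipped in
+-- config/) that converts harvested records to DDI; native DDI needs no
+-- stylesheet, hence the null.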
+INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (0, 'ddi', 'DDI', null);
+INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (1, 'oai_etdms', 'MIF', 'mif2ddi.xsl');
+INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (2, 'oai_dc', 'DC', 'oai_dc2ddi.xsl');
+INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (3, 'oai_fgdc', 'FGDC', 'fgdc2ddi.xsl');
+INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (4, 'dcmi_terms', 'DCMI_terms', 'dcmi_terms2ddi.xsl');
+
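+-- Indexes on id and foreign-key columns to speed up study, file, variable,
+-- and metadata lookups: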
+create index datavariable_id_index on datavariable (id);
+create index summarystatistic_id_index on summarystatistic (id);
+create index summarystatistic_datavariable_id_index on summarystatistic (datavariable_id);
+create index variablecategory_id_index on variablecategory (id);
+create index variablecategory_datavariable_id_index on variablecategory (datavariable_id);
+create index variablerange_id_index on variablerange (id);
+create index study_id_index on study(id);
+create index study_owner_id_index on study(owner_id);
+create index weightedvarrelationship_id_index on weightedvarrelationship (weighted_variable_id,variable_id);
+create index studyfile_id_index on studyfile(id);
+create index datavariable_datatable_id_index on datavariable(datatable_id);
+create index variablerange_datavariable_id_index on variablerange (datavariable_id);
+create index metadata_id_index on metadata(id);
+create index studyabstract_metadata_id_index on studyabstract(metadata_id);
+create index studyauthor_metadata_id_index on studyauthor(metadata_id);
+create index studydistributor_metadata_id_index on studydistributor(metadata_id);
+create index studygeobounding_metadata_id_index on studygeobounding(metadata_id);
+create index studygrant_metadata_id_index on studygrant(metadata_id);
+create index studykeyword_metadata_id_index on studykeyword(metadata_id);
+create index studynote_metadata_id_index on studynote(metadata_id);
+create index studyotherid_metadata_id_index on studyotherid(metadata_id);
+create index studyotherref_metadata_id_index on studyotherref(metadata_id);
+create index studyproducer_metadata_id_index on studyproducer(metadata_id);
+create index studyrelmaterial_metadata_id_index on studyrelmaterial(metadata_id);
+create index studyrelpublication_metadata_id_index on studyrelpublication(metadata_id);
+create index studyrelstudy_metadata_id_index on studyrelstudy(metadata_id);
+create index studysoftware_metadata_id_index on studysoftware(metadata_id);
+create index studytopicclass_metadata_id_index on studytopicclass(metadata_id);
+create index template_metadata_id_index on template(metadata_id);
+create index studyfileactivity_id_index on studyfileactivity(id);
+create index studyfileactivity_studyfile_id_index on studyfileactivity(studyfile_id);
+create index studyfileactivity_study_id_index on studyfileactivity(study_id);
+
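+-- Seed the network-wide usage counters (downloads, studies, files) at zero: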
+INSERT INTO vdcnetworkstats (id,vdcnetwork_id,downloadcount,studycount,filecount) values (0,0,0,0,0);
+
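+-- Creative Commons license choices offered for studies; id 2 stays reserved
+-- for the cc0 entry commented out below.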
+insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 1, 'cc by', 'CC Attribution (cc by)', 'http://creativecommons.org/licenses/by/3.0/', 'http://creativecommons.org/licenses/by/3.0/rdf', 'http://i.creativecommons.org/l/by/3.0/88x31.png' );
+-- removed until we support cc0
+--insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 2, 'cc0','CC Zero (cc0)','http://creativecommons.org/publicdomain/zero/1.0/','http://creativecommons.org/publicdomain/zero/1.0/rdf','http://i.creativecommons.org/l/zero/1.0/88x31.png');
+insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 3, 'cc by-sa','CC Attribution Share Alike (cc by-sa)','http://creativecommons.org/licenses/by-sa/3.0/', 'http://creativecommons.org/licenses/by-sa/3.0/rdf', 'http://i.creativecommons.org/l/by-sa/3.0/88x31.png' );
+insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 4, 'cc by-nd','CC Attribution No Derivatives (cc by-nd)','http://creativecommons.org/licenses/by-nd/3.0/', 'http://creativecommons.org/licenses/by-nd/3.0/rdf', 'http://i.creativecommons.org/l/by-nd/3.0/88x31.png' );
+insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 5, 'cc by-nc','CC Attribution Non-Commercial (cc by-nc)','http://creativecommons.org/licenses/by-nc/3.0/', 'http://creativecommons.org/licenses/by-nc/3.0/rdf', 'http://i.creativecommons.org/l/by-nc/3.0/88x31.png' );
+insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 6, 'cc by-nc-sa','CC Attribution Non-Commercial Share Alike (cc by-nc-sa)','http://creativecommons.org/licenses/by-nc-sa/3.0/', 'http://creativecommons.org/licenses/by-nc-sa/3.0/rdf', 'http://i.creativecommons.org/l/by-nc-sa/3.0/88x31.png' );
+insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 7, 'cc by-nc-nd','CC Attribution Non-Commercial No Derivatives (cc by-nc-nd)','http://creativecommons.org/licenses/by-nc-nd/3.0/', 'http://creativecommons.org/licenses/by-nc-nd/3.0/rdf', 'http://i.creativecommons.org/l/by-nc-nd/3.0/88x31.png' );
+
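+-- Metadata export formats; the partial exclude/select flags presumably
+-- indicate whether individual sections of a record can be dropped or picked
+-- (supported for DDI only).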
+INSERT INTO metadataformattype (id, name, mimetype, namespace, formatschema, partialexcludesupported, partialselectsupported) VALUES (1, 'ddi', 'application/xml', 'http://www.icpsr.umich.edu/DDI', 'http://www.icpsr.umich.edu/DDI/Version2-0.xsd', true, true);
+INSERT INTO metadataformattype (id, name, mimetype, namespace, formatschema, partialexcludesupported, partialselectsupported) VALUES (2, 'oai_dc', 'application/xml', 'http://www.openarchives.org/OAI/2.0/oai_dc/', 'http://www.openarchives.org/OAI/2.0/oai_dc.xsd', false, false);
+INSERT INTO metadataformattype (id, name, mimetype, namespace, formatschema, partialexcludesupported, partialselectsupported) VALUES (3, 'marc', 'application/octet-stream', 'http://www.loc.gov/marc/', 'MARC 21', false, false);
+
+-- Create the network guest book:
+
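+-- vdc_id is null, so this questionnaire presumably serves as the network-wide
+-- default; first name, last name, and e-mail are required.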
+INSERT INTO guestbookquestionnaire(enabled,firstnamerequired, lastnamerequired, emailrequired, institutionrequired,  positionrequired, vdc_id) VALUES (true, true, true, true, false, false, null);
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DVN-web/installer/dvninstall/robots.txt	Wed May 13 11:50:21 2015 +0200
@@ -0,0 +1,24 @@
+User-agent: *
+Disallow: /
+User-agent: Googlebot
+Disallow: /
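+# Crawlers follow only the most specific User-agent section that matches them,
+# so Googlebot is blocked explicitly rather than relying on the catch-all above.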
+#Crawl-delay: 120
+#Disallow: /dvn/faces/javax.faces.resource
+#Disallow: /dvn/OAIHandler
+#Disallow: /dvn/faces/ContactUsPage.xhtml
+#Disallow: /dvn/dv/*/faces/ContactUsPage.xhtml
+#Disallow: /dvn/faces/study/TermsOfUsePage.xhtml
+#Disallow: /dvn/faces/subsetting/SubsettingPage.xhtml
+#Disallow: /dvn/dv/*/faces/subsetting/SubsettingPage.xhtml
+#Disallow: /dvn/FileDownload/
+#Disallow: /FileDownload/
+#Disallow: /dvn/dv/*/FileDownload/
+#Disallow: /dvn/resources/
+#Disallow: /dvn/api/
+#
+
+
+# Created initially using: http://www.mcanerin.com/EN/search-engine/robots-txt.asp
+# Verified using: http://tool.motoricerca.info/robots-checker.phtml
Binary file DVN-web/installer/dvninstall/web-core.jar has changed