# HG changeset patch # User "jurzua " # Date 1431510621 -7200 # Node ID 1b2188262ae9ef694d6c3a184d4a0046a2e7188d # Parent dd9adfc73390c8992f2e4651deb3d794cedded91 adding the installer. diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/appdeploy/AS.properties.TEMPLATE --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/appdeploy/AS.properties.TEMPLATE Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,1 @@ +AS_ADMIN_PASSWORD=%GF_ADMIN_PASSWORD% diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/appdeploy/ant-deploy.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/appdeploy/ant-deploy.xml Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,111 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/appdeploy/build-impl.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/appdeploy/build-impl.xml Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/appdeploy/build.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/appdeploy/build.xml Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,9 @@ + + + + + + Builds, tests, and runs the project DVN-web. 
+ + + diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/appdeploy/glassfish.properties.TEMPLATE --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/appdeploy/glassfish.properties.TEMPLATE Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,6 @@ +gfv3.port=4848 +gfv3.host=127.0.0.1 +gfv3.admin.url=http\://${gfv3.host}\:${gfv3.port} +gfv3.username=admin +gfv3.root=%GF_ROOT_DIR%/glassfish +gfv3.url=http\://${gfv3.host}\:8080 diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/appdeploy/private.properties --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/appdeploy/private.properties Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,1 @@ +deploy.ant.properties.file=glassfish.properties diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/appdeploy/project.properties --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/appdeploy/project.properties Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,39 @@ +build.archive.dir=${build.dir}/jar +build.classes.dir=${build.archive.dir} +build.classes.excludes=**/*.java,**/*.form,**/.nbattrs +build.dir=build +build.generated.dir=${build.dir}/generated +client.module.uri=DVN-web.war +client.urlPart= +compile.jsps=true +debug.classpath=${javac.classpath}:${build.classes.dir}:${jar.content.additional}:${run.classpath} +display.browser=true +dist.dir=dist +dist.jar=${dist.dir}/DVN-web.war +dist.javadoc.dir=${dist.dir}/javadoc +j2ee.appclient.mainclass.args=-client ${dist.jar} ${j2ee.appclient.args} +j2ee.platform=1.6 +j2ee.server.type=gfv3ee6 +jar.compress=false +jar.name=DVN-web.war +javac.debug=true +javac.deprecation=false +javac.source=${default.javac.source} +javac.target=${default.javac.target} +javadoc.author=false +javadoc.encoding= +javadoc.noindex=false +javadoc.nonavbar=false +javadoc.notree=false +javadoc.preview=true +javadoc.private=false +javadoc.splitindex=true +javadoc.use=true +javadoc.version=false 
+javadoc.windowtitle= +meta.inf=src/conf +no.dependencies=false +platform.active=default_platform +resource.dir=setup +source.root=. +src.dir=src diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/dcmi_terms2ddi.xsl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/config/dcmi_terms2ddi.xsl Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,156 @@ + + + + + + + + + + + + + + + + + handle + handle + + + + hdl: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/dvn_data_functions.R --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/config/dvn_data_functions.R Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,1765 @@ +library(foreign) +library(stats) +library(methods) +library(UNF) +library(R2HTML) + +options(digits.secs = 3) + + +############ parameters ######################## +univarstathdr<-c("Valid Cases", "Missing Cases(NAs)", "Total", "Mean", "Standard deviation", "Skewness", "Kurtosis", "Coefficient of variation", "Mode", "Minimum","1st Quartile","Median","3rd Quartile","Maximum","Range","Interquartile Range","Normality Test(Shapiro-Wilk Statistic)", "Normality Test(Shapiro-Wilk Statistic: p value)") + +imgprfx1<-c("\n") +imgsffx2<-c("\" >\n") + +############# parameters ####################### +# Note: +# - The parameter na.strings is set to "NA", even though in the DVN tab files Missing Values are encoded as empty strings; +# this may be some sort of a legacy thing (may be older files still had "NA"s in them as this was written?). After calling +# this function, read.table141vdc, the DVN application classes (for ex., DvnRforeignFileConversionServiceImpl.java) make +# another call to reset all the empties to NA. 
Some functions further down in this file also do that explicitly. +# - I changed the strip.white parameter to FALSE (-- L.A., 05/07/2013); having it set to TRUE was resulting in the dropping +# the empty entries that were supposed to represent Missing Values, when the subset contained a single numeric column, +# no matter what the na.strings= was set to. + +read.table141vdc<-function (file, header = FALSE, sep = "\t", quote = "", dec = ".", col.names=NULL, na.strings = "NA",colClasses = NA, colClassesx = NA, nrows = -1, skip = 0, check.names = TRUE,fill = !blank.lines.skip, strip.white = FALSE, blank.lines.skip = FALSE, comment.char = "", varFormat=list()) +{ + if (is.character(file)) { + file <- file(file, "r") + on.exit(close(file)) + } + if (!inherits(file, "connection")) stop("argument 'file' must be a character string or connection") + if (!isOpen(file)) { + open(file, "r") + on.exit(close(file)) + } + if (skip > 0) readLines(file, skip) + + cols<- length(colClassesx) + if (is.null(col.names)) col.names<-paste("V", 1:cols, sep = "") + if(check.names) col.names <- make.names(col.names, unique = TRUE) + what <- rep(list(""), cols) + names(what) <- col.names + known <- colClasses %in% c("logical", "integer", "numeric", "complex", "character") + what[known] <- sapply(colClasses[known], do.call, list(0)) + + data <- scan(file = file, what = what, sep = sep, quote = quote, dec = dec, nmax = nrows, skip = 0, na.strings = na.strings, quiet = TRUE, fill = fill, strip.white = strip.white, blank.lines.skip = blank.lines.skip, multi.line = FALSE, comment.char = comment.char) + + nlines <- length(data[[1]]) + + if (cols != length(data)) { + warning(paste("cols =", cols, " != length(data) =", length(data))) + cols <- length(data) + } + + #cat("colClassesx:\n") + #cat(paste(class(colClassesx),"\n")) + #cat(paste(colClassesx,"\n",sep=" ")) + #cat(paste(class(varFormat),"\n")) + #cat(paste(length(varFormat),"\n")) + #cat("varFormat:\n") + #cat(paste(varFormat,"\n",sep=" ")) + 
+ saved.options <- options(digits.secs = 3) + + for (i in 1:cols) { + #if (known[i]) next + #data[[i]] <- as(data[[i]], colClasses[i]) + #cat(paste(class(data[[i]]),"\n")) + #cat(paste(mode(data[[i]]),"\n")) + if (colClassesx[i] == 0) { + + # Make sure the character values are handled as such: + #data[[i]]<-I(data[[i]]); + data[[i]]<-as.character(data[[i]]); + # And replace empty strings with NAs: + data[[i]][ data[[i]] == '' ]<-NA + # And remove the double quotes we had put around the non-missing + # string values as they were stored in the TAB files: + + data[[i]]<-sub("^\"", "", data[[i]]) + data[[i]]<-sub("\"$", "", data[[i]]) + + if (is.null(unlist(varFormat[col.names[i]]))){ + #cat("before-s=",i, "\n") + data[[i]] <- as(data[[i]], "character") + #cat("after-s=",i, "\n") + } else if (!is.null(unlist(varFormat[col.names[i]]))){ + if (varFormat[col.names[i]] == 'D'){ + #cat("before-d=",i, "\n") + #data[[i]]<-as.Date(data[[i]], "%Y-%m-%d") + data[[i]]<-as.Date(data[[i]]); + #cat("after-d=",i, "\n") + colClassesx[i]<-1 + } else if (varFormat[col.names[i]] == 'T'){ + #cat("before-t=",i,"\n") + data[[i]]<-as.POSIXct(strptime(data[[i]], "%T")) + #cat("after-t=", i,"\n") + colClassesx[i]<-1 + } else if (varFormat[col.names[i]] == 'DT'){ + data[[i]]<-as.POSIXct(strptime(data[[i]], "%F %H:%M:%OS")) + colClassesx[i]<-1 + } else if (varFormat[col.names[i]] == 'JT'){ + data[[i]]<-as.POSIXct(strptime(data[[i]], "%j %H:%M:%OS")) + colClassesx[i]<-1 + } + } + } else if (colClassesx[i] == 3) { + + # special case for Boolean/logical variables: + # (these will be passed from the application as vectors of 0s and 1s) + # also, note that this type will be used only when the subset is + # created as part of the "save-as" functionality. When it's for + # analysis, the DVN "boolean" variable will be of type 1, because + # they will be handled as regular integer categoricals with the labels + # "TRUE" and "FALSE". -- L.A. 
+ #print(data[[i]]) + + for (j in 1:length(data[[i]])) { + if (!is.na(data[[i]][j]) && data[[i]][j] == "") { + data[[i]][j]<-NA + } + } + + #print(data[[i]]) + + data[[i]]<-as.logical(as.numeric(data[[i]])) + #print(data[[i]]) + + + } else { + data[[i]]<-type.convert(data[[i]], dec = dec) + #cat("data[[", i, "]]:", class(data[[i]]), "\n", sep="") + #if ( (class(data[[i]]) == "numeric") & (colClassesx[i]==1) ) { + # colClassesx[i]<-2 + #} + } + } + + options(saved.options) + + class(data) <- "data.frame" + row.names(data) <- as.character(seq(len = nlines)) + attr(data, "var.type")<-colClassesx + #cat("end of read.table141vdc\n") + data +} # end of read.table141vdc + +transformrecoded <-function(x, recodedvarsindx = 2, dec = ".", col.names = NULL, colClassesx = undef, varFormat = list()){ + + #cat("inside transformrecoded\n") + #cat(paste(col.names,"\n",sep="")) + + for (i in recodedvarsindx:length(x)) { + + #i = recodedindx[j] + #cat("index: ") + #cat(i) + #cat("\n") + + #cat(paste(class(x[[i]]),"\n")) + #cat(paste(mode(x[[i]]),"\n")) + + #cat(paste(varFormat[col.names[i]],"\n")) + #cat(paste(unlist(varFormat[col.names[i]]),"\n")) + + testbool<-is.null(unlist(varFormat[col.names[i]])) + #cat(as.character(testbool)) + + + if (!is.null(unlist(varFormat[col.names[i]]))){ + #cat("inside the if loop.\n") + if (varFormat[col.names[i]] == 'D'){ + x[[i]]<-as.Date(x[[i]]) + #cat("x[[i]] is a Date;\n") + colClassesx[i]<-1 + } else if (varFormat[col.names[i]] == 'T'){ + x[[i]]<-as.POSIXct(strptime(x[[i]], "%T")) + colClassesx[i]<-1 + } else if (varFormat[col.names[i]] == 'DT'){ + x[[i]]<-as.POSIXct(strptime(x[[i]], "%F %H:%M:%OS")) + colClassesx[i]<-1 + } else if (varFormat[col.names[i]] == 'JT'){ + x[[i]]<-as.POSIXct(strptime(x[[i]], "%j %H:%M:%OS")) + colClassesx[i]<-1 + } + } + } + x +} + +########################################################### +createvalindex <-function(dtfrm, attrname=NULL){ + # this version relies on the list-based approach + # completely new final 
[without old cod block] + if (is.null(dtfrm)) { + stop("dataframe is not specified\n") + } else if (is.null(attrname)){ + stop("attrname is is not specified\n") + } else if (!exists('dtfrm')) { + stop("dataframe is not found\n") + } else if (!is.data.frame(dtfrm) ) { + stop("Specified object is not a data.frame\n") + } + + #DBG<-TRUE + DBG<-FALSE + try ( { + if (attrname == 'val.index') { + tabletype<-'val.table' + valtable<-attr(dtfrm, 'val.table') + } else if (attrname == 'missval.index') { + tabletype<-'missval.table' + valtable<-attr(dtfrm, 'missval.table') + } else stop ("Specified attrname must be either val.index or missval.index\n") + + if (DBG) {cat("\nattribute name=",attrname,"\n")} + if (length(valtable)) { + vlindex <- list(); + vlst <- list(); + lstall<-list() + vltbl<-list() + if (DBG) { + cat("length(",attrname,")=",length(valtable),"\n") + cat("varidset(",attrname,")=",names(valtable),"\n") + } + nameset<-names(valtable) + if (DBG) { + str(nameset) + cat("\nnameset:", paste(nameset,collapse="|"), "\n",sep="") + } + for (i in 1:(length(valtable))){ + if (DBG) { + cat("var=",i,"\n", sep="") + cat("\tlstall:", paste(if (length(lstall)) {as.vector(lstall,mode="integer")} else {"empty"}, collapse=","), "\n",sep="") + } + nameseti<-nameset[i] + if (!is.null(lstall[[as.character(i)]])){next} + lsti<-list() + + # set i to the new list + lsti[[as.character(i)]]<-i + lstall[[as.character(i)]]<-i + vlindex[[as.character(nameseti)]]<-nameset[i] + vltbl[[as.character(nameseti)]]<-valtable[[i]] + + if (DBG) {cat("\tlsti:", paste(as.vector(lsti, mode="integer"),collapse=","), "\n",sep="")} + for (j in i:length(valtable)){ + if (!is.null(lstall[[as.character(j)]])){next} + if (attrname == 'val.index') { + if ( identical( names(valtable[[i]]), names(valtable[[j]]) ) & identical(valtable[[i]], valtable[[j]]) ) { + if (DBG) {cat("\tVL:new duplicate (var#) to be added:", j,"\n",sep="")} + lsti[[as.character(j)]]<-j + vlindex[[as.character(nameset[j])]]<-nameseti + 
lstall[[as.character(j)]]<-j + } + } else if (attrname == 'missval.index') { + if ( identical(valtable[[i]], valtable[[j]]) ) { + if (DBG) {cat("\tMSVL: new duplicate (var#) to be added:", j,"\n",sep="")} + lsti[[as.character(j)]]<-j + vlindex[[as.character(nameset[j])]]<-nameseti + lstall[[as.character(j)]]<-j + } + } + } + if (DBG) {cat("\tlsti to be attached to vlst:", paste(as.vector(lsti, mode="integer"),collapse=","), "\n",sep="")} + if (length(lsti)){ + vlst[[nameseti]]<-nameset[as.vector(lsti, mode="integer")] + } + } + if (DBG) { + cat("\nvlst=attr(dtfrm,'val.list') <- vlst\n") + str(vlst) + cat("\nvlindex=attr(dtfrm,'val.index') <- vlindex\n") + str(vlindex) + cat("\nvltbl=attr(dtfrm,'val.table')<- valtablex\n") + str(vltbl) + cat("\nnames(vltbl): equivalent to tmpunique\n") + cat("unique var IDs:", paste(names(vltbl),collapse="|"), "\n",sep="") + } + attr(dtfrm, attrname)<-vlindex + + if (attrname == 'val.index') { + attr(dtfrm, 'val.list') <- vlst + attr(dtfrm, 'val.table') <- vltbl + } else if (attrname == 'missval.index') { + attr(dtfrm, 'missval.list') <- vlst + attr(dtfrm, 'missval.table')<-vltbl + } + + } else { + # no value labels + #vlindex<-rep(NA, dim(dtfrm)[2]) + attr(dtfrm, attrname)<-NULL + if (attrname == 'val.index') { + attr(dtfrm, 'val.list')<- NA + } else if (attrname == 'missval.index') { + attr(dtfrm, 'missval.list') <- NA + } + } + + invisible(dtfrm) + }) # end try +} # end of createvalindex + +########################################################### +# 2 table functions that return univariate statistics +# continuous case + +frqtbl.ctn<-function(x){ + frqtbl<-list() + tbl1<-table(x, useNA='ifany') + frqtbl[['Mode']]<-NA + if (length(x) > length(tbl1)) { + frqtbl[['Mode']]<- names(tbl1)[which.max(tbl1)] + } + frqtbl +} + +frqtbl.dsc<-function(x){ + frqtbl<-list() + DBG<-FALSE + + # ftbl: frequency table + ftbl<-table(x, useNA='ifany') + + # get the mode + frqtbl[['Mode']]<-NA + frqtbl[['freqtbl']]<-NA + frqtbl[['pcnttbl']]<- NA + 
if (length(x) > length(ftbl)){ + frqtbl[['Mode']]<-names(ftbl[which.max(ftbl)]) + if ((length(ftbl)<=50)){ + # ptbl: percentage table + ptbl<-100*(ftbl/sum(ftbl)) + # set up the return list + frqtbl[['freqtbl']]<- ftbl + frqtbl[['pcnttbl']]<- ptbl + if (DBG){ + cat("\ttable header:",paste(dimnames(ftbl)[[1]], collapse='|'), "\n") + cat("\ttable frequency:",paste(ftbl, collapse='|'), "\n") + cat("\tstatistical mode:", frqtbl[['Mode']], "\n") + cat("\tstatistical mode(freq):", tbl1[which.max(ftbl)], "\n") + } + } + } + + frqtbl +} + +sw.stat<-function(x,N){ + DBG<-TRUE + DBG<-FALSE + SW<-list() + SW$value <- NA + SW$Pvalue <- NA + if ((N >= 3) & (N <= 5000)) { + shpr <- try(shapiro.test(x)) + if (attr(shpr, "class") == 'htest') { + if(DBG) {cat("sw statistics assigned\n")} + SW$value <- shpr[[1]][[1]] + SW$Pvalue <- shpr[[2]] + } + if(DBG) {cat("sw statistics end\n")} + } + SW +} + +univarStat.cntn<-function(varseti){ + options(digits=3) + DBG<-TRUE + DBG<-FALSE + if(DBG) {cat("pass the point univStat(continuous)\n")} + + N<-sum(complete.cases(varseti)) + svnm<-summary(varseti) + + if (N) { + min.value <- svnm[[1]] + q1.value <- svnm[[2]] + #median.value <- median(varseti) + median.value <- svnm[[3]] + q3.value <- svnm[[5]] + max.value <- svnm[[6]] + range.value <- svnm[[6]]-svnm[[1]] + iqr.value <- svnm[[5]]-svnm[[1]] + mean.value <- svnm[[4]] + } else { + min.value <- NA + q1.value <- NA + median.value <- NA + q3.value <- NA + max.value <- NA + range.value <- NA + iqr.value <- NA + mean.value <- NA + } + + stdv.value <- sd(varseti, na.rm=T) + z0 <- scale(varseti) + if (N >= 2) {cv.value <- stdv.value/svnm[[4]] } else {cv.value <- NA} + if (N >= 3) {skewness.value <- (N/(N-1)/(N-2))*sum((z0)^3, na.rm=T)} else {skewness.value <- NA} + if (N >= 4) {kurtosis.value <- ((N*(N+1)/(N-1))*sum((z0)^4, na.rm=T) - 3*(N-1)^2)/(N-2)/(N-3)} else {kurtosis.value <-NA} + # find the maximum frequency cell + # index: which.max(table(dtfrm[[i]])) + + 
maxfreq<-frqtbl.ctn(x=varseti)[["Mode"]] + SW<-sw.stat(x=varseti,N=N) + statset<- list( + Vald = N, + Invald = sum(is.na(varseti)), + Total = length(varseti), + Mean = mean.value, + Stdev = stdv.value, + Skewness = skewness.value, + Kurtosis = kurtosis.value, + CV = cv.value, + Mode = maxfreq, + Minimum = min.value, + Q1 = q1.value, + Median = median.value, + Q3 = q3.value, + Maximum = max.value, + Range = range.value, + I.Q.R = iqr.value, + S.W.statistic = SW$value, + S.W.P.value = SW$Pvalue + ) + statset +} + +univarStat.dscrt<-function(varseti, ordnl=TRUE){ + DBG<-TRUE + DBG<-FALSE + + if(DBG) {cat("pass the point univStat(discrete)\n")} + N<-sum(complete.cases(varseti)) + if (ordnl){ + median.value <-NULL + if (N) {median.value <- median(varseti, na.rm=TRUE) } + } + tmpfrq<-frqtbl.dsc(x=varseti) + + statset<- list( + Vald = N, + Invald = sum(is.na(varseti)), + Total = length(varseti), + Mode = tmpfrq[["Mode"]], + freqtbl = tmpfrq[["freqtbl"]], + pcnttbl = tmpfrq[["pcnttbl"]] + ) + if (ordnl){ + statset$Median<-median.value + } + statset +} + + +univarStat<-function(dtfrm){ + DBG<-TRUE + DBG<-FALSE + if(DBG) { + cat("\n\nEntered the function univarStat\n") + NAMESET<-names(dtfrm) + } + + STATLST<-list() + + # create temp vars + VARTYPE<-attr(dtfrm, "var.type") + for (i in 1: dim(dtfrm)[2]) { + try ({ + varseti<-dtfrm[[i]] + + if(DBG) {cat("variable name =",NAMESET[i],"\n")} + + N<-sum(complete.cases(varseti)) + + if (VARTYPE[i]== 2) { + + STATLST[[as.character(i)]]<-univarStat.cntn(varseti=varseti) + + } else if (VARTYPE[i] == 1) { + + STATLST[[as.character(i)]]<-univarStat.dscrt(varseti=varseti) + + } else if (VARTYPE[i] == 0) { + + STATLST[[as.character(i)]]<-univarStat.dscrt(varseti=varseti,ordnl=FALSE) + + } else { + + STATLST[[as.character(i)]]<-NULL + + } + + }) # end of try + } # end of the loop + + attr(dtfrm, "univarStat.lst")<-STATLST + + invisible(dtfrm) +} # end of univarStat +########################################################### 
+univarChart<-function(dtfrm, analysisoptn=NULL, imgflprfx=NULL, standalone=T){ + # description + # to print univariate charts + # + # arguments + # dtfrm[[i]] variable name + # analysisoptn Analysis option + # imgflprfx temporary image file prefix + + # local variable + # varlabel variable label (local variable) + # No return value; each image file is written in /tmp + # $RvlsPrfx = "$TMPDIR/Rvls.$PRCSSID"; + # note: value labels will be printed in html tables + # unvlst[[as.character(i)]]<-statset + + # new list-based notations + # USL<-attr(dtfrm,"univarStat.lst") + # chartset[["hstbx"]]<-hstgrmfile + # chartset[["qqplt"]]<-qqpltfile + # chartset[["brchrt"]]<-barpltfile + # USL[[as.character(i)]][["freqtbl"]] + # chrtlst[[as.character(i)]]<-chartset + + + DBG<-FALSE + #DBG<-TRUE + if (is.null(analysisoptn)){ + analysisoptn<-c(1,1,0) + } + + if (is.null(imgflprfx)) { + PRCID<-format(Sys.time(), "R%Y%m%d_%H%M%S") + #imgflprfx<-paste("c:/asone/R/temp/",PRCID,sep="") + imgflprfx<-PRCID + if (DBG) {cat("\nprocessID=",imgflprfx,"\n", sep="")} + } + + + + # function defintions + + + +varlabel.chrt<-function(lblset){ + DBG<-FALSE + #DBG<-TRUE + # variable label processing + if (DBG) {cat("\nEntered varlabel.chrt\n")} + + if (nchar(lblset[["varlabel"]])>45) { + varlabel<- paste(substr(lblset[["varlabel"]], 1, 45), "...") + } else { + varlabel<-lblset[["varlabel"]] + } + lblset[["varlabel"]]<-paste(lblset[["varname"]], ": ", varlabel, sep="") + lblset +} + + + +univarChart.cntn<-function(varseti, imgflprfx, labelset) { + DBG<-FALSE + #DBG<-TRUE + chartset<-list() + + if (DBG) {cat ("univarChart.cntn:varname:", labelset[["varname"]], "\n")} + + #histgram/boxplot + hstgrmfile<-paste(imgflprfx, labelset[["varname"]],"hs.jpg", sep=".") + bitmap(hstgrmfile, type = "jpeg", height = 3.5, width = 3, res=100, pointsize=9) + + layout(matrix(c(1,2),nrow=2,ncol=1), widths=c(1), heights=c(5,1)) + par(mar=c(4,4,1,1), mgp=c(2, 0.5, 0), tcl=-0.25, cex.axis=0.9, cex.lab=0.9) + + 
hist(varseti, main="", xlab=labelset[["varlabel"]], col="lightgrey") + + par(mar=c(2,4,0,1)) + boxplot(varseti, main="", xlab="", ylab="", col="lightgrey", horizontal=T) + + dev.off() + #par(def.par) + + if (!standalone){ + tmpvsldirhs<-unlist(strsplit(hstgrmfile,"/")) + hstgrmfile<-paste(tmpvsldirhs[(length(tmpvsldirhs)-1):length(tmpvsldirhs)],collapse="/") + } + chartset[["hstbx"]]<-hstgrmfile + + #qq-plot + qqpltfile<-paste(imgflprfx, labelset[["varname"]],"qq.jpg", sep=".") + bitmap(qqpltfile, type = "jpeg", height = 3, width = 3, res= 100, pointsize=8.5) + + par(tcl=-0.25, cex.axis=0.9, cex.lab=1.0) + qqnorm(varseti, main="Normal Q-Q Plot", ylab=labelset[["varlabel"]], pch=15) + qqline(varseti) + dev.off() + #par(def.par) + if (!standalone){ + tmpvsldirqq<-unlist(strsplit(qqpltfile,"/")) + qqpltfile<-paste(tmpvsldirqq[(length(tmpvsldirqq)-1):length(tmpvsldirqq)],collapse="/") + } + chartset[["qqplt"]]<-qqpltfile + chartset +} + +univarChart.dscrt<-function(frqtbl, imgflprfx, labelset){ + DBG<-FALSE + #DBG<-TRUE + chartset<-list() + if (DBG) {cat ("univarChart.dscrt:varname:", labelset[["varname"]], "\n")} + + barpltfile<-paste(imgflprfx, labelset[["varname"]], "bp.jpg", sep=".") + bitmap(barpltfile, type = "jpeg", height = 3, width = 3, res= 100, pointsize=8.5) + par(tcl=-0.25, cex.axis=0.9, cex.lab=1.0) + barplot(frqtbl, col="lightgrey", main="", xlab=labelset[["varlabel"]], ylab="Frequency") + dev.off() + #par(def.par) + + if (!standalone){ + tmpvsldirbp<-unlist(strsplit(barpltfile,"/")) + barpltfile<-paste(tmpvsldirbp[(length(tmpvsldirbp)-1):length(tmpvsldirbp)],collapse="/") + } + + chartset[["brchrt"]]<-barpltfile + chartset +} + + ############################ + # implementation + + + varlabels<-attr(dtfrm,"var.labels") + varnames<-names(dtfrm) + vartypes<-attr(dtfrm,"var.type") + + + STATLST<-NULL + if (!is.null(attr(dtfrm,"univarStat.lst"))) { + STATLST<-attr(dtfrm,"univarStat.lst") + } + + chrtlst<-list() + for (i in 1: dim(dtfrm)[2]){ + try( { + if 
(DBG) {cat("univarChart:",i,"-th var\n")} + chrtlbl<-list(varname=varnames[i], varlabel=varlabels[i]) + labelset<-varlabel.chrt(lblset=chrtlbl) + + varseti<-dtfrm[[i]] + + if (is.null(STATLST[[as.character(i)]])) { + tmpvald<-sum(complete.cases(varseti)) + } else { + tmpvald<-STATLST[[as.character(i)]][["Vald"]] + } + if (DBG) {cat("tmpvald=",tmpvald,"\n")} + + chartset<-list() + + if (vartypes[i]==2) { + #Continuous Variable + if (analysisoptn[2] & tmpvald) { + chrtlst[[as.character(i)]]<-univarChart.cntn(varseti=varseti, imgflprfx=imgflprfx, labelset=labelset) + } + } else { + #Discrete Variable + #bar plot + if (analysisoptn[2] & tmpvald ) { + + # chart option is chosen + if (analysisoptn[1]){ + # univariate statistics option is chosen -> freq table is available + # note: univariate statistics option is not chosen, tmpfrqtbl is NA + tmpfrqtbl<-STATLST[[as.character(i)]][["freqtbl"]] + } else { + # calculate statistics + if (vartypes[i]==1) { + statlst<-univarStat.dscrt(varseti=varseti) + } else { + statlst<-univarStat.dscrt(varseti=varseti,ordnl=FALSE) + } + tmpfrqtbl<-statlst[["freqtbl"]] + } + + chartset[["brchrt"]]<-NA + if( (length(tmpfrqtbl)<=10) & (length(tmpfrqtbl)>1) ) { + chartset<-univarChart.dscrt(frqtbl=tmpfrqtbl, imgflprfx=imgflprfx, labelset=labelset) + } else if (class(tmpfrqtbl)=="table") { + # number of categories <= 50 + # no chart but table + if (!analysisoptn[1]){ + STATLST[[as.character(i)]]<-statlst + } + } else if (is.na(tmpfrqtbl)) { + # no table available + if (!analysisoptn[1]){ + STATLST[[as.character(i)]]<-statlst + } + } + chrtlst[[as.character(i)]]<-chartset + } + } # end of D case + }) # end of try + } # end of var-wise-loop + attr(dtfrm, "univarChart.lst")<-chrtlst + + if (is.null(attr(dtfrm,"univarStat.lst")) ) { + attr(dtfrm,"univarStat.lst")<-STATLST + } + + invisible(dtfrm) +} # end of univarChart +####################################################################### +univarStatHtml<-function(dtfrm, tmpimgfile, analysisoptn, 
tmphtmlfile, standalone=T){ + # Description + # + # arguments + # dtfrm variable furnished with attributes + # tmpimgfile temporary image file prefix: =$SRVRCGI=$SERVER$CGIDIR + # analysisoptn analysis option + # nrows local variable + # tmphtmlfile temporary html file + # file tmphtmlfile + + DBG<-TRUE + DBG<-FALSE + + # open the connection + whtml<-file(tmphtmlfile, "w") + on.exit(close(whtml)) + + # color parameters + # legend: c(1:background, 2:table header, 3: table body(o), 4: table body(e)) + # clschm <-c("#FFFFFF", "#CCFFCC","#e0ffff","#f0fff0") # green-based palette + # blue-based palette + #clschm <-c("#FFFFFF", "#e6e6fa","#ffffff","#f5f5f5") + clschm <-c("dvnUnvStatTbl", "dvnUnvStatTblHdr","dvnUnvStatTblRowO","dvnUnvStatTblRowE") + + # table parameters + # legend: c(border, cellspacing) + tblprm <-c(0, 2) + + #cat("\nEntered the function univarStatHtml\n") + + # values for local tests + # set localtest 0 after local tests + localtest<-TRUE + localtest<-FALSE + if (localtest){ + tmpimgfile<-c("") + imgprfx1<-c("", namesi, if (!is.na(varlabelsi)) {paste(": ", varlabelsi, sep="")}, "\n",sep="");h3} + + ################### + # continuous case + univarStatHtml.cntn<-function(statlst, imgfllst, cmbntn, namesi, varlabelsi){ + + # statlst STATLST[[as.character(i)]] + # imgfllst imgfllst=CHRTLST[[as.character(i)]] + # cmbntn analysisoptn + # function definition sections + + # create the first tr tag: chart part + pt.tr1<-function(imgfllst, cmbntn){ + tr1<-"" + if (cmbntn[2]) { + + if (cmbntn[1]) { colspan<-" colspan=\"2\"" } else { colspan<-""} + + # both + + if(!is.null(imgfllst[["hstbx"]])){ + tr1.l<-paste("\n",imgflprfx,imgfllst[["hstbx"]],imgsffx1,"\n",sep="") + } else { + tr1.l<-paste("\n

Histogram/Boxplot Not Available

\n\n") + } + + if(!is.null(imgfllst[["qqplt"]])) { + tr1.r<-paste("\n",imgflprfx,imgfllst[["qqplt"]],imgsffx1,"\n",sep="") + } else { + tr1.r<-paste("\n

Normal Q-Q plot Not Available

\n\n",sep="") + } + + tr1<-paste("\n",tr1.l,tr1.r,"\n",sep="") + } + tr1 + } + + # create the 2nd and thereafter tr tags: statistics part + pt.tr2<-function(statlst, cmbntn){ + tr2<-"" + if (cmbntn[1]) { + # statistics on + # table header + tr2<-paste("\nStatisticValue\nStatisticValue\n\n",sep="") + + # statistical data + # when # of statistics is not even + if (blnkcell){ univarstathdr[length(statlst)+1]<-" "} + + # table body + for (j in 1:nrows) { + if (j%%2==FALSE) colorprm <- clschm[3] else colorprm <-clschm[4] + + tr2<-paste(tr2, + "\n", + "",univarstathdr[j],"\n", + "", prettyNum(statlst[[j]]),"\n", + "",univarstathdr[j+nrows],"\n", + "", if ( (j==nrows) & (blnkcell) ) {" "} else {prettyNum(statlst[[j+nrows]])},"\n\n", sep="") + } + } + tr2 + } + + # create the chart/statistics table segment + pt.tbl<-function(statlst=statlst,cmbntn=cmbntn,imgfllst=imgfllst){ + tr1<-pt.tr1(imgfllst=imgfllst, cmbntn=cmbntn) + tr2<-pt.tr2(statlst=statlst, cmbntn=cmbntn) + tbl<-paste("
\n\n",tr1,tr2,"
\n
\n",sep="") + tbl + } + + # create per variable html segment + pt.varunit.cntn<-function(vhdr,vcntnts){varunit<-paste(vhdr,vcntnts,"
", sep="");varunit} + ## end of function definitions ## + + # implementation + + pttbl<-pt.tbl(statlst=statlst, imgfllst=imgfllst, cmbntn=cmbntn) + ptvarheader<-pt.varheader(namesi=namesi, varlabelsi=varlabelsi) + ptvarunitc<-pt.varunit.cntn(vhdr=ptvarheader, vcntnts=pttbl) + + ptvarunitc + } # end of continuous case + + + ###################### + # discrete case + + univarStatHtml.dscrt<-function(statlst, imgfllst, cmbntn, namesi, varlabelsi, vltbl) { + # statlst STATLST[[as.character(i)]] + # imgfllst imgfllst=CHRTLST[[as.character(i)]] + # cmbntn analysisoptn + # function definition sections + + #statlst[["freqtbl"]] + # mode and median even if a freq table is not available + nrw<-3 + # add one for "total" row + #if (!is.na(statlst$freqtbl)) {nrw<-length(statlst$freqtbl)+1+nrw} + + if (class(statlst$freqtbl)=="table") {nrw<-length(statlst$freqtbl)+nrw} + # nrws: rowspan parameter value if the chart option is chosen + nrws<-nrw+1 + + pt.tr1<-function(imgfllst, cmbntn){ + try({ + # tr1.l: chart part + tr1.l<-"" + sprsstr1r<-FALSE + if (cmbntn[2]) { + rowspan<-"" + if (cmbntn[1]) { rowspan<-paste(" rowspan=\"",nrws,"\"",sep="") } + + if(!is.na(imgfllst[["brchrt"]])){ + tr1.l<-paste("\n",imgflprfx,imgfllst[["brchrt"]], imgsffx1, "\n", sep="") + } else { + if (class(statlst$freqtbl)=="table"){ + rowspan<-paste(" rowspan=\"",nrws,"\"",sep="") + tr1.l<-paste("\n

The number of categories is more than 10 or equal to 1.
Table substitutes for Bar plot

\n\n",sep="") + cmbntn[1]<-1 + } else { + tr1.lm<-paste("\n

The number of categories is more than 50. Frequency/Percentage tables are not shown here

\n\n",sep="") + + tr1.lhdr<-paste("Value: Value LabelFreqPercent\n",sep="") + + tr1.l<-paste(tr1.lm,tr1.lhdr, sep="") + + sprsstr1r<-TRUE + } + } + } + # tr1.r: freq/pcnt table header part + tr1.r<-"" + if (cmbntn[1]) { + if (class(statlst$freqtbl)=="table"){ + tr1.r<-paste("Value: Value LabelFreqPercent\n",sep="") + } else if (!sprsstr1r){ + tr1.rm<-paste("\n

The number of categories is more than 50. Frequency/Percentage tables are not shown here

\n\n",sep="") + + tr1.rhdr<-paste("Value: Value LabelFreqPercent\n",sep="") + + tr1.r<-paste(tr1.rm,tr1.rhdr, sep="") + } + } + tr1<-paste("\n",tr1.l,tr1.r,"\n",sep="") + }) # end of try + } + + # create the 2nd and thereafter tr tags: statistics part + pt.tr2<-function(statlst, cmbntn, vltbl, imgfllst){ + try({ + tr2<-"" + tableon<-FALSE + if ( cmbntn[2]){ + if (is.na(imgfllst[["brchrt"]])){ + tableon<-TRUE + } + } + if (cmbntn[1] | tableon) { + + if (class(statlst$freqtbl)=="table") {tblkey<-names(statlst$freqtbl)} + # if freqtbl is NA, tblkey becomes NULL + for (j in 1:nrw) { + if (j%%2==FALSE) { colorprm <- clschm[3]} else {colorprm <-clschm[4]} + if (j < (nrw -2)) { + + catgrylbl<-"" + if (!is.null(vltbl)){ + if(!is.null(vltbl[[tblkey[j]]])) { + catgrylbl<-paste("(",vltbl[[tblkey[j]]],")",sep="") + } + } + tr2<-paste(tr2, "\n",tblkey[j],catgrylbl,"\n",statlst$freqtbl[[j]],"\n", signif(statlst$pcnttbl[[j]],3),"\n\n", sep="") + + } else if (j == (nrw -2)) { + #cat("entering the total row\n") + tr2<-paste(tr2, "\nTotal\n",statlst$Vald+statlst$Invald,"\n100\n\n", sep="") + + } else if (j == (nrw -1)) { + # median + #cat("entering the median\n") + median.vl<- "Not Available" + median.lbl<-"" + if (!is.null(statlst$Median)) { + median.vl<- as.character(statlst$Median) + if (!is.null(vltbl) && (nrw>3)){ + if (!is.null(vltbl[[median.vl]])) { + median.lbl<-paste("(",vltbl[[median.vl]],")",sep="") + } + } + } + + tr2<-paste(tr2,"\nMedian\n",median.vl,"\n",median.lbl,"\n\n", sep="") + + } else if (j == nrw) { + # mode + #cat("entering the Mode\n") + mode.vl<-"Not Available" + mode.lbl<-"" + if (!is.null(statlst$Mode)) { + mode.vl<-statlst$Mode + if (!is.null(vltbl) && (nrw>3) ) { + if (!is.null(vltbl[[mode.vl]])) { + mode.lbl<-paste("(",vltbl[[mode.vl]], ")", sep="") + } + } + } + + tr2<-paste(tr2,"\nMode\n",mode.vl,"\n",mode.lbl,"\n\n", sep="") + } + } + } + tr2 + }) # end of try + } + + # create the chart/statistics table segment + 
pt.tbl<-function(statlst=statlst,cmbntn=cmbntn,imgfllst=imgfllst,vltbl=vltbl){ + try({ + tr1<-pt.tr1(imgfllst=imgfllst, cmbntn=cmbntn) + tr2<-pt.tr2(statlst=statlst, cmbntn=cmbntn, vltbl=vltbl,imgfllst=imgfllst) + tbl<-paste("
\n\n",tr1,tr2,"
\n
\n",sep="") + tbl + }) + } + + # create per variable html segment + pt.varunit.dscrt<-function(vhdr,vcntnts){varunit<-paste(vhdr,vcntnts,"
", sep="");varunit} + + ## end of function definitions ## + + + # implementation + try({ + #cat("enters the discrete html body function\n", sep="") + pttbl<-pt.tbl(statlst=statlst, imgfllst=imgfllst, cmbntn=cmbntn, vltbl=vltbl) + + ptvarheader<-pt.varheader(namesi=namesi, varlabelsi=varlabelsi) + ptvarunitd<-pt.varunit.dscrt(vhdr=ptvarheader, vcntnts=pttbl) + + ptvarunitd + }) + } # end of discrete case + + + + # main + # implementation + rawVarName <- nameset + if (length(attr(dtfrm, "Rsafe2raw"))>0){ + Rsafe2raw <- attr(dtfrm, "Rsafe2raw") + for (i in 1:length(nameset)){ + if (!is.null(Rsafe2raw[[nameset[i]]])){ + rawVarName[i] <- Rsafe2raw[[nameset[i]]]; + } + } + } + + for (i in 1:dim(dtfrm)[2]){ + try({ + if (VARTYPE[i]==2) { + varsgmnt.c<-univarStatHtml.cntn(statlst=STATLST[[as.character(i)]], imgfllst=CHRTLST[[as.character(i)]], cmbntn=analysisoptn, namesi=rawVarName[i], varlabelsi=varlabelset[i]) + cat(file=whtml, varsgmnt.c, sep="") + } else { + if (DBG) {cat(i,"-th var before entering the discrete html function\n", sep="")} + #cat("check the value table=",VALTABLE[[VALINDEX[[i]]]],"\n", sep="") + if (is.null(VALINDEX[[as.character(i)]])){valtable<-NULL} else {valtable<-VALTABLE[[VALINDEX[[as.character(i)]]]]} + varsgmnt.d<-univarStatHtml.dscrt(statlst=STATLST[[as.character(i)]], imgfllst=CHRTLST[[as.character(i)]], cmbntn=analysisoptn, namesi=rawVarName[i], varlabelsi=varlabelset[i], vltbl=valtable) + cat(file=whtml, varsgmnt.d, sep="") + } + }) # end of try + } # end of var-wise for-loop + + +} #end of the function univarStatHtml + + +########################################################### +univarDataDwnld<-function(dtfrm, dwnldoptn, dsnprfx) { + # dtfrm(=z1) dataset to be downloaded + # dwnldoptn(=z2) data download option + # dsnprfx(=z3) dataset name prefix + +# The portion of code immediately below has been added to +# convert extra metadata, such as value labels, supplied in +# the proprietary attributes (below) into standard R notations, +# such 
as "comments" for variable labels and "factors" for +# value labels. +# +# This is still work in progress! -- L.A. + + NAMESET<-names(dtfrm) + VARLABELS<-attr(dtfrm,"var.labels") + + attr(x,"orig.names")<-attr(dtfrm,"var.labels") + + CHRTLST<-attr(dtfrm, "univarChart.lst") + STATLST<-attr(dtfrm, "univarStat.lst") + VARTYPE<-attr(dtfrm, "var.type") + VALINDEX<-attr(dtfrm, "val.index") + VALTABLE<-attr(dtfrm, "val.table") + + MISSVALINDEX <- attr(x,"missval.index") + MISSVALTABLE <- attr(x,"missval.table") + + + recodemiss<-TRUE + recodefactors<-TRUE + dropfactorlevels<-FALSE + orderfactors<-TRUE + + + for (i in 1:length(x)) { + cat("inside the for loop\n") + cat("class: ") + cat(class(x[[i]])) + cat("\n") + # Recoding discrete, categorical variables as R factors; + # But, (experimental...) only if there are value labels supplied. + # This means, among other things, that an ingested R character, + # or integer vector would stay a vector, and not a factor, + # in a saved-as-R subset. + + + # -- L.A. + + if (!is.null(VARTYPE) && VARTYPE[i]<2 && recodefactors) { + + # Additionally, if we are saving as Stata, we're only + # recoding discrete numeric values (vartype 1), but not Strings. + # This is because of the nature of factors in R. + # TODO: add documentation for all of this! + # -- L.A. 
+ + #if ((dwnldoptn != 'D03') || (VARTYPE[i] == 1)) { + if (((dwnldoptn != 'D03') || (VARTYPE[i] == 1)) && !(is.null(VALINDEX[[as.character(i)]]))) { + + if (is.null(VALINDEX[[as.character(i)]])) { + vti <- NULL + } else { + vti <- VALTABLE[[VALINDEX[[as.character(i)]]]] + cat(paste(class(vti),"\n")) + cat(paste(length(vti),"\n")) + cat(paste("VTI", vti, "\n", sep=" : ")) + } + + if (dropfactorlevels) { + vtilevels<-NULL + } else { + if (is.numeric(x[[i]])) { + vtilevels<-as.numeric(names(vti)) + } else { + vtilevels<-names(vti) + } + } + + # save / re-attach date/time-related class name + classToken <- class(x[[i]]) + vlevsi <- as.list(sort(unique.default(c(x[[i]],vtilevels)))) + if ((classToken[1] == "Date") || (classToken[1] == "POSIXt")) { + class(vlevsi)<- classToken + } + names(vlevsi)<-vlevsi + tmatch<-na.omit(match(names(vti),names(vlevsi))) + if (length(tmatch)>0) { + names(vlevsi)[tmatch] <- vti + } + + mti<-integer(0); + mti<-integer(0); + if (recodemiss && !is.null(MISSVALINDEX[[as.character(i)]])) { + mti<-MISSVALTABLE[[MISSVALINDEX[[as.character(i)]]]] + tmatch<-na.omit(match(mti,vlevsi)) + if (length(tmatch)>0) { + vlevsi[tmatch]<-NULL + } + } + +# TODO: Add explicit nominal/ordinal/dichotomous information to meta data, instead +# of assuming non-character vars are ordered + + if ((dwnldoptn == 'D04') && !(is.null(VALORDER[[as.character(i)]]))) { + cat("ordered value labels supplied") + x[[i]] <- factor(x[[i]], + levels=VALORDER[[as.character(i)]], + ordered=TRUE) + } else { + cat("no ordered value labels supplied\n") + cat(paste(VARTYPE[i],"\n",sep="")) + cat(paste(length(vlevsi),"\n",sep="")) + orderedfct<-(orderfactors && + VARTYPE[i]>0 && ((length(vlevsi)-length(mti)>2))) + cat(paste(as.character(orderedfct),"\n", sep="")) + paste("MTI", mti,"\n",sep=" : ") + paste("VLEVSI", vlevsi,"\n",sep=" : ") + + x[[i]] <- factor(x[[i]], + levels=vlevsi, + labels=names(vlevsi), + ordered=(orderfactors && + VARTYPE[i]>0 && ((length(vlevsi)-length(mti)>2)))) + 
} + + attr(x,"vlevsi")<-vlevsi; + attr(x,"namesvlevsi")<-names(vlevsi); + + } + } + +# try to add variable labels as R comments: (L.A. -- ?) + + comment(x[[i]]) <- VARLABELS[i] + } + +# end of added recoding -- L.A. + + + + if (dwnldoptn == 'D01') { + # In the future when a sample program file is attached to + # a text file, col.names should be set to F to avoid + # printing a variable list + write.table(dtfrm, file=dsnprfx, sep="\t", row.names=F, na=".") + } else if (dwnldoptn == 'D02') { +# SPLUS: (L.A.) + for (i in 1:length(x)) { + if (class(x[[i]]) == 'AsIs'){ + x[[i]]<-as.character(x[[i]]); + } + } + #attach(dtfrm) + dump('x', file=dsnprfx) + #dump(ls(dtfrm), file=dsnprfx) + #detach(dtfrm) + } else if (dwnldoptn == 'D03') { +# STATA: (L.A.) + ## -- replaced dtfrm with x in the following paragraph; - L.A. (?) + # truncate over-sized string variables + MaxLenStringVar <- 127 + vt <- attr(x, 'var.type') + for (i in 1:length(vt)){ + if (vt[i] == 0){ + #cat(paste(i, "-th var is char type", sep=""), "\n") + maxlen <- max(unlist(lapply(x[[i]],nchar))) + if (maxlen > MaxLenStringVar) { + #cat(paste(i, "-th var is over-sized string var", sep=""), "\n") + x[[i]] <- strtrim(x[[i]], MaxLenStringVar) + } + } + } + write.dta(x, file=dsnprfx, version=7) + } else if (dwnldoptn == 'D04') { +# SAVE AS R WORKSPACE: (L.A.) 
+ save(x,file=dsnprfx) + } +} # end of univarDataDwnld.R + +########################################################### +sumStatTabAll.DDI<-function(dtfrm, file="", flid=1, jobNo=0, startno=1, novars=0){ + + # sumStatTabAll.DDI(dtfrm=x,file="/tmp/mry/00001/00001.stat.1.tab", flid=1) + # arguments + # dtfrm data frame furnished with attributes + # file outp file (tab-delimited) + # flid file ID + # ordrDDI == 0 if no division of a job + # constants + entref<-c("&", "<", ">", "'", "\"") + nmstr<-c("&","<", ">", "'",""") + sumStatset<-c("mean", "medn", "mode", "vald", "invd", "min", "max", "stdev") + DEBUG<-FALSE + + fileid<-paste("file", flid, sep="") + #varIDprfx<-paste("v", flid, ".", sep="") + + # open the connection + if (file!="") { + if (jobNo==0){ + wxml<-file(file, "w") + } else if (jobNo >0) { + wxml<-file(file, "a") + } + on.exit(close(wxml)) + + } else { + stop("output file name is not specified\n") + } + + # An auxiliary function that replaces the above five characters with the named entities + chr2xtrf<-function(lbl){ + for (i in 1:length(entref)) { + lbl<-gsub(entref[i], nmstr[i], lbl, perl=T, useBytes = T) + } + } + + # xml printing up to the section 3 + #if (jobNo<= 1 ){ + # if (jobNo){novars<-"" } else {novars<-dim(dtfrm)[2]} + if (jobNo == 0){ + if (novars==0){novar<-"";} else if (novars > 0) {novar <-novars} + cat(file=wxml, sep="",paste(dim(dtfrm)[1],novar,fileid,sep="\t"),"\n" ) + } + + + VARTYPE<-attr(dtfrm, "var.type") + NAMES<-names(dtfrm) + for (i in 1: dim(dtfrm)[2]){ + + # sumStatset<-c("mean", "medn", "mode", "vald", "invd", "min", "max", "stdev") + + + tmpvari<-dtfrm[[i]] + if(DEBUG) {cat("variable name =",NAMES[i],"\n")} + + if (VARTYPE[i] ==0){ + # set '' to NA tmpvari[ tmpvari == ""]<-NA; + tmpvari[ sub('^\\s+', '',tmpvari, perl = T, useBytes=T)==''] <-NA + } + + + tbl1<-table(tmpvari, useNA='ifany') + + N<-sum(complete.cases(tmpvari)) + + if (VARTYPE[i]== 2) { + + svnm<-summary(tmpvari) + if (N) { + min.value <- svnm[[1]] + 
median.value <- median(tmpvari, na.rm=TRUE) + max.value <- svnm[[6]] + mean.value <- svnm[[4]] + } + else { + min.value <- NA + median.value <- NA + max.value <- NA + mean.value <- NA + } + stdv.value <- sd(tmpvari, na.rm=T) + # find the maximum frequency cell + # index: which.max(table(dtfrm[[i]])) + maxfreq<-NA + if (length(tmpvari) > length(tbl1)) { + maxfreq<- names(tbl1)[which.max(tbl1)] + } + statset<- list( + Mean = mean.value, + Median = median.value, + Mode.Value = maxfreq, + Vald = N, + Invald = sum(is.na(tmpvari)), + Minimum = min.value, + Maximum = max.value, + Stdev = stdv.value + ) + #if (length(attr(tmpvari, 'Univariate'))==0){attr(dtfrm[[i]],"Univariate") <- statset} + } else if ( (VARTYPE[i] < 2) & (VARTYPE[i] >=0) ){ + + if(DEBUG) {cat("pass the point univStat(discrete)\n")} + statset<- list( + Vald = N, + Invald = (length(tmpvari) - N), + Total = length(tmpvari) + ) + + if (DEBUG){cat("\ttable header:",paste(dimnames(tbl1)[[1]], collapse='|'), "\n")} + if (DEBUG){cat("\ttable frequency:",paste(tbl1, collapse='|'), "\n")} + } else { + if(DEBUG) {cat("out-of-range value", i ,"-th var =", VARTYPE[i],"\n")} + } + + # section 4 + pt.vr.1<-paste(NAMES[i],VARTYPE[i],sep='\t') + pt.vr.st<-"" + #iadj <- (i+startno-1) + if (VARTYPE[i] == 2) { + # continuous variable case + #pt.vr.st<- if (!is.na(statset[[1]])) {statset[[1]]} else {"."} + pt.vr.st<-"8" + for (j in 1:length(sumStatset)) { + pt.vr.st<-paste(pt.vr.st,if (!is.na(statset[[j]])) {statset[[j]]} else {"."}, sep="\t") + } + + # if: end of continuous variable part + } else { + + # discrete variable case + + # actual value table + if (dim(tbl1)[1] <= 100) { + # integer: how many value-freq sets follw? 
+ pt.vr.st<-dim(tbl1)[1] + for (j in 1:(dim(tbl1)[1])) { + # each value-freq set + tmpvalue<-dimnames(tbl1)[[1]][j] + #if (VARTYPE[i] == 0) {tmpvalue<-chr2xtrf(tmpvalue)} + if (DEBUG) { cat("i=", i, "\tj=", j,"\ttmpvalue:", tmpvalue, "\n", sep="")} + + pt.vr.st<-paste(pt.vr.st,if (!is.na(tmpvalue)) {tmpvalue} else {"."}, tbl1[[j]],sep="\t") + + } + + } else { + # for more-than-100-category vars, print 0 + pt.vr.st<-0 + } + + # for all cases, valid, invalid and N are printed + for (k in 1:length(statset)){ + pt.vr.st<-paste(pt.vr.st,if (!is.na(statset[[k]])) {statset[[k]]} else {"."}, sep="\t") + } + + + if (DEBUG) {cat("\n");} + } # else: end of discrete case + + u <- unf(tmpvari, version=3) + uxml<-paste(as.character(u),"\n",sep="") + # dump this var + cat(file=wxml,paste(pt.vr.1,pt.vr.st,uxml,sep="\t"),sep="") + + } # end of the variable-wise loop + +} #end of the sumStatTabAll.DDIx.R + + +checkBinaryResponse<-function(binx){ + bnryVarTbl <-attr(table(binx), 'dimnames')[[1]]; + if (length(bnryVarTbl) == 2){ + if ((bnryVarTbl[1] == 0) && (bnryVarTbl[2]==1)){ + #cat('this variable is already 0-1\n'); + } else { + #cat('this variable needs the conversion\n'); + #cat(paste( bnryVarTbl[1],' is recoded to 1; ', bnryVarTbl[2],' is recoded to 0;\n', sep='')); + binx<-as.integer(binx == bnryVarTbl[1]); + } + } + invisible(binx) +} + + +####################################################################### +univarStatHtmlBody<-function(dtfrm, whtml, analysisoptn, standalone=F){ + # Description + # + # arguments + # dtfrm variable furnished with attributes + # tmpimgfile temporary image file prefix: =$SRVRCGI=$SERVER$CGIDIR + # analysisoptn analysis option + # nrows local variable + # tmphtmlfile temporary html file + # file tmphtmlfile + + DBG<-TRUE + DBG<-FALSE + + # open the connection + #whtml<-file(tmphtmlfile, "w") + #on.exit(close(whtml)) + + # color parameters + # legend: c(1:background, 2:table header, 3: table body(o), 4: table body(e)) + # clschm <-c("#FFFFFF", 
"#CCFFCC","#e0ffff","#f0fff0") # green-based palette + # blue-based palette + #clschm <-c("#FFFFFF", "#e6e6fa","#ffffff","#f5f5f5") + clschm <-c("dvnUnvStatTbl", "dvnUnvStatTblHdr","dvnUnvStatTblRowO","dvnUnvStatTblRowE") + + # table parameters + # legend: c(border, cellspacing) + tblprm <-c(0, 2) + + #cat("\nEntered the function univarStatHtml\n") + + # values for local tests + # set localtest 0 after local tests + localtest<-TRUE + localtest<-FALSE + if (localtest){ + tmpimgfile<-c("") + imgprfx1<-c("", namesi, if (!is.na(varlabelsi)) {paste(": ", varlabelsi, sep="")}, "\n",sep="");h3} + + ################### + # continuous case + univarStatHtml.cntn<-function(statlst, imgfllst, cmbntn, namesi, varlabelsi){ + + # statlst STATLST[[as.character(i)]] + # imgfllst imgfllst=CHRTLST[[as.character(i)]] + # cmbntn analysisoptn + # function definition sections + + # create the first tr tag: chart part + pt.tr1<-function(imgfllst, cmbntn){ + tr1<-"" + if (cmbntn[2]) { + + if (cmbntn[1]) { colspan<-" colspan=\"2\"" } else { colspan<-""} + + # both + + if(!is.null(imgfllst[["hstbx"]])){ + tr1.l<-paste("\n",imgflprfx,imgfllst[["hstbx"]],imgsffx1,"\n",sep="") + } else { + tr1.l<-paste("\n

Histogram/Boxplot Not Available

\n\n") + } + + if(!is.null(imgfllst[["qqplt"]])) { + tr1.r<-paste("\n",imgflprfx,imgfllst[["qqplt"]],imgsffx1,"\n",sep="") + } else { + tr1.r<-paste("\n

Normal Q-Q plot Not Available

\n\n",sep="") + } + + tr1<-paste("\n",tr1.l,tr1.r,"\n",sep="") + } + tr1 + } + + # create the 2nd and thereafter tr tags: statistics part + pt.tr2<-function(statlst, cmbntn){ + tr2<-"" + if (cmbntn[1]) { + # statistics on + # table header + tr2<-paste("\nStatisticValue\nStatisticValue\n\n",sep="") + + # statistical data + # when # of statistics is not even + if (blnkcell){ univarstathdr[length(statlst)+1]<-" "} + + # table body + for (j in 1:nrows) { + if (j%%2==FALSE) colorprm <- clschm[3] else colorprm <-clschm[4] + + tr2<-paste(tr2, + "\n", + "",univarstathdr[j],"\n", + "", prettyNum(statlst[[j]]),"\n", + "",univarstathdr[j+nrows],"\n", + "", if ( (j==nrows) & (blnkcell) ) {" "} else {prettyNum(statlst[[j+nrows]])},"\n\n", sep="") + } + } + tr2 + } + + # create the chart/statistics table segment + pt.tbl<-function(statlst=statlst,cmbntn=cmbntn,imgfllst=imgfllst){ + tr1<-pt.tr1(imgfllst=imgfllst, cmbntn=cmbntn) + tr2<-pt.tr2(statlst=statlst, cmbntn=cmbntn) + tbl<-paste("
\n\n",tr1,tr2,"
\n
\n",sep="") + tbl + } + + # create per variable html segment + pt.varunit.cntn<-function(vhdr,vcntnts){varunit<-paste(vhdr,vcntnts,"
", sep="");varunit} + ## end of function definitions ## + + # implementation + + pttbl<-pt.tbl(statlst=statlst, imgfllst=imgfllst, cmbntn=cmbntn) + ptvarheader<-pt.varheader(namesi=namesi, varlabelsi=varlabelsi) + ptvarunitc<-pt.varunit.cntn(vhdr=ptvarheader, vcntnts=pttbl) + + ptvarunitc + } # end of continuous case + + + ###################### + # discrete case + + univarStatHtml.dscrt<-function(statlst, imgfllst, cmbntn, namesi, varlabelsi, vltbl) { + # statlst STATLST[[as.character(i)]] + # imgfllst imgfllst=CHRTLST[[as.character(i)]] + # cmbntn analysisoptn + # function definition sections + + #statlst[["freqtbl"]] + # mode and median even if a freq table is not available + nrw<-3 + # add one for "total" row + #if (!is.na(statlst$freqtbl)) {nrw<-length(statlst$freqtbl)+1+nrw} + + if (class(statlst$freqtbl)=="table") {nrw<-length(statlst$freqtbl)+nrw} + # nrws: rowspan parameter value if the chart option is chosen + nrws<-nrw+1 + + pt.tr1<-function(imgfllst, cmbntn){ + try({ + # tr1.l: chart part + tr1.l<-"" + sprsstr1r<-FALSE + if (cmbntn[2]) { + rowspan<-"" + if (cmbntn[1]) { rowspan<-paste(" rowspan=\"",nrws,"\"",sep="") } + + if(!is.na(imgfllst[["brchrt"]])){ + tr1.l<-paste("\n",imgflprfx,imgfllst[["brchrt"]], imgsffx1, "\n", sep="") + } else { + if (class(statlst$freqtbl)=="table"){ + rowspan<-paste(" rowspan=\"",nrws,"\"",sep="") + tr1.l<-paste("\n

The number of categories is more than 10 or equal to 1.
Table substitutes for Bar plot

\n\n",sep="") + cmbntn[1]<-1 + } else { + + tr1.lm<-paste("\n

The number of categories is more than 50. Frequency/Percentage tables are not shown here

\n\n",sep="") + + tr1.lhdr<-paste("Value: Value LabelFreqPercent\n",sep="") + tr1.l<-paste(tr1.lm,tr1.lhdr, sep="") + + sprsstr1r<-TRUE + } + } + } + # tr1.r: freq/pcnt table header part + tr1.r<-"" + if (cmbntn[1]) { + if (class(statlst$freqtbl)=="table"){ + tr1.r<-paste("Value: Value LabelFreqPercent\n",sep="") + } else if (!sprsstr1r){ + + tr1.rm<-paste("\n

The number of categories is more than 50. Frequency/Percentage tables are not shown here

\n\n",sep="") + + tr1.rhdr<-paste("Value: Value LabelFreqPercent\n",sep="") + + tr1.r<-paste(tr1.rm,tr1.rhdr, sep="") + } + } + tr1<-paste("\n",tr1.l,tr1.r,"\n",sep="") + }) # end of try + } + + # create the 2nd and thereafter tr tags: statistics part + pt.tr2<-function(statlst, cmbntn, vltbl, imgfllst){ + try({ + tr2<-"" + tableon<-FALSE + if ( cmbntn[2]){ + if (is.na(imgfllst[["brchrt"]])){ + tableon<-TRUE + } + } + if (cmbntn[1] | tableon) { + + if (class(statlst$freqtbl)=="table") {tblkey<-names(statlst$freqtbl)} + # if freqtbl is NA, tblkey becomes NULL + for (j in 1:nrw) { + if (j%%2==FALSE) { colorprm <- clschm[3]} else {colorprm <-clschm[4]} + if (j < (nrw -2)) { + + catgrylbl<-"" + if (!is.null(vltbl)){ + if(!is.null(vltbl[[tblkey[j]]])) { + catgrylbl<-paste("(",vltbl[[tblkey[j]]],")",sep="") + } + } + tr2<-paste(tr2, "\n",tblkey[j],catgrylbl,"\n",statlst$freqtbl[[j]],"\n", signif(statlst$pcnttbl[[j]],3),"\n\n", sep="") + + } else if (j == (nrw -2)) { + #cat("entering the total row\n") + tr2<-paste(tr2, "\nTotal\n",statlst$Vald+statlst$Invald,"\n100\n\n", sep="") + + } else if (j == (nrw -1)) { + # median + #cat("entering the median\n") + median.vl<- "Not Available" + median.lbl<-"" + if (!is.null(statlst$Median)) { + median.vl<- as.character(statlst$Median) + if (!is.null(vltbl) && (nrw>3)){ + if (!is.null(vltbl[[median.vl]])) { + median.lbl<-paste("(",vltbl[[median.vl]],")",sep="") + } + } + } + + tr2<-paste(tr2,"\nMedian\n",median.vl,"\n",median.lbl,"\n\n", sep="") + + } else if (j == nrw) { + # mode + #cat("entering the Mode\n") + mode.vl<-"Not Available" + mode.lbl<-"" + if (!is.null(statlst$Mode)) { + mode.vl<-statlst$Mode + if (!is.null(vltbl) && (nrw>3) ) { + if (!is.null(vltbl[[mode.vl]])) { + mode.lbl<-paste("(",vltbl[[mode.vl]], ")", sep="") + } + } + } + + tr2<-paste(tr2,"\nMode\n",mode.vl,"\n",mode.lbl,"\n\n", sep="") + } + } + } + tr2 + }) # end of try + } + + # create the chart/statistics table segment + 
pt.tbl<-function(statlst=statlst,cmbntn=cmbntn,imgfllst=imgfllst,vltbl=vltbl){ + try({ + tr1<-pt.tr1(imgfllst=imgfllst, cmbntn=cmbntn) + tr2<-pt.tr2(statlst=statlst, cmbntn=cmbntn, vltbl=vltbl,imgfllst=imgfllst) + tbl<-paste("
\n\n",tr1,tr2,"
\n
\n",sep="") + tbl + }) + } + + # create per variable html segment + pt.varunit.dscrt<-function(vhdr,vcntnts){varunit<-paste(vhdr,vcntnts,"
", sep="");varunit} + + ## end of function definitions ## + + + # implementation + try({ + #cat("enters the discrete html body function\n", sep="") + pttbl<-pt.tbl(statlst=statlst, imgfllst=imgfllst, cmbntn=cmbntn, vltbl=vltbl) + + ptvarheader<-pt.varheader(namesi=namesi, varlabelsi=varlabelsi) + ptvarunitd<-pt.varunit.dscrt(vhdr=ptvarheader, vcntnts=pttbl) + + ptvarunitd + }) + } # end of discrete case + + + + # main + # implementation + rawVarName <- nameset + if (length(attr(dtfrm, "Rsafe2raw"))>0){ + Rsafe2raw <- attr(dtfrm, "Rsafe2raw") + for (i in 1:length(nameset)){ + if (!is.null(Rsafe2raw[[nameset[i]]])){ + rawVarName[i] <- Rsafe2raw[[nameset[i]]]; + } + } + } + + for (i in 1:dim(dtfrm)[2]){ + try({ + if (VARTYPE[i]==2) { + varsgmnt.c<-univarStatHtml.cntn(statlst=STATLST[[as.character(i)]], imgfllst=CHRTLST[[as.character(i)]], cmbntn=analysisoptn, namesi=rawVarName[i], varlabelsi=varlabelset[i]) + #cat(file=whtml, varsgmnt.c, sep="") + HTML(file=whtml, varsgmnt.c) + } else { + if (DBG) {cat(i,"-th var before entering the discrete html function\n", sep="")} + #cat("check the value table=",VALTABLE[[VALINDEX[[i]]]],"\n", sep="") + if (is.null(VALINDEX[[as.character(i)]])){valtable<-NULL} else {valtable<-VALTABLE[[VALINDEX[[as.character(i)]]]]} + varsgmnt.d<-univarStatHtml.dscrt(statlst=STATLST[[as.character(i)]], imgfllst=CHRTLST[[as.character(i)]], cmbntn=analysisoptn, namesi=rawVarName[i], varlabelsi=varlabelset[i], vltbl=valtable) + #cat(file=whtml, varsgmnt.d, sep="") + HTML(file=whtml, varsgmnt.d) + } + }) # end of try + } # end of var-wise for-loop + + +} #end of the function univarStatHtml diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/error.xsl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/config/error.xsl Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r dd9adfc73390 -r 1b2188262ae9 
DVN-web/installer/dvninstall/config/fgdc2ddi.xsl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/config/fgdc2ddi.xsl Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,224 @@ + + + + + http://www.icpsr.umich.edu/DDI/Version2-0.xsd + + + + + + + + http://www.icpsr.umich.edu/DDI + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Harvard Geospatial Library + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + File Decompression Technique: + + + + + + + + + + + + + + + + + + diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/graphml.props --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/config/graphml.props Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,7 @@ +neostore.nodestore.db.mapped_memory=90M +neostore.relationshipstore.db.mapped_memory=1G +neostore.propertystore.db.mapped_memory=50M +neostore.propertystore.db.strings.mapped_memory=100M +neostore.propertystore.db.arrays.mapped_memory=0M +neostore.propertystore.db.index.mapped_memory=50M +neostore.propertystore.db.index.keys.mapped_memory=50M diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/header.xsl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/config/header.xsl Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + +
+ +
+
+ + + + + + + + +
diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/jhove.conf --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/config/jhove.conf Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,43 @@ + + + ../applications/j2ee-apps/DVN-EAR + utf-8 + /tmp + 131072 + 1.0 + 1024 + + edu.harvard.hul.ois.jhove.module.AiffModule + + + edu.harvard.hul.ois.jhove.module.WaveModule + + + edu.harvard.hul.ois.jhove.module.PdfModule + + + edu.harvard.hul.ois.jhove.module.Jpeg2000Module + + + edu.harvard.hul.ois.jhove.module.JpegModule + + + edu.harvard.hul.ois.jhove.module.GifModule + + + edu.harvard.hul.ois.jhove.module.TiffModule + + + edu.harvard.hul.ois.jhove.module.HtmlModule + + + edu.harvard.hul.ois.jhove.module.AsciiModule + + + edu.harvard.hul.ois.jhove.module.Utf8Module + + diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/logging.properties --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/config/logging.properties Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,116 @@ +# +# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. +# +# Copyright (c) 2010-2011 Oracle and/or its affiliates. All rights reserved. +# +# The contents of this file are subject to the terms of either the GNU +# General Public License Version 2 only ("GPL") or the Common Development +# and Distribution License("CDDL") (collectively, the "License"). You +# may not use this file except in compliance with the License. You can +# obtain a copy of the License at +# https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html +# or packager/legal/LICENSE.txt. See the License for the specific +# language governing permissions and limitations under the License. +# +# When distributing the software, include this License Header Notice in each +# file and include the License file at packager/legal/LICENSE.txt. 
+# +# GPL Classpath Exception: +# Oracle designates this particular file as subject to the "Classpath" +# exception as provided by Oracle in the GPL Version 2 section of the License +# file that accompanied this code. +# +# Modifications: +# If applicable, add the following below the License Header, with the fields +# enclosed by brackets [] replaced by your own identifying information: +# "Portions Copyright [year] [name of copyright owner]" +# +# Contributor(s): +# If you wish your version of this file to be governed by only the CDDL or +# only the GPL Version 2, indicate your decision by adding "[Contributor] +# elects to include this software in this distribution under the [CDDL or GPL +# Version 2] license." If you don't indicate a single choice of license, a +# recipient has the option to distribute your version of this file under +# either the CDDL, the GPL Version 2 or to extend the choice of license to +# its licensees as provided above. However, if you add GPL Version 2 code +# and therefore, elected the GPL Version 2 license, then the option applies +# only if the new code is made subject to such option by the copyright +# holder. 
+# +#GlassFish logging.properties list +#All attributes details +handlers=java.util.logging.ConsoleHandler,com.sun.enterprise.server.logging.GFFileHandler +java.util.logging.ConsoleHandler.formatter=com.sun.enterprise.server.logging.UniformLogFormatter +com.sun.enterprise.server.logging.GFFileHandler.formatter=com.sun.enterprise.server.logging.UniformLogFormatter +com.sun.enterprise.server.logging.GFFileHandler.file=${com.sun.aas.instanceRoot}/logs/server.log +com.sun.enterprise.server.logging.GFFileHandler.rotationTimelimitInMinutes=0 +com.sun.enterprise.server.logging.GFFileHandler.flushFrequency=1 +java.util.logging.FileHandler.limit=50000 +com.sun.enterprise.server.logging.GFFileHandler.logtoConsole=false +com.sun.enterprise.server.logging.GFFileHandler.rotationLimitInBytes=2000000 +com.sun.enterprise.server.logging.SyslogHandler.useSystemLogging=false +java.util.logging.FileHandler.count=1 +com.sun.enterprise.server.logging.GFFileHandler.retainErrorsStasticsForHours=0 +log4j.logger.org.hibernate.validator.util.Version=warn +com.sun.enterprise.server.logging.GFFileHandler.maxHistoryFiles=0 +com.sun.enterprise.server.logging.GFFileHandler.rotationOnDateChange=false +java.util.logging.FileHandler.pattern=%h/java%u.log +java.util.logging.FileHandler.formatter=java.util.logging.XMLFormatter +edu.harvard.level=INFO +# Used for setting log record format values for setting log data under server.log file +# Generic Record Format: [#|yyyy-mm-ddTHH:mm:ss:SSS-ZZZZ|Level|ProductId|Logger Name|Name Value Pairs|MsgId: Message|#] +# User is allowed to change date format for recording data in server.log +com.sun.enterprise.server.logging.GFFileHandler.logFormatDateFormat=yyyy-MM-dd'T'HH:mm:ss.SSSZ + +#All log level details +com.sun.enterprise.server.logging.GFFileHandler.level=INFO +javax.enterprise.system.tools.admin.level=INFO +org.apache.jasper.level=INFO +javax.enterprise.resource.corba.level=INFO +javax.enterprise.system.core.level=INFO 
+javax.enterprise.system.core.classloading.level=INFO +javax.enterprise.resource.jta.level=INFO +java.util.logging.ConsoleHandler.level=INFO +javax.enterprise.system.webservices.saaj.level=INFO +javax.enterprise.system.tools.deployment.level=INFO +javax.enterprise.system.container.ejb.level=INFO +javax.enterprise.system.core.transaction.level=INFO +org.apache.catalina.level=INFO +javax.enterprise.system.container.ejb.mdb.level=INFO +org.apache.coyote.level=INFO +org.apache.commons.digester.level=WARNING +javax.enterprise.system.std.com.sun.enterprise.server.logging.level=WARNING +javax.level=INFO +javax.enterprise.resource.javamail.level=INFO +javax.enterprise.system.webservices.rpc.level=INFO +javax.enterprise.system.container.web.level=INFO +javax.enterprise.system.util.level=INFO +javax.enterprise.resource.resourceadapter.level=INFO +javax.enterprise.resource.jms.level=INFO +javax.enterprise.system.core.config.level=INFO +javax.enterprise.system.level=INFO +javax.enterprise.system.core.security.level=INFO +javax.enterprise.system.container.cmp.level=INFO +javax.enterprise.system.webservices.registry.level=INFO +javax.enterprise.system.core.selfmanagement.level=INFO +.level=INFO +javax.enterprise.resource.jdo.level=INFO +javax.enterprise.system.core.naming.level=INFO +javax.enterprise.resource.webcontainer.jsf.application.level=INFO +javax.enterprise.resource.webcontainer.jsf.resource.level=INFO +javax.enterprise.resource.webcontainer.jsf.config.level=INFO +javax.enterprise.resource.webcontainer.jsf.context.level=INFO +javax.enterprise.resource.webcontainer.jsf.facelets.level=INFO +javax.enterprise.resource.webcontainer.jsf.lifecycle.level=INFO +javax.enterprise.resource.webcontainer.jsf.managedbean.level=INFO +javax.enterprise.resource.webcontainer.jsf.renderkit.level=INFO +javax.enterprise.resource.webcontainer.jsf.taglib.level=INFO +javax.enterprise.resource.webcontainer.jsf.timing.level=INFO +javax.enterprise.resource.sqltrace.level=INFO 
+javax.org.glassfish.persistence.level=INFO +org.jvnet.hk2.osgiadapter.level=INFO +javax.enterprise.system.tools.backup.level=INFO +org.glassfish.admingui.level=INFO +javax.enterprise.system.ssl.security.level=INFO +ShoalLogger.level=CONFIG +org.eclipse.persistence.session.level=INFO diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/metadata.xsl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/config/metadata.xsl Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/mif2ddi.xsl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/config/mif2ddi.xsl Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,466 @@ + + + + + http://www.icpsr.umich.edu/DDI/Version1-3.xsd + + + + + + + + + + + + + + + + + + + + http://www.icpsr.umich.edu/DDI + + + + + + + + + + + + + + + + + + + + + + + U.S. Census Bureau + + + + + + + + + + + + + + + + + + + + + + + : + + + + + + + + + + + + + + + + + + + + + + + + + TheDataWeb + + + + + + + + + + + + + start + + + + end + + + + + + + + + + + + + + + + + + + You must agree to the terms and conditions described here: + + + + + + + + + + + + + + /TheDataWeb_Tabulation/VDCRepositoryServlet/ + + / + + / + + / + + / + + + + Data File + + + + + + + + + + + + + + + + + + + + + + + + wgt + + + + + + + + + + contin + discrete + contin + + + + + + + + + + + + + + + numeric + numeric + numeric + character + + + + other + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + (-) = () + + + + + + YN + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ( + + ) + + + : + + + + : + + + + : + + + + : + + + + + + + / + + / + + / + + + + + + + + 
+ + + + + [ + + ] + + + + + + + /text() + [ + + ] + + + /comment() + [ + + ] + + + /processing-instruction() + [ + + ] + + + + + + + + + diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/neodb.props --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/config/neodb.props Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,5 @@ +neostore.nodestore.db.mapped_memory=32M +neostore.relationshipstore.db.mapped_memory=384M +neostore.propertystore.db.mapped_memory=256M +neostore.propertystore.db.strings.mapped_memory=1M +neostore.propertystore.db.arrays.mapped_memory=1M diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/networkData/lib/collections-generic-4.01.jar Binary file DVN-web/installer/dvninstall/config/networkData/lib/collections-generic-4.01.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/networkData/lib/colt-1.2.0.jar Binary file DVN-web/installer/dvninstall/config/networkData/lib/colt-1.2.0.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/networkData/lib/concurrent-1.3.4.jar Binary file DVN-web/installer/dvninstall/config/networkData/lib/concurrent-1.3.4.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/networkData/lib/geronimo-jta_1.1_spec-1.1.1.jar Binary file DVN-web/installer/dvninstall/config/networkData/lib/geronimo-jta_1.1_spec-1.1.1.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/networkData/lib/jung-algorithms-2.0.jar Binary file DVN-web/installer/dvninstall/config/networkData/lib/jung-algorithms-2.0.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/networkData/lib/jung-api-2.0.jar Binary file DVN-web/installer/dvninstall/config/networkData/lib/jung-api-2.0.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/networkData/lib/jung-visualization-2.0.jar Binary 
file DVN-web/installer/dvninstall/config/networkData/lib/jung-visualization-2.0.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/networkData/lib/junit-3.8.1.jar Binary file DVN-web/installer/dvninstall/config/networkData/lib/junit-3.8.1.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/networkData/lib/lucene-core-2.9.2.jar Binary file DVN-web/installer/dvninstall/config/networkData/lib/lucene-core-2.9.2.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/networkData/lib/neo4j-index-1.1.jar Binary file DVN-web/installer/dvninstall/config/networkData/lib/neo4j-index-1.1.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/networkData/lib/neo4j-kernel-1.1.jar Binary file DVN-web/installer/dvninstall/config/networkData/lib/neo4j-kernel-1.1.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/networkData/lib/neo4j-utils-1.1.jar Binary file DVN-web/installer/dvninstall/config/networkData/lib/neo4j-utils-1.1.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/networkData/lib/nestedvm-1.0.jar Binary file DVN-web/installer/dvninstall/config/networkData/lib/nestedvm-1.0.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/networkData/lib/network_utils-1.0-SNAPSHOT.jar Binary file DVN-web/installer/dvninstall/config/networkData/lib/network_utils-1.0-SNAPSHOT.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/networkData/lib/sqlite-jdbc-3.6.16.jar Binary file DVN-web/installer/dvninstall/config/networkData/lib/sqlite-jdbc-3.6.16.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/oai_dc2ddi.xsl --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/config/oai_dc2ddi.xsl Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,135 @@ + + 
+ + + + + + + + + + + + + + + handle + handle + + + + hdl: + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/config/oaicat.properties --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/config/oaicat.properties Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,51 @@ +# OAICat Configuration + +# This paramter is optional. Comment it out if you want the code to get the baseURL +# from the request instead. Setting it here is only necessary if your firewall messes +# with the request in a way that alters the baseURL from your expectations. +# OAIHandler.baseURL=http://alcme.oclc.org/oaicat/OAIHandler + +# Uncomment this line if you want to include an XSL stylesheet reference +# in the OAI response so they can be rendered into HTML by browsers. +# OAIHandler.styleSheet=/oaicat/oaicat.xsl + +# Uncomment this line if you want OAICat to render the responses into HTML on the server using +# OAIHandler.stylesheet when the request comes from an old browser. 
+# OAIHandler.renderForOldBrowsers=true +OAIHandler.appBase=../applications/j2ee-modules + +AbstractCatalog.oaiCatalogClassName=edu.harvard.iq.dvn.core.web.oai.catalog.DVNOAICatalog +AbstractCatalog.recordFactoryClassName=edu.harvard.iq.dvn.core.web.oai.catalog.DVNXMLRecordFactory +AbstractCatalog.secondsToLive=18000 + +# Choose one of the following two +#AbstractCatalog.granularity=YYYY-MM-DD +AbstractCatalog.granularity=YYYY-MM-DDThh:mm:ssZ + +# Change to include properties peculiar to your implementation of AbstractCatalog +DVNOAICatalog.maxListSize=10 +DVNOAICatalog.homeDir=../applications/j2ee-apps/DVN-EAR/DVN-web_war/WEB-INF/ +DVNOAICatalog.hideExtension=true + +FileRecordFactory.repositoryIdentifier=oaicat.oclc.org + +# Custom Identify response values +Identify.repositoryName=Dataverse Network repository +Identify.adminEmail=mailto:dataverse@lists.hmdc.harvard.edu +Identify.earliestDatestamp=2000-01-01T00:00:00Z +Identify.deletedRecord=no + +# This is the old way to specify for the Identify verb +# Identify.repositoryIdentifier=oaicat.oclc.org +# Identify.sampleIdentifier=oai:oaicat.oclc.org:OCLCNo/ocm00000012 + +# This is the new way to specify elements in general for the Identify verb +# Append something unique like .1, .2, etc to 'Identify.description' for each occurrence +#Identify.description.1=oaioaicat.oclc.org:oai:oaicat.oclc.org:OCLCNo/ocm00000012 + +# List the supported metadataPrefixes along with the class that performs the associated crosswalk +Crosswalks.oai_dc=ORG.oclc.oai.server.crosswalk.XML2oai_dc +# Crosswalks.oai_etdms=ORG.oclc.oai.server.crosswalk.XML2oai_etdms +Crosswalks.ddi=edu.harvard.iq.dvn.core.web.oai.catalog.DVNXML2ddi + +#FileMap2oai_dc.xsltName=../applications/j2ee-modules/oaicat/WEB-INF/etdms2dc.xsl diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_images/application-octet-stream.png Binary file DVN-web/installer/dvninstall/doc/guides/_images/application-octet-stream.png has changed diff -r 
dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_images/application-pdf.png Binary file DVN-web/installer/dvninstall/doc/guides/_images/application-pdf.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_images/complex_exploration.png Binary file DVN-web/installer/dvninstall/doc/guides/_images/complex_exploration.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_images/complex_graph_screenshot.png Binary file DVN-web/installer/dvninstall/doc/guides/_images/complex_graph_screenshot.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_images/displaytabscreenshot.png Binary file DVN-web/installer/dvninstall/doc/guides/_images/displaytabscreenshot.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_images/editfiltersscreenshot.png Binary file DVN-web/installer/dvninstall/doc/guides/_images/editfiltersscreenshot.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_images/editmeasuresscreenshot.png Binary file DVN-web/installer/dvninstall/doc/guides/_images/editmeasuresscreenshot.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_images/edittimevariablescreenshot.png Binary file DVN-web/installer/dvninstall/doc/guides/_images/edittimevariablescreenshot.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_images/measure_selected.png Binary file DVN-web/installer/dvninstall/doc/guides/_images/measure_selected.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_images/simple_explore_data.png Binary file DVN-web/installer/dvninstall/doc/guides/_images/simple_explore_data.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_images/sourcetabscreenshot.png Binary file 
DVN-web/installer/dvninstall/doc/guides/_images/sourcetabscreenshot.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-R-ingest.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-R-ingest.txt Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,189 @@ +============ +Ingest of R (.RData) files +============ + +Overview. +========= + +Support for ingesting R data files has been added in version 3.5. R +has been increasingly popular in the research/academic community, +owing to the fact that it is free and open-source (unlike SPSS and +STATA). Consequently, more and more data is becoming available +exclusively as R data files. This long-awaited feature makes it +possible to ingest such data into DVN as "subsettable" files. + +Requirements. +============ + +R ingest relies on R having been installed, configured and made +available to the DVN application via RServe (see the Installers +Guide). This is in contrast to the SPSS and Stata ingest - which can +be performed without R present. (though R is still needed to perform +most subsetting/analysis tasks on the resulting data files). + +The data must be formatted as an R dataframe (data.frame()). If an +.RData file contains multiple dataframes, only the 1st one will be +ingested. + +Data Types, compared to other supported formats (Stata, SPSS) +=========================================================== + +Integers, Doubles, Character strings +------------------------------------ + +The handling of these types is intuitive and straightforward. The +resulting tab file columns, summary statistics and UNF signatures +should be identical to those produced by ingesting the same vectors +from SPSS and Stata. 
+ +**A couple of things that are unique to R/new in DVN:** + +R explicitly supports Missing Values for all of the types above; +Missing Values encoded in R vectors will be recognized and preserved +in TAB files (as 'NA'), counted in the generated summary statistics +and data analysis. + +In addition to Missing Values, R recognizes "Not a Value" (NaN) and +positive and negative infinity for floating point variables. These +are now properly supported by the DVN. + +Also note, that unlike Stata, that does recognize "float" and "double" +as distinct data types, all floating point values in R are in fact +double precision. + +R Factors +--------- + +These are ingested as "Categorical Values" in the DVN. + +One thing to keep in mind: in both Stata and SPSS, the actual value of +a categorical variable can be both character and numeric. In R, all +factor values are strings, even if they are string representations of +numbers. So the values of the resulting categoricals in the DVN will +always be of string type too. + +| **New:** To properly handle *ordered factors* in R, the DVN now supports the concept of an "Ordered Categorical" - a categorical value where an explicit order is assigned to the list of value labels. + +(New!) Boolean values +--------------------- + +R Boolean (logical) values are supported. + + +Limitations of R, as compared to SPSS and STATA. +------------------------------------------------ + +Most noticeably, R lacks a standard mechanism for defining descriptive +labels for the data frame variables. In the DVN, similarly to +both Stata and SPSS, variables have distinct names and labels; with +the latter reserved for longer, descriptive text. +With variables ingested from R data frames the variable name will be +used for both the "name" and the "label". + +| *Optional R packages exist for providing descriptive variable labels; + in one of the future versions support may be added for such a + mechanism. 
It would of course work only for R files that were + created with such optional packages*. + +Similarly, R categorical values (factors) lack descriptive labels too. +**Note:** This is potentially confusing, since R factors do +actually have "labels". This is a matter of terminology - an R +factor's label is in fact the same thing as the "value" of a +categorical variable in SPSS or Stata and DVN; it contains the actual +meaningful data for the given observation. It is NOT a field reserved +for explanatory, human-readable text, such as the case with the +SPSS/Stata "label". + +Ingesting an R factor with the level labels "MALE" and "FEMALE" will +produce a categorical variable with "MALE" and "FEMALE" in the +values and labels both. + + +Time values in R +================ + +This warrants a dedicated section of its own, because of some unique +ways in which time values are handled in R. + +R makes an effort to treat a time value as a real time instance. This +is in contrast with either SPSS or Stata, where time value +representations such as "Sep-23-2013 14:57:21" are allowed; note that +in the absence of an explicitly defined time zone, this value cannot +be mapped to an exact point in real time. R handles times in the +"Unix-style" way: the value is converted to the +"seconds-since-the-Epoch" Greenwich time (GMT or UTC) and the +resulting numeric value is stored in the data file; time zone +adjustments are made in real time as needed. + +Things still get ambiguous and confusing when R **displays** this time +value: unless the time zone was explicitly defined, R will adjust the +value to the current time zone. 
The resulting behavior is often +counter-intuitive: if you create a time value, for example: + + timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS"); + +on a computer configured for the San Francisco time zone, the value +will be differently displayed on computers in different time zones; +for example, as "12:57 PST" while still on the West Coast, but as +"15:57 EST" in Boston. + +If it is important that the values are always displayed the same way, +regardless of the current time zones, it is recommended that the time +zone is explicitly defined. For example: + + attr(timevalue,"tzone")<-"PST" +or + timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS", tz="PST"); + +Now the value will always be displayed as "15:57 PST", regardless of +the time zone that is current for the OS ... **BUT ONLY** if the OS +where R is installed actually understands the time zone "PST", which +is not by any means guaranteed! Otherwise, it will **quietly adjust** +the stored GMT value to **the current time zone**, yet it will still +display it with the "PST" tag attached!** One way to rephrase this is +that R does a fairly decent job **storing** time values in a +non-ambiguous, platform-independent manner - but gives you no guarantee that +the values will be displayed in any way that is predictable or intuitive. + +In practical terms, it is recommended to use the long/descriptive +forms of time zones, as they are more likely to be properly recognized +on most computers. For example, "Japan" instead of "JST". Another possible +solution is to explicitly use GMT or UTC (since it is very likely to be +properly recognized on any system), or the "UTC+" notation. Still, none of the above +**guarantees** proper, non-ambiguous handling of time values in R data +sets. 
The fact that R **quietly** modifies time values when it doesn't +recognize the supplied timezone attribute, yet still appends it to the +**changed** time value does make it quite difficult. (These issues are +discussed in depth on R-related forums, and no attempt is made to +summarize it all in any depth here; this is just to make you aware of +this being a potentially complex issue!) + +An important thing to keep in mind, in connection with the DVN ingest +of R files, is that it will **reject** an R data file with any time +values that have time zones that we can't recognize. This is done in +order to avoid (some) of the potential issues outlined above. + +It is also recommended that any vectors containing time values +ingested into the DVN are reviewed, and the resulting entries in the +TAB files are compared against the original values in the R data +frame, to make sure they have been ingested as expected. + +Another **potential issue** here is the **UNF**. The way the UNF +algorithm works, the same date/time values with and without the +timezone (e.g. "12:45" vs. "12:45 EST") **produce different +UNFs**. Considering that time values in Stata/SPSS do not have time +zones, but ALL time values in R do (yes, they all do - if the timezone +wasn't defined explicitly, it implicitly becomes a time value in the +"UTC" zone!), this means that it is **impossible** to have 2 time +value vectors, in Stata/SPSS and R, that produce the same UNF. + +| **A pro tip:** if it is important to produce SPSS/Stata and R versions of +the same data set that result in the same UNF when ingested, you may +define the time variables as **strings** in the R data frame, and use +the "YYYY-MM-DD HH:mm:ss" formatting notation. This is the formatting used by the UNF +algorithm to normalize time values, so doing the above will result in +the same UNF as the vector of the same time values in Stata. 
+ +Note: date values (dates only, without time) should be handled the +exact same way as those in SPSS and Stata, and should produce the same +UNFs. \ No newline at end of file diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-api-main.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-api-main.txt Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,533 @@ +==================================== +APIs Guide +==================================== + +.. _api: + +**Introduction** + +We strongly encourage anyone interested in building tools to +interoperate with the Dataverse Network to utilize our open source +APIs. Please visit our `website `__ for +examples of external apps that have been built to work with our APIs. + +.. _data-sharing-api: + +Data Sharing API +++++++++++++++++++++++++++ + +As of version 3.0, a new API for programmatic access to the DVN data and +metadata has been added. The API allows a remote, non-DVN +archive/application to search the holdings and download files from a +Dataverse Network. + +The Data Sharing API documentation is available below: + +API URLs +==================== + +The URLs for the Data Sharing API resources are of the form: + +``/dvn/api/{/arg}{?{{arg}&...}}`` + +Generally, mandatory arguments are embedded in the URL and optional +arguments are supplied as query parameters, in the ``?param=...`` notation. +See the documentation for the individual resources below for details. + +The API supports basic HTTP Authentication. So that the access +credentials are not transmitted in the clear, the API verbs (methods) +below are **only accessible over HTTPS**. 
+ +Metadata API +========================== + +The API for accessing Dataverse Network metadata is implemented in 4 verbs +(resources): + +| ``metadataSearchFields`` +| ``metadataSearch`` +| ``metadataFormatsAvailable`` +| ``metadata`` + +metadataSearchFields +---------------------------------- + +**Arguments:** + +``none`` + +**URL example:** + +``/dvn/api/metadataSearchFields/`` + +**Output:** + +XML record in the format below: + +.. code-block:: guess + + + + title + title + + + authorName + authorName + + + otherId + otherId + + ... + + +metadataSearch +------------------------------------ + +**Arguments:** + +| ``queryString: mandatory, embedded.`` +| *Standard Lucene-style search queries are supported; (same query format currently used to define OAI sets, etc.)* + +**URLs examples:** + +| ``/dvn/api/metadataSearch/title:test`` +| ``/dvn/api/metadataSearch/title:test AND authorName:leonid`` + +**Output:** + +XML record in the format below: + +.. code-block:: guess + + + title:test + + + ... + + + +**Error Conditions:** + +Note that when the query does not produce any results, the resource returns an XML record +with an empty ```` list, NOT a 404. + +metadataFormatsAvailable +-------------------------------------- + +**Arguments:** + +| ``objectId: mandatory, embedded.`` +| *Both global and local (database) IDs are supported.* + +**URLs examples:** + +| ``/dvn/api/metadataFormatsAvailable/hdl:1902.1/6635`` +| ``/dvn/api/metadataFormatsAvailable/9956`` + +**Output:** + +XML record in the format below: + +.. code-block:: guess + + + + ddi + http://www.icpsr.umich.edu/DDI/Version2-0.xsd + application/xml + + + oai_dc + http://www.openarchives.org/OAI/2.0/oai_dc.xsd + application/xml + + + +(**Note** the ``selectSupported`` and ``excludeSupported`` attributes above!) 
+ +**Error Conditions:** + +``404 NOT FOUND`` if study does not exist + +metadata +------------------------- + +**Arguments:** + +| ``objectId: mandatory, embedded.`` +| *Both global and local (database) IDs are supported.* + +| ``formatType: optional, query.`` +| *Defaults to DDI if not supplied.* + +**URLs examples:** + +| ``/dvn/api/metadata/hdl:1902.1/6635 /dvn/api/metadata/9956`` +| ``/dvn/api/metadata/hdl:1902.1/6635?formatType=ddi`` + +**Output:** + +Metadata record in the format requested, if available. No extra +headers, etc. + +**Partial selection of metadata sections:** + +When requesting partial records is supported (see +``metadataFormatsAvailable``, above for more info), these additional parameters can be supplied: + +| ``partialExclude: optional, query.`` +| *Xpath query representing metadata section to drop, where supported.* + +| ``partialInclude: optional, query.`` +| *Xpath query representing metadata section to include, where supported.* + +**Examples:** + +| ``/dvn/api/metadata/hdl:1902.1/6635?formatType=ddi&partialExclude=codeBook/dataDscr`` +| will produce a DDI without the dataDscr section. +| *[I’m expecting this to be the single most useful and common real-life application of thisfeature - L.A.]* + +| ``/dvn/api/metadata/hdl:1902.1/6635?formatType=ddi&partialInclude=codeBook/stdyDscr`` +| will produce a DDI with the stdyDscr section only. + +(**Note**: for now, only simple top-level Xpath queries like the above are supported). + +One other limitation of the current implementation: it does not validate the supplied ``partialExclude`` and ``partialInclude`` arguments; no error messages/diagnostics will be given if the Xpath queries are not part of the metadata schema. For example, if you request partialInclude=foobar, it will quietly produce an empty DDI, and ``partialExclude=foobar`` will not exclude anything (and you will get a complete DDI). 
+ +**Error Conditions:** + +| ``404 NOT FOUND`` +| if study does not exist + +| ``503 SERVICE UNAVAILABLE`` +| if study exists, but the format requested is not available; +| also, when partial exclude or include is requested, if it’s not supported by the service (see the documenation for metadataFormatsAvailable above). + +**Notes:** + +A real-life workflow scenario may go as follows: + +a. Find the searchable index fields on this DVN (meatadataSearchFields) +b. Run a search (metadataSearch) +c. For [select] studies returned, find what metadata formats are available (metadataFormatsAvailable) +d. Retrieve the metadata in the desired format (metadata) + +File Access API +===================== + +The Dataverse Network API for downloading digital objects (files) is implemented in 2 +verbs (resources): + +| ``downloadInfo`` +| ``download`` + +downloadInfo +----------------------------- + +**Arguments:** + +| ``objectId: mandatory, embedded.`` +| Database ID of the Dataverse Network Study File. + +**URLs example:** + +``/dvn/api/downloadInfo/9956`` + +**Output:** + +XML record in the format below: + +*(Note: the record below is only an example; we will provide full schema/documentation of theFileDownloadInfo record format below)* + +.. code-block:: guess + + + + + prettypicture.jpg + image/jpeg + 52825 + + + testUser + password + + + + + Authorized Access only + + Terms of Use + + + + + thumbnail + imageThumb=true + image/png + Image Thumbnail + + + + + + +**Error Conditions:** + +| ``404 NOT FOUND`` +| Study file does not exist. + +download +--------------------------------- + +**Arguments:** + +| ``objectId: mandatory, embedded.`` +| Database ID of the DVN Study File. + +| ``Optional Query args:`` +| As specified in the output of downloadInfo, above. 
+ +**URLs examples:** + +| ``/dvn/api/download/9956`` +| ``/dvn/api/download/9956?imageThumb=true`` +| ``/dvn/api/download/9957?fileFormat=stata`` + +**Output:** + +Byte Stream (with proper HTTP headers specifying the content +type, file name and such) + +**Error Conditions:** + +| ``404 NOT FOUND`` +| Study file does not exist. + +| ``401 AUTHORIZATION REQUIRED`` +| Access to restricted object attempted without HTTP Authorization header supplied. + +| ``403 PERMISSION DENIED HTTP`` +| Authorization header supplied, but the authenticated user is not +| authorized to directly access the object protected by Access +| Permissions and/or Access Restrictions (“Terms of Use”). + +.. _data-deposit-api: + +Data Deposit API +++++++++++++++++ + +As of version 3.6, a new API for programmatic deposit of data and metadata to the Dataverse Network has been added. The API allows a remote, non-Dataverse Network archive/application to deposit files and metadata to a Dataverse Network installation. + +Overview of Data Deposit API +============================ + +"v1" of the DVN Data Deposit API is a partial implementation of the SWORDv2 protocol, the specification for which available at http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html + +Please reference the SWORDv2 specification for expected HTTP status codes (i.e. 201, 204, 404, etc.), headers (i.e. "Location"), etc. + +Data Deposit API v1 `curl` examples +----------------------------------- + +The following `curl` commands demonstrate supported operations: + +Retrieve SWORD service document +******************************* + +The service document enumerates the dataverses ("collections" from a SWORD perspective) the user can deposit data into. The "collectionPolicy" element for each dataverse contains the deposit terms of use for the network and dataverse. 
+ +``curl https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/service-document`` + +Create a study with an Atom entry (XML file) +******************************************** + +``curl --data-binary "@atom-entry-study.xml" -H "Content-Type: application/atom+xml" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS`` + +.. code-block:: guess + + + + + + Roasting at Home + Peets, John + Stumptown, Jane + + Coffee Bean State University + + Peets, J., & Stumptown, J. (2013). Roasting at Home. New England Journal of Coffee, 3(1), 22-34. + + 2013-07-11 + + + Considerations before you start roasting your own coffee at home. + + coffee + beverage + caffeine + + United States + Canada + + aggregate data + + Stumptown, Jane. 2011. Home Roasting. Coffeemill Press. + + Creative Commons CC-BY 3.0 (unported) http://creativecommons.org/licenses/by/3.0/ + + Peets, John. 2010. Roasting Coffee at the Coffee Shop. Coffeemill Press + + +Dublin Core (DC) Qualified Mapping - DDI - Dataverse Network DB Element Crosswalk +*********************************************************************************** + ++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|DC (terms: namespace) | DVN DB Element | DDI Element 2.x | Note | ++=============================+==============================================+================================+============================================================================================================================================+ +|dcterms:title | title | 2.1.1.1 title | | 
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:creator | author (LastName, FirstName) | 2.1.2.1 AuthEnty | | ++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:subject | keyword | 2.2.1.1. keyword | | ++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:description | abstract | 2.2.2 abstract | Describing the purpose, scope or nature of the data collection... 
| ++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:publisher | producer | 2.1.3.1 producer | person or agency financially or administratively responsible for the dataset | ++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:contributor | n/a | n/a | see dcterms:creator above | ++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:date |productionDate (YYYY-MM-DD or YYYY-MM or YYYY)| 2.1.3.3 prodDate | production or published date of dataset | ++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:type | kindOfData | 2.2.3.10 dataKind | Type of data included in the file: survey data, census/enumeration data, aggregate data, clinical | ++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:format | n/a | n/a | | 
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:identifier | otherID | 2.1.1.5 IDNo | Don't use this field to map a journal article ID. Only ID's that directly belong to dataset | ++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:source | dataSources | 2.3.1.8.1 dataSrc | List of books, articles, data files if any that served as the sources for the data collection | ++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:language | n/a | n/a | | ++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:relation | relatedMaterial | 2.5.1 relMat | any related material (journal article is not included here - see: dcterms:isReferencedBy below) | ++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:coverage | geographicCoverage | 2.2.3.4 geogCover | Info on the geographic coverage of the data | 
++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:rights | restrictions | 2.4.2.3 restrctn | any restrictions on the access or use of the dataset | ++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:bibliographicCitation| dataCitation | ? (2.1.7 biblCit) | data citation for the study in the Dataverse Network | ++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ +|dcterms:isReferencedBy | studyRelPublications |? 
(not set by DDI community yet)|the publication (journal article, book, other work) that uses this dataset (include citation, permanent identifier (DOI), and permanent URL)| ++-----------------------------+----------------------------------------------+--------------------------------+--------------------------------------------------------------------------------------------------------------------------------------------+ + + + +Add files to a study with a zip file +************************************ + +``curl --data-binary @example.zip -H "Content-Disposition: filename=example.zip" -H "Content-Type: application/zip" -H "Packaging: http://purl.org/net/sword/package/SimpleZip" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/study/hdl:TEST/12345`` + +Display a study atom entry +************************** + +Contains data citation (bibliographicCitation), alternate URI [persistent URI of study], edit URI, edit media URI, statement URI. + +``curl https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345`` + +Display a study statement +************************* + +Contains feed of file entries, latestVersionState, locked boolean + +``curl https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/statement/study/hdl:TEST/12345`` + +Delete a file by database id +**************************** + +``curl -i -X DELETE https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/file/2325541`` + +Replacing cataloging information (title, author, etc.) for a study +****************************************************************** + +Please note that all cataloging information will be replaced, including fields that can not be expressed with "dcterms" fields. + +``curl --upload-file "atom-entry-study2.xml" -H "Content-Type: application/atom+xml" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345`` + +.. 
code-block:: guess + + + + + + The Levels of Caffeine in Cold Brew Coffee + Peets, John L. + Stumptown Research Institute + Peets, J., & Stumptown, J. (2013). Roasting at Home. New England Journal of Coffee, 3(1), 22-34. + 2013-08-11 + This study evaluates the caffeine levels of a cold brewed coffee. + coffee bean + caffeine + cold brew process + Stumptown Coffee Company + Creative Commons CC-BY 3.0 (unported) http://creativecommons.org/licenses/by/3.0/ + + +List studies in a dataverse +*************************** + +``curl https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS`` + +Delete a study (non-released studies only) +****************************************** + +``curl -i -X DELETE https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345`` + +Deaccession a study (released studies only) +**************************************************** + +``curl -i -X DELETE https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345`` + +Release a study +*************** + +``curl -X POST -H "In-Progress: false" --upload-file zero-length-file.txt https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345`` + +Determine if a dataverse has been released +****************************************** + +Look for a `dataverseHasBeenReleased` boolean. + +``curl https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS`` + +`curl` reference +---------------- + +Per http://curl.haxx.se/docs/manpage.html + +* `--upload-file` is an HTTP `PUT` +* `--data-binary` is an HTTP `POST` + +DVN Data Deposit API v1 client sample code (Python) +=================================================== + +https://github.com/dvn/swordpoc/tree/master/dvn_client contains sample Python code for writing a DVN Data Deposit API v1 client. 
It makes use of a Python client library which conforms to the SWORDv2 specification: https://github.com/swordapp/python-client-sword2 + +SWORDv2 client libraries +======================== + +* Python: https://github.com/swordapp/python-client-sword2 +* Java: https://github.com/swordapp/JavaClient2.0 +* Ruby: https://github.com/swordapp/sword2ruby +* PHP: https://github.com/swordapp/swordappv2-php-library diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-developer-main.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-developer-main.txt Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,688 @@ +==================================== +DVN Developers Guide +==================================== + +Please note: This guide was updated in October 2013 to reflect the switch +from Ant to Maven in DVN 3.6.1. + +Build Environment (Configuring NetBeans) +++++++++++++++++++++++++++++++++++++++++ + +This chapter describes setting up the build environment that you will +need to build the DVN application from source code.  + +Install NetBeans and GlassFish +============================== + +As of DVN version 3.6.1 and the switch to Maven, a DVN development +environment should not have any dependency on a particular IDE, but use +of NetBeans 7.2.1 is encouraged because it's the version used by most of +the current developers (on Mac OS X). + +The NetBeans project is currently offering an installer bundle that +contains both NetBeans 7.2.1 and a supported version of GlassFish +(3.1.2.2). If they choose to discontinue the bundle, you will have to +download and install the two packages separately. Note that you can have +multiple versions of both NetBeans and GlassFish on your system. + +Please note: While we intend to investigate NetBeans 7.4 and GlassFish +4, these are not yet known to provide a suitable development +environment. 
+ +We strongly recommend that you run both installs **as a regular user**. There's no reason to run your development environment as root. + +Install NetBeans bundle +----------------------- + +Download NetBeans 7.2.1 Java EE + GlassFish Open Source Edition 3.1.2.2 +bundle from https://netbeans.org/downloads/7.2.1 + +For Mac OS X, you will download a .dmg disk image that will open +automatically and start the installer for you. Choose the typical +installation but be sure to install GlassFish and JUnit when prompted. + +Note that you don't have to uninstall your existing NetBeans version. +You can have as many versions installed as you need in parallel. + +When you start NetBeans 7.2.1 for the first time, you will be asked if +you want to import the settings from the previous installations. If you +have an existing, pre-DVN 3.\* development environment on your system,  +**answer "no" -- we want to create the new configuration from scratch.** + +[If you have to] Install GlassFish 3.1.2.2 +------------------------------------------ + +We **strongly** recommend that you install GlassFish Server 3.1.2.2, +Open Source Edition, **Full Platform**. If you have to install it +separately from NetBeans, it can be obtained from +http://glassfish.java.net/downloads/3.1.2.2-final.html + +The page above contains a link to the installation instructions, but the +process is very straightforward - just download and run the installer. + +It is strongly recommended that you use Sun/Oracle Java JDK version 1.6. +Please make sure you have the newest (or at least, recent) build number +available for your platform. (On Mac OS X 10.8, since the JDK can be +installed as part of OS distribution, the version currently provided by +Apple should be sufficient). 
In other words, we do not recommend +building DVN under JDK 1.7 until the ticket regarding the move from Java +6 to 7 has been closed: https://redmine.hmdc.harvard.edu/issues/3306 + +Note that you don't have to uninstall older versions of GlassFish you +may still have around. It's ok to have multiple versions installed. But +make sure you have the 3.1.2.2 installation selected as the active +server in NetBeans. + +**Important:** During the installation, leave the admin password fields +blank. This is not a security risk since out of the box, GlassFish +3.1.2.2 will only be accepting admin connections on the localhost +interface. Choosing a password at this stage, however, will complicate +the installation process unnecessarily. Since this is a development +system, you can probably keep this configuration unchanged (admin on +localhost only). If you need to be able to connect to the admin console +remotely, please see the note in the Appendix section of the main +Installers Guide. + +Install JUnit (if you haven't already) +-------------------------------------- + +Depending on how you installed NetBeans, you might already have JUnit +installed. JUnit can be installed from Tools -> Plugins. + +Check out a new copy of the DVN source tree +=========================================== + +Create a GitHub account [if you don't have one already] +------------------------------------------------------- + +Sign up at https://github.com + +Please note that primary audience of this guide (for now) is people who +have push access to https://github.com/IQSS/dvn . If you do not have +push access and want to contribute (and we hope you do!) please fork the +repo per https://help.github.com/articles/fork-a-repo and make +adjustments below when cloning the repo. + +Set up an ssh keypair (if you haven't already) +----------------------------------------------------- + +You *can* use git with passwords over HTTPS but it's much nicer to set +up SSH keys. 
+ +https://github.com/settings/ssh is the place to manage the ssh keys +GitHub knows about for you. That page also links to a nice howto: +https://help.github.com/articles/generating-ssh-keys + +From the terminal, ``ssh-keygen`` will create new ssh keys for you: + +- private key: ``~/.ssh/id_rsa`` + + - It is **very important to protect your private key**. If someone + else acquires it, they can access private repositories on GitHub + and make commits as you! Ideally, you'll store your ssh keys on an + encrypted volume and protect your private key with a password when + prompted for one by ``ssh-keygen``. See also "Why do passphrases + matter" at https://help.github.com/articles/generating-ssh-keys + +- public key: ``~/.ssh/id_rsa.pub`` + +After you've created your ssh keys, add the public key to your GitHub +account. + +Clone the repo +-------------- + +Please see `branches <#branches>`__ for detail, but in short, the +"develop" branch is where new commits go. Below we will assume you want +to make commits to "develop". + +In NetBeans, click Team, then Git, then Clone. + +Remote Repository +***************** + +- Repository URL: ``github.com:IQSS/dvn.git`` +- Username: ``git`` +- Private/Public Key + + - Private Key File: ``/Users/[YOUR_USERNAME]/.ssh/id_rsa`` + +- Passphrase: (the passphrase you chose while running ``ssh-keygen``) + +Click Next. + +If you are prompted about the authenticity of github.com's RSA key fingerprint, answer "Yes" to continue connecting. GitHub's RSA key fingerprint is listed at https://help.github.com/articles/generating-ssh-keys + +Remote Branches +*************** + +Under Select Remote Branches check the "develop" branch. + +Please note: You may see other branches listed, such as "master", but +there is no need to check them out at this time. + +Click Next. 
+ +Destination Directory +********************* + +The defaults should be fine: + +- Parent Directory: ``/Users/[YOUR_USERNAME]/NetBeansProjects`` +- Clone Name: ``dvn`` +- Checkout Branch: ``develop*`` +- Remote Name: ``origin`` + +Click Finish. + +You should see a message that 3 projects were cloned. Click "Open +Project". + +Open Projects +============= + +In the "Open Projects" dialog you should see three projects, DVN-lockss, +DVN-root, and DVN-web (a child of DVN-root). + +Highlight DVN-root and check "Open Required" (to include DVN-web) and click "Open". + +At this point, you should have two (and only two) projects open in +NetBeans: DVN-root and DVN-web. If you hover over the projects, it's +normal at this point to see warnings such as "Some dependency artifacts +are not in the local repository" or "Cannot find application server: +GlassFish Server 3+". We'll correct these next. + +Build for the first time +======================== + +In NetBeans, right-click DVN-root and click "Build". This will download +many dependencies via Maven and may take several minutes. + +When this process has completed, right-click DVN-web and click "Build". +You should expect to see "BUILD SUCCESS". This means you have +successfully built the .war application package, but do not attempt to +deploy the application just yet! We need to configure the server +environment first, which consists of GlassFish and PostgreSQL + +Application Environment (Configuring GlassFish and PostgreSQL) +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +In this chapter, we describe the process of setting up your own local +application environment into which you will deploy the DVN application. 
+ +Install PostgreSQL database server +================================== + +For Mac OS X (our default development OS), you can get the installer +from http://www.postgresql.org/download/macosx + +The installation is very straightforward; just make sure you answer +"yes" when asked if Postgres should be accepting network connections. +(The application will be accessing the database at the "localhost" +address). + +Once installed, we recommend that you also allow connections +over local Unix sockets. This way the installer won't have to ask you +for the Postgres password every time it needs to talk to the database. +To do so, modify the "local all all" line in the data/pg\_hba.conf file +to look like this: + +| local all all trust + +**Note** that this only opens Postgres to the local socket connections, +and should not be considered a security risk. But if you are extra +cautious, you may use instead: + +| local all all ident sameuser + +Restart Postgres for the changes to take effect! + +Please note: if you have any problems with the PostgreSQL setup, please +ensure the right ``psql`` is in your ``$PATH``. + +You can check the instructions in the main Installers Guide for more info: +:ref:`PostgreSQL section`; +but the above should be sufficient to get your environment set up. + +Run the install-dev script +========================== + +The installer is supplied with the DVN source in the tools directory. +You must run it as root (for direct access to Postgres). + +| To run the script: +| ``sudo su -`` +| ``cd /Users/[YOUR_USERNAME]/NetBeansProjects/dvn/tools/installer/dvninstall`` + +| then execute +| ``./install-dev`` + +When prompted for various settings, you will likely be able to accept +all the default values (in a development environment, they are for the +most part the same for everybody). + +Testing login +============= + +Once the ``install-dev`` script has completed successfully, you will +have a fully functional Dataverse Network server. 
After making sure +GlassFish has been started per the output of the script, you should be +able to log in to DVN with these credentials: + +- http://localhost:8080/dvn/ +- username: networkAdmin +- password: networkAdmin + +Please note that when deploying from NetBeans for the first time, you +will be prompted to select a deployment server. From the drop down, +select "GlassFish Server 3.1.2", click "Remember in Current IDE Session" +and click "OK". + +Developing with Git ++++++++++++++++++++ + + +.. _commit: + +Commit +================== + +**Committing Changes** + +By following the instructions in the :ref:`build ` step, you +should be in the "develop" branch, which is where we want to make +commits as we work toward the next release. + +You can verify which branch you are on by clicking Team then "Repository +Browser". + +You should see ``dvn [develop]`` at the root of the tree and **develop** +in bold under Branches -> Local + +Click Team, then "Show Changes". Select the desired files and +right-click to commit. + +To publish your changes on GitHub, you'll need to follow the next step: +:ref:`push `. + +.. _push: + +Push +=========== + +**Pushing your commits to GitHub** + +After making your :ref:`commit `, push it to GitHub by clicking +Team -> Remote -> Push, then Next (to use your configured remote +repository), then checking **develop** and Finish. + +Your commit should now appear on GitHub in the develop branch: +https://github.com/IQSS/dvn/commits/develop + +Your commit should **not** appear in the master branch on GitHub: +https://github.com/IQSS/dvn/commits/master . Not yet anyway. We only +merge commits into master when we are ready to release. Please see the +`branches <#branches>`__ section for more detail. 
+ + +Release +============ + +Merge develop into master +-------------------------------------- + +Tag the release +*************************** + +Here is an example of how the 3.4 tag ( +`https://github.com/IQSS/dvn/tree/3.4 `__) was created and pushed to GitHub: + +.. code-block:: guess + + murphy:dvn pdurbin$ git branch + * develop + master + murphy:dvn pdurbin$ git pull + Already up-to-date. + murphy:dvn pdurbin$ git checkout master + Switched to branch 'master' + murphy:dvn pdurbin$ git merge develop + Updating fdbfe57..6ceb24f + (snip) + create mode 100644 tools/installer/dvninstall/readme.md + murphy:dvn pdurbin$ git tag + 3.3 + murphy:dvn pdurbin$ git tag -a 3.4 -m 'merged develop, tagging master as 3.4' + murphy:dvn pdurbin$ git tag + 3.3 + 3.4 + murphy:dvn pdurbin$ git push origin 3.4 + Counting objects: 1, done. + Writing objects: 100% (1/1), 182 bytes, done. + Total 1 (delta 0), reused 0 (delta 0) + To git@github.com:IQSS/dvn.git + * [new tag] 3.4 -> 3.4 + murphy:dvn pdurbin$ + murphy:dvn pdurbin$ git push origin master + Total 0 (delta 0), reused 0 (delta 0) + To git@github.com:IQSS/dvn.git + fdbfe57..6ceb24f master -> master + murphy:dvn pdurbin$ + +Make release available for download +****************************************************** + +On dvn-build: + +.. code-block:: guess + + cd tools/installer + make installer + +Rename the resulting "dvninstall.zip" to include the release number +(i.e. "dvninstall\_v3\_4.zip") and upload it, the separate war file, a +readme, and a buildupdate script (all these files should include the +release number) to SourceForge (i.e. +`http://sourceforge.net/projects/dvn/files/dvn/3.4/ `__). 
+ +Increment the version number +******************************************************* + +The file to edit is: + +| `https://github.com/IQSS/dvn/blob/develop/src/DVN-web/src/VersionNumber.properties `__ + +Branches +=========== + +Current list of branches +------------------------------------- + +`https://github.com/IQSS/dvn/branches `__ + +New branching model: develop vs. master +------------------------------------------------- + +Please note that with the move to git, we are adopting the branching +model described at +`http://nvie.com/posts/a-successful-git-branching-model/ `__ + +In this branching model there are two persistent branches: + +- develop: where all new commits go +- master: where code gets merged and tagged as a release + +That is to say, **please make your commits on the develop branch, not +the master branch**. + +Feature branches +------------------------ + + "The essence of a feature branch is that it exists as long as the + feature is in development, but will eventually be merged back into + develop (to definitely add the new feature to the upcoming release) + or discarded (in case of a disappointing experiment)." 
-- + `http://nvie.com/posts/a-successful-git-branching-model/ `__ + +Example feature branch: 2656-lucene +--------------------------------------------------- + +First, we create the branch and check it out: + +:: + + murphy:dvn pdurbin$ git branch + 2656-solr + * develop + murphy:dvn pdurbin$ git branch 2656-lucene + murphy:dvn pdurbin$ + murphy:dvn pdurbin$ git branch + 2656-lucene + 2656-solr + * develop + murphy:dvn pdurbin$ git checkout 2656-lucene + Switched to branch '2656-lucene' + murphy:dvn pdurbin$ + murphy:dvn pdurbin$ git status + # On branch 2656-lucene + nothing to commit (working directory clean) + murphy:dvn pdurbin$ + +| Then, we make a change and a commit, and push it to: + +| `https://github.com/iqss/dvn/tree/2656-lucene `__ (creating a new remote branch): + + +:: + + murphy:dvn pdurbin$ vim src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java + murphy:dvn pdurbin$ + murphy:dvn pdurbin$ git commit -m 'start lucene faceting branch' src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java + [2656-lucene 3b82f88] start lucene faceting branch + 1 file changed, 73 insertions(+), 2 deletions(-) + murphy:dvn pdurbin$ + murphy:dvn pdurbin$ git push origin 2656-lucene + Counting objects: 25, done. + Delta compression using up to 8 threads. + Compressing objects: 100% (10/10), done. + Writing objects: 100% (13/13), 2.23 KiB, done. + Total 13 (delta 6), reused 0 (delta 0) + To git@github.com:IQSS/dvn.git + * [new branch] 2656-lucene -> 2656-lucene + murphy:dvn pdurbin$ + +| + +As we work on the feature branch, we merge the latest changes from +"develop". We want to resolve conflicts in the feature branch itself so +that the feature branch will merge cleanly into "develop" when we're +ready. In the example below, we use ``git mergetool`` and ``opendiff`` +to resolve conflicts and save the merge. 
Then we push the newly-merged +2656-lucene feature branch to GitHub: + +| + +:: + + murphy:dvn pdurbin$ git branch + * 2656-lucene + 2656-solr + develop + murphy:dvn pdurbin$ git checkout develop + murphy:dvn pdurbin$ git branch + 2656-lucene + 2656-solr + * develop + murphy:dvn pdurbin$ git pull + remote: Counting objects: 206, done. + remote: Compressing objects: 100% (43/43), done. + remote: Total 120 (delta 70), reused 96 (delta 46) + Receiving objects: 100% (120/120), 17.65 KiB, done. + Resolving deltas: 100% (70/70), completed with 40 local objects. + From github.com:IQSS/dvn + 8fd223d..9967413 develop -> origin/develop + Updating 8fd223d..9967413 + Fast-forward + .../admin/EditNetworkPrivilegesServiceBean.java | 5 +- + (snip) + src/DVN-web/web/study/StudyFilesFragment.xhtml | 2 +- + 12 files changed, 203 insertions(+), 118 deletions(-) + murphy:dvn pdurbin$ murphy:dvn pdurbin$ git pull + remote: Counting objects: 206, done. + remote: Compressing objects: 100% (43/43), done. + remote: Total 120 (delta 70), reused 96 (delta 46) + Receiving objects: 100% (120/120), 17.65 KiB, done. + Resolving deltas: 100% (70/70), completed with 40 local objects. 
+ From github.com:IQSS/dvn + 8fd223d..9967413 develop -> origin/develop + Updating 8fd223d..9967413 + Fast-forward + .../admin/EditNetworkPrivilegesServiceBean.java | 5 +- + (snip) + .../harvard/iq/dvn/core/web/study/StudyUI.java | 2 +- + src/DVN-web/web/HomePage.xhtml | 5 +- + murphy:dvn pdurbin$ + murphy:dvn pdurbin$ git checkout 2656-lucene + Switched to branch '2656-lucene' + murphy:dvn pdurbin$ + murphy:dvn pdurbin$ + murphy:dvn pdurbin$ git merge develop + Auto-merging src/DVN-web/web/BasicSearchFragment.xhtml + CONFLICT (content): Merge conflict in src/DVN-web/web/BasicSearchFragment.xhtml + Auto-merging src/DVN-web/src/edu/harvard/iq/dvn/core/web/BasicSearchFragment.java + Auto-merging src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java + Automatic merge failed; fix conflicts and then commit the result. + murphy:dvn pdurbin$ + murphy:dvn pdurbin$ git status + # On branch 2656-lucene + # Changes to be committed: + # + # modified: src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/admin/EditNetworkPrivilegesServiceBean.java + (snip) + # new file: src/DVN-web/web/admin/ChooseDataverseForCreateStudy.xhtml + # modified: src/DVN-web/web/study/StudyFilesFragment.xhtml + # + # Unmerged paths: + # (use "git add/rm ..." as appropriate to mark resolution) + # + # both modified: src/DVN-web/web/BasicSearchFragment.xhtml + # + murphy:dvn pdurbin$ git mergetool + merge tool candidates: opendiff kdiff3 tkdiff xxdiff meld tortoisemerge gvimdiff diffuse ecmerge p4merge araxis bc3 emerge vimdiff + Merging: + src/DVN-web/web/BasicSearchFragment.xhtml + + Normal merge conflict for 'src/DVN-web/web/BasicSearchFragment.xhtml': + {local}: modified file + {remote}: modified file + Hit return to start merge resolution tool (opendiff): + murphy:dvn pdurbin$ + murphy:dvn pdurbin$ git add . 
+ murphy:dvn pdurbin$ + murphy:dvn pdurbin$ git commit -m "Merge branch 'develop' into 2656-lucene" + [2656-lucene 519cd8c] Merge branch 'develop' into 2656-lucene + murphy:dvn pdurbin$ + murphy:dvn pdurbin$ git push origin 2656-lucene + (snip) + murphy:dvn pdurbin$ + + +| When we are ready to merge the feature branch back into the develop branch, we can do so. + +| Here's an example of merging the 2656-lucene branch back into develop: + +:: + + murphy:dvn pdurbin$ git checkout 2656-lucene + Switched to branch '2656-lucene' + murphy:dvn pdurbin$ git pull + Already up-to-date. + murphy:dvn pdurbin$ git checkout develop + Switched to branch 'develop' + murphy:dvn pdurbin$ git pull + Already up-to-date. + murphy:dvn pdurbin$ git merge 2656-lucene + Removing lib/dvn-lib-EJB/lucene-core-3.0.0.jar + Merge made by the 'recursive' strategy. + lib/dvn-lib-EJB/lucene-core-3.0.0.jar | Bin 1021623 -> 0 bytes + lib/dvn-lib-EJB/lucene-core-3.5.0.jar | Bin 0 -> 1466301 bytes + lib/dvn-lib-EJB/lucene-facet-3.5.0.jar | Bin 0 -> 293582 bytes + src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/DvnQuery.java | 160 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/IndexServiceBean.java | 56 ++++++++++++++++++++ + src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/IndexServiceLocal.java | 16 +++++- + src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java | 432 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++-- + src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/ResultsWithFacets.java | 71 +++++++++++++++++++++++++ + src/DVN-web/src/SearchFieldBundle.properties | 4 +- + src/DVN-web/src/edu/harvard/iq/dvn/core/web/AdvSearchPage.java | 86 +++++++++++++++++++++++++++++++ + src/DVN-web/src/edu/harvard/iq/dvn/core/web/BasicSearchFragment.java | 102 +++++++++++++++++++++++++++++++++++- + 
src/DVN-web/src/edu/harvard/iq/dvn/core/web/StudyListing.java | 11 ++++ + src/DVN-web/src/edu/harvard/iq/dvn/core/web/StudyListingPage.java | 428 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++- + src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetResultUI.java | 42 +++++++++++++++ + src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetUI.java | 62 ++++++++++++++++++++++ + src/DVN-web/web/AdvSearchPage.xhtml | 3 +- + src/DVN-web/web/BasicSearchFragment.xhtml | 9 ++-- + src/DVN-web/web/StudyListingPage.xhtml | 43 +++++++++++----- + 18 files changed, 1500 insertions(+), 25 deletions(-) + delete mode 100644 lib/dvn-lib-EJB/lucene-core-3.0.0.jar + create mode 100644 lib/dvn-lib-EJB/lucene-core-3.5.0.jar + create mode 100644 lib/dvn-lib-EJB/lucene-facet-3.5.0.jar + create mode 100644 src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/DvnQuery.java + create mode 100644 src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/ResultsWithFacets.java + create mode 100644 src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetResultUI.java + create mode 100644 src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetUI.java + murphy:dvn pdurbin$ + murphy:dvn pdurbin$ git status + # On branch develop + # Your branch is ahead of 'origin/develop' by 68 commits. + # + nothing to commit (working directory clean) + murphy:dvn pdurbin$ + murphy:dvn pdurbin$ git push + Counting objects: 51, done. + Delta compression using up to 8 threads. + Compressing objects: 100% (12/12), done. + Writing objects: 100% (19/19), 1.41 KiB, done. 
+ Total 19 (delta 7), reused 0 (delta 0) + To git@github.com:IQSS/dvn.git + b7fae01..2b88b68 develop -> develop + murphy:dvn pdurbin$ + +Switching to the master branch to merge commits from the develop branch +------------------------------------------------------------------------------------------------------- + +We should really only need to switch from the develop branch to the +master branch as we prepare for a release. + +First, we check out the master branch by clicking Team -> Git -> Branch +-> Switch to Branch. + +Change Branch to "origin/master" and check the box for "Checkout as New +Branch" and fill in "master" as the "Branch Name" to match the name of +the branch we're switching to. Then click "Switch". + +Now, in the Git Repository Browser (from Team -> Repository Browser) the +root of the tree should say ``dvn [master]`` and you should see two +branches under Branches -> Local. **master** should be in bold and +develop should not. + +Tips +========= + +Previewing changes before a pull +-------------------------------- + +If the build fails overnight you may want to hold off on doing a pull +until the problem is resolved. To preview what has changed since your +last pull, you can do a ``git fetch`` (the first part of a pull) then +``git log HEAD..origin/develop`` to see the commit messages. +``git log -p`` or ``git diff`` will allow you to see the contents of the +changes: + +:: + + git checkout develop + git fetch + git log HEAD..origin/develop + git log -p HEAD..origin/develop + git diff HEAD..origin/develop + +After the build is working again, you can simply do a pull as normal. + +Errors +=========== + +Duplicate class +--------------- + +The error "duplicate class" can result whenever you resolve a merge +conflict in git. 
 + +The fix is to close NetBeans and delete (or move aside) the cache like +this: + +:: + + cd ~/Library/Caches/NetBeans + mv 7.2.1 7.2.1.moved + +According to https://netbeans.org/bugzilla/show_bug.cgi?id=197983 this might be fixed in NetBeans 7.3. diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-installer-main.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-installer-main.txt Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,1090 @@ +==================================== +Installers Guide +==================================== + +.. _introduction: + +**Introduction** + +This is our "new and improved" installation guide; it was first +released with the Dataverse Network application version 2.2.4, when we +introduced the new, automated and much simplified installation process. +As of February 2012, it has been updated to reflect the changes made in +the newly released version 3.0 of the software. (Our existing users will +notice however, that the changes in the installation process have been +fairly minimal). + +The guide is intended for anyone who needs to install the DVN app, +developers and Dataverse Network administrators alike. + +The top-down organization of the chapters and sections is that of +increasing complexity. First a very basic, simple installation scenario +is presented. The instructions are straightforward and only the required +components are discussed. This use case will in fact be sufficient for +most DVN developers and many Dataverse Network administrators. Chances +are you are one of those users, so if brave by nature, you may stop +reading this section and go straight to the :ref:`“Quick Install” ` chapter. + +The “basic” installation process described in the first chapter is +fully automated; everything is performed by a single interactive script. +This process has its limitations. It will likely work only on the +supported platforms. 
Optional components need to be configured outside +of the Installer (these are described in the "Optional Components" +section). + +For an advanced user, we provide the detailed explanations of all the +steps performed by the Installer. This way he or she can experiment with +individual configuration options, having maximum flexibility and control +over the process. Yet we tried to organize the advanced information in +such a way that those who only need the most basic instructions would +not have to read through it unnecessarily. Instead we provide them with +an easy way to get a bare-bones configuration of the DVN up and running. + +If you are interested in practicing a DVN installation in a Vagrant +environment you can later throw away, please follow the instructions at +https://github.com/dvn/dvn-install-demo to spin up a Linux virtual +machine on your laptop with ``vagrant up``. When you are finished with +this temporary DVN installation, you can delete the virtual machine with +``vagrant destroy``. + +If you encounter any problems during installation, please contact the +development team +at `support@thedata.org `__ +or our `Dataverse Users +Community `__. + +.. _quick-install: + +Quick Install +++++++++++++++++++++++ + +For an experienced and/or rather bold user, this is a 1 +paragraph version of the installation instructions: + +This should work on RedHat and its derivatives, and MacOS X. If this +does not describe your case, you will very likely have to install and +configure at least some of the components manually. Meaning, you may +consider reading through the chapters that follow! Still here? Great. +Prerequisites: Sun/Oracle Java JDK 1.6\_31+ and a “virgin” installation +of Glassfish v3.1.2; PostgreSQL v8.3+, configured to listen to network +connections and support password authentication on the localhost +interface; you may need R as well. See the corresponding sections under +“2. Prerequisites”, if necessary. 
Download the installer package from +SourceForge: + +`http://sourceforge.net/projects/dvn/files/dvn `__ + +Choose the latest version and download the dvninstall zip file. + +Unzip the package in a temp location of your choice (this will create +the directory ``dvninstall``). Run the installer, as root: + + ``cd dvninstall`` + ./ ``install`` + +Follow the installation prompts. If it all works as it should, you +will have a working DVN instance running in about a minute from now. + +Has it worked? Awesome! Now you may read the rest of the guide +chapters at your own leisurely pace, to see if you need any of the +optional components described there. And/or if you want to understand +what exactly has just been done to your system. + +SYSTEM REQUIREMENTS +++++++++++++++++++++++++++++++++++ + +Or rather, recommendations. The closer your configuration is to what’s +outlined below, the easier it will be for the DVN team to provide +support and answer your questions. + +- Operating system - The production version of the Dataverse Network at + IQSS (dvn.iq.harvard.edu) runs on RedHat Linux 5. Most of the DVN + development is currently done on MacOS X. Because of our experience + with RedHat and MacOS X these are the recommended platforms. You + should be able to deploy the application .ear file on any other + platform that supports Java. However, the automated installer we + provide will likely work on RedHat and MacOS only. Some information + provided in this guide is specific to these 2 operating systems. (Any + OS-specific instructions/examples will be clearly marked, for + example:\ ``[MacOS-specific:]``) + +- CPU - The production IQSS Dataverse Network runs on generic, + multi-core 64-bit processors. + +- Memory - The application servers currently in production at the IQSS + have 64 GB of memory each. Development and testing systems require a + minimum of 2 gigabyte of memory. 
+ +- Disk space - How much disk space is required depends on the amount of + data that you expect to serve. The IQSS Dataverse Network file system + is a standalone NetApp with 2 TB volume dedicated to the DVN data. + +- Multiple servers – All the DVN components can run on the same server. + On a busy, hard-working production network the load can be split + across multiple servers. The 3 main components, the application + server (Glassfish), the database (Postgres) and R can each run on its + own host. Furthermore, multiple application servers sharing the same + database and R server(s) can be set up behind a load balancer. + Developers would normally run Glassfish and Postgres on their + workstations locally and use a shared R server. + +- If it actually becomes a practical necessity to bring up more servers + to handle your production load, there are no universal instructions + on how to best spread it across extra CPUs. It will depend on the + specifics of your site, the nature of the data you serve and the + needs of your users, whether you’ll benefit most from dedicating + another server to run the database, or to serve R requests. Please + see the discussion in the corresponding sections of the Prerequisites + chapter. + +.. _prerequisites: + +PREREQUISITES +++++++++++++++++++++++++++ + +In this chapter, an emphasis is made on clearly identifying those +components that are absolutely required for every installation and +marking any advanced, optional instructions as such. + +Glassfish +======================= + +Version 3.1.2 is required. + +Make sure you have **Sun/Oracle**\ **Java JDK version 1.6, build 31** +or newer\. It is available from +`http://www.oracle.com/technetwork/java/javase/downloads/index.html `__. 
+ + +**[note for developers:]** + +If you are doing this installation as part of your DVN software +development setup: The version of NetBeans currently in use by the DVN +team is 7.0.1, and it is recommended that you use this same version if +you want to participate in the development. As of writing of this +manual, NetBeans 7.0.1 installer bundle comes with an older version of +Glassfish. So you will have to install Glassfish version 3.1.2 +separately, and then select it as the default server for your NetBeans +project. + +**[/note for developers]** + +We **strongly** recommend that you install GlassFish Server 3.1.2, +Open Source Edition, **Full Platform**. You are very likely to run into +installation issues if you attempt to run the installer and get the +application to work with a different version! Simply transitioning from +3.1.1 to 3.1.2 turned out to be a surprisingly complex undertaking, +hence this recommendation to all other installers and developers to stay +with the same version. + +It can be obtained from + +`http://glassfish.java.net/downloads/3.1.2-final.html `__ + +The page contains a link to the installation instructions. However, +the process is completely straightforward. You are given 2 options for +the format of the installer package. We recommend that you choose to +download it as a shell archive; you will need to change its executable +permission, with **chmod +x**, and then run it, as root: + +./**installer-filename.sh** + +[**Important:]** + +Leave the admin password fields blank. This is not a security risk, +since out of the box, Glassfish will only be accepting admin connections +on the localhost interface. Choosing password at this stage however will +complicate the installation process unnecessarily\ **.**\ If this is a +developers installation, you can probably keep this configuration +unchanged (admin on localhost only). 
If you need to be able to connect +to the admin console remotely, please see the note in the Appendix +section of the manual. + +**[/Important]** + +| **[Advanced:]** +| **[Unix-specific:]** + +The installer shell script will normally attempt to run in a graphic +mode. If you are installing this on a remote Unix server, this will +require X Windows support on your local workstation. If for whatever +reason it's not available, you have an option of running it in a *silent +mode* - check the download page, above, for more information. + +| **[/Unix-specific]** +| **[/Advanced]** + +.. _postgresql: + +PostgreSQL +======================= + +| **Version 8.3 or higher is required.** +| Installation instructions specific to RedHat Linux and MacOS X are +| provided below. +| Once the database server is installed, you'll need to configure access +| control to suit your installation. +| Note that any modifications to the configuration files above require you to restart Postgres: +| ``service postgresql restart`` (RedHat) + +| or +| "Restart Server" under Applications -> PostgreSQL (MacOS X) + +By default, most Postgres distributions are configured to listen to network connections on the localhost interface only; and to only support ident for authentication. (The MacOS installer may ask you if network connections should be allowed - answer "yes"). At a minimum, if GlassFish is running on the same host, it will also need to allow password authentication on localhost. So you will need to modify the "``host all all 127.0.0.1``\ " line in your ``/var/lib/pgsql/data/pg_hba.conf`` so that it looks like this: + +| ``host all all 127.0.0.1/32 password`` + +Also, the installer script needs to have direct access to the local PostgresQL server via Unix domain sockets. So this needs to be set to either "trust" or "ident". 
I.e., your **pg\_hba.conf** must contain either of the 2 lines below: + +| **local all all ident sameuser** +| or +| **local all all trust** + +("ident" is the default setting; but if it has been changed to +"password" or "md5", etc. on your system, Postgres will keep prompting +you for the master password throughout the installation) + +**[optional:]** + +If GlassFish will be accessing the database remotely, add or modify the following line in your ``/data/postgresql.conf``: + +| ``listen_addresses='*'`` + +to enable network connections on all interfaces; and add the following +line to ``pg_hba.conf``: + +| host all all ``[ADDRESS] 255.255.255.255 password`` + +| where ``[ADDRESS]`` is the numeric IP address of the GlassFish server. +| Using the subnet notation above you can enable authorization for multiple hosts on | your network. For example, + +| ``host all all 140.247.115.0 255.255.255.0 password`` + +| will permit password-authenticated connections from all hosts on the ``140.247.115.*`` subnet. +| **[/optional:]** + +| +| **[RedHat-specific:]** +| **[Advanced:]** + +Please note that the instructions below are meant for users who have some experience with basic RedHat admin tasks. You should be safe to proceed if an instruction such as “uninstall the postgres rpms” makes sense to you immediately. I.e., if you already know how to install or uninstall an rpm package. Otherwise we recommend that you contact your systems administrator. + +For RedHat (and relatives), version 8.4 is now part of the distribution. As of RedHat 5, the default ``postgresql`` rpm is still version 8.1. 
So you may have to un-install the ``postgresql`` rpms, then get the ones for version 8.4: + +| ``yum install postgresql84 postgresql84-server`` + +Before you start the server for the first time with + +| ``service postgresql start`` + +You will need to populate the initial database with + + +| ``service postgresql initdb`` + + +| **[/advanced]** +| **[/RedHat-specific]** + + +**[MacOS-specific:]** + + +Postgres Project provides a one click installer for Mac OS X 10.4 and +above at +`http://www.postgresql.org/download/macosx `__. +Fink and MacPorts packages are also available. + + +**[/MacOS-specific]`** + + +| **[advanced:]** +| **[optional:]** + +See the section :ref:`PostgresQL setup ` in the Appendix for the description of the steps that the automated installer takes to set up PostgresQL for use with the DVN. + +| **[/optional]** +| **[/advanced]** + +.. _r-and-rserve: + +R and RServe +======================= + +Strictly speaking, R is an optional component. You can bring up a +running DVN instance without it. The automated installer will allow such +an installation, with a warning. Users of this Dataverse Network will be +able to upload and share some data. Only the advanced modes of serving +quantitative data to the users require R ``[style?]``. Please consult +the :ref:`"Do you need R?" ` section in the Appendix for an extended discussion of this. + + +| **Installation instructions:** + +Install the latest version of R from your favorite CRAN mirror (refer to `http://cran.r-project.org/ `__ for more information). Depending on your OS distribution, this may be as simple as typing + +| **[RedHat/Linux-specific:]** + +``yum install R R-devel`` + +(for example, the above line will work in CentOS out of the box; in RedHat, you will have to add support for EPEL repository -- see +`http://fedoraproject.org/wiki/EPEL `__ +-- then run the ``yum install`` command) + +| **[/RedHat/Linux-specific]** + +Please make sure to install the "devel" package too! 
you will need it +to build the extra R modules. + +Once you have R installed, download the package ``dvnextra.tar`` from this location: + +`http://dvn.iq.harvard.edu/dist/R/dvnextra.tar `__ + +Unpack the archive: + +``tar xvf dvnextra.tar`` + +then run the supplied installation shell script as root: + +| ``cd dvnextra`` +| ``./installModules.sh`` + +This will install a number of R modules needed by the DVN to run statistics and analysis, some from CRAN and some supplied in the bundle; it will also configure Rserve to run locally on your system and install some startup files that the DVN will need. + +**Please note that the DVN application requires specific versions of the 3rd-party R packages. For example, if you obtain and install the version of Zelig package currently available from CRAN, it will not work with the application. This is why we distribute the sources of the correct versions in this tar package.** + + +| **[advanced:]** +| We haven’t had much experience with R on any platforms other than RedHat-and-the-like. Our developers use MacOS X, but point their DVN instances to a shared server running Rserve under RedHat. + +The R project ports their distribution to a wide range of platforms. However, the installer shell script above will only run on Unix; and is not really guaranteed to work on anything other than RedHat. If you have some experience with either R or system administration, you should be able to use the script as a guide to re-create the configuration steps on any other platform quite easily. You will, however, be entirely on your own while embarking on that adventure. +**[/advanced]** + + + +System Configuration +================================ + +**[Advanced/optional:]** + +Many modern OS distributions come pre-configured so that all the +network ports are firewalled off by default. + +Depending on the configuration of your server, you may need to open some +of the following ports. 
+ +On a developers personal workstation, the user would normally access his +or her DVN instance on the localhost interface. So no open ports are +required unless you want to give access to your DVN to another +user/developer. + +When running a DVN that is meant to be accessible by network users: At a +minimum, if all the components are running on the same server, the HTTP +port 80 needs to be open. You may also want to open TCP 443, to be able +to access Glassfish admin console remotely. + +If the DVN is running its own HANDLE.NET server (see Chapter 4. +"Optional Components"), the TCP port 8000 and TCP/UDP ports 2641 are +also needed. + +If the DVN application needs to talk to PostgreSQL and/or Rserve running +on remote hosts, the TCP ports 5432 and 6311, respectively, need to be +open there. + +**[/Advanced/optional]** + + + +RUNNING THE INSTALLER ++++++++++++++++++++++++++++++++++++++++++ + +Once the :ref:`Prerequisites ` have been take care of, the DVN application can be installed. + +The installer package can be downloaded from our repository on SourceForge at + +`http://sourceforge.net/projects/dvn/files/dvn/3.0/dvninstall\_v3\_0.zip `_ + +| Unzip the package in a temp location of your choice (this will create the directory | ``dvninstall``). Run the installer, as root: +| ``cd dvninstall`` +| ``./install`` + +Follow the installation prompts. The installer will first verify the contents of the package and check if the required components +(in :ref:`Prerequisites `) are present on the system. Then it will lead you through the application setup. + +| **[Advanced:]** + +The limitations of the installer package: + +Some extra configuration steps will be required if the PostgreSQL database is being set up on a remote server. + +It will most likely only work on the supported platforms, RedHat and Mac OS X. + +It is only guaranteed to work on a fresh Glassfish installation. 
If you already have more than one Glassfish domain created and/or have applications other than the DVN running under Glassfish, please consult the :ref:`"What does the Installer do?" ` section. + +It does not install any of the optional components (:ref:`see Chapter 4`.) + +For the detailed explanation of the tasks performed by the Installer, see the :ref:`"What does the Installer do?" ` section. + +| **[/Advanced]** + +.. _optional-components: + +Optional Components +++++++++++++++++++++++++++ + +``[The sections on ImageMagick, Google Analytics and Captcha have been rewritten and, hopefully, made less confusing. The Handles instructions have also been modified, but I would like to work on it some more. Namely I'd like to read their own technical manual, and see if we should provide our own version of installation instructions, similarly to what we do with some other packages; we've heard complaints from users about their manual not being very easy to follow]`` + +reCAPTCHA bot blocker +================================= + +We found that our “email us” feature can be abused to send spam +messages. You can choose to use the reCAPTCHA filter to help prevent +this. Configure the filter as follows: + +#. | Go to reCAPTCHA web site at + | `http://recaptcha.net/ `_ + | and sign up for an account. + | Register your website domain to acquire a public/private CAPTCHA key pair. + | Record this information in a secure location. +#. Insert the public/private key pair and domain for your reCAPTCHA + account into the ``captcha`` table of the DVN PostgreSQL database. + Use ``psql``, ``pgadmin`` or any other database utility; the SQL + query will look like this: + ``INSERT INTO captcha (publickey, domainname, privatekey) VALUES ('sample', 'sample.edu', 'sample')`` +#. Verify that the Report Issue page is now showing the reCAPTCHA + challenge. 
+ +Google Analytics +================================ + +Network Admins can use the Google Analytics tools to view Dataverse Network website usage statistics. + +Note: It takes about 24 hours for Google Analytics to start monitoring +your website after the registration. + +| +| To enable the use of Google Analytics: + +#. Go to the Google Analytics homepage at + `http://www.google.com/analytics/index.html `__. +#. Set up a Google Analytics account and obtain a tracking code for your Dataverse Network installation. +#. Use the Google Analytics Help Center to find how to add the tracking code to the content you serve. +#. Configure the DVN to use the tracking key (obtained in Step 2, + above), by setting | the ``dvn.googleanalytics.key`` JVM option in + Glassfish. + + This can be done by adding the following directly to the + ``domain.xml`` config file (for example: ``/usr/local/glassfish/domains/domain1/config/domain.xml``): + ``-Ddvn.googleanalytics.key=XX-YYY`` (this will require Glassfish restart) + + Or by using the Glassfish Admin Console configuration GUI. Consult the “Glassfish Configuration” section in the Appendix. + +Once installed and activated, the usage statistics can be accessed from +the Network Options of the DVN. + +ImageMagick +======================= + +When image files are ingested into a DVN, the application +automatically creates small "thumbnail" versions to display on the +Files View page. These thumbnails are generated once, then cached for +future use. + +Normally, the standard Java image manipulation libraries are used to +do the scaling. If you have studies with large numbers of large +images, generating the thumbnails may become a time-consuming task. If +you notice that the Files view takes a long time to load for the first +time because of the images, it is possible | to improve the +performance by installing the ``ImageMagick`` package. 
If it is +installed, the application will automatically use its +``/usr/bin/convert`` utility to do the resizing, which appears to be +significantly faster than the Java code. + +``ImageMagick`` is available for, or even comes with most of the popular OS distributions. + + +| **** + +It is part of the full RedHat Linux distribution, although it is not +included in the default "server" configuration. It can be installed on a +RedHat server with the ``yum install ImageMagick`` command. + +**** + +Handle System +=========================== + +DVN administrators may choose to set up a `HANDLE.NET `_ server to issue and register persistent, global identifiers for their studies. The DVN app can be modified to support other naming services, but as of now it comes +pre-configured to use Handles. + +To install and set up a local HANDLE.NET server: + +#. Download HANDLE.NET. + Refer to the HANDLE.NET software download page at + `http://handle.net/download.html `__. +#. Install the server on the same host as GlassFish. + Complete the installation and setup process as described in the + HANDLE.NET Technical Manual: + `http://www.handle.net/tech_manual/Handle_Technical_Manual.pdf `__. +#. Accept the default settings during installation, **with one + exception:** do not encrypt private keys (this will make it easier to + manage the service). **Note** that this means answer 'n' when + prompted "Would you like to encrypt your private key?(y/n). [y]:" If + you accept the default 'y' and then hit return when prompted for + passphrase, this **will** encrypt the key, with a blank pass phrase! +#. During the installation you will be issued an "authority prefix". + This is an equivalent of a domain name. For example, the prefix + registered to the IQSS DVN is "1902.1". The IDs issued to IQSS + studies are of a form "1902.1/XXXX", where "XXXX" is some unique + identifier. +#. 
Use ``psql`` or ``pgAdmin`` to execute the following SQL command: + ``insert into handleprefix (prefix) values( '')``; +#. ``(Optional/advanced)`` If you are going to be assigning HANDLE.NET + ids in more than 1 authority prefix (to register studies harvested + from remote sources): Once you obtain the additional HANDLE.NET + prefixes, add each to the ``handleprefix`` table, using the SQL + command from step 3. +#. Use ``psql`` or ``pgAdmin`` to execute the following SQL + command: ``update vdcnetwork set handleregistration=true, authority='';`` + + + +Note: The DVN app comes bundled with the HANDLE.NET client libraries. +You do not need to install these separately. + +Twitter setup +====================== + +To set up the ability for users to enable Automatic Tweets in your +Dataverse Network: + +#. You will first need to tell twitter about your Dataverse Network Application. Go to `https://dev.twitter.com/apps `_ and login (or create a new Twitter account). +#. Click "Create a new application". +#. Fill out all the fields. For callback URL, use your Dataverse Network Home Page URL. +#. Once created, go to settings tab and set Application Type to "Read and Write". You can optionally also upload an Application + Icon and fill out Organization details (the end user will see these). +#. Click details again. You will need both the Consumer key and secret as JVM Options. Add via Glassfish console: + -Dtwitter4j.oauth.consumerKey=*** + + + -Dtwitter4j.oauth.consumerSecret=*** +#. Restart Glassfish. +#. To verify that Automatic Tweets are now properly set up, you can go to the Dataverse Network Options page or any Dataverse Options page and see that there is a new option, "Enable Twitter". + +Digital Object Identifiers +========================== + +Beginning with version 3.6, DVN will support the use of Digital Object Identifiers. Similar to the currently enabled Handle System, these DOIs will enable a permanent link to studies in a DVN network. 
+ +DVN uses the EZID API (`www.n2t.net/ezid `__) to facilitate the creation and maintenance of DOIs. Network administrators will have to arrange to get their own account with EZID in order to implement creation of DOIs. Once an account has been set up the following settings must be made in your DVN set-up: + +Update your database with the following query: + +Use ``psql`` or ``pgAdmin`` to execute the following SQL command: +``update vdcnetwork set handleregistration=true, protocol = 'doi', authority='' where id = 0;`` + +Add the following JVM options: + +``-Ddoi.username=`` + +``-Ddoi.password=`` + +``-Ddoi.baseurlstring=https://ezid.cdlib.org`` + +Note: The DVN app comes bundled with the EZID API client libraries. You do not need to install these separately. + +Appendix ++++++++++++++++++++++++ + +.. _do-you-need-r: + +Do you need R? +========================== + +This is a more detailed explanation of the statement made earlier in the "Prerequisites" section: "Only the advanced modes of serving quantitative data to the users require R." ``[style?]`` + +In this context, by “quantitative data” we mean data sets for which +machine-readable, variable-level metadata has been defined in the DVN +database. “Subsettable data” is another frequently used term, in the +DVN parlance. The currently supported sources of subsettable data are +SPSS and STATA files, as well as row tabulated or CSV files, with +extra control cards defining the data structure and variable +metadata. (See full documentation in User Guide for :ref:`Finding and Using Data `) + +Once a “subsettable” data set is created, users can run online statistics and analysis on it. That’s where R is used. In our experience, most of the institutions who have installed the DVN did so primarily in order to share and process quantitative data. When this is the case, R must be considered a required component. But for a DVN network built to serve a collection of strictly human-readable (text, image, etc.) 
data, R will not be necessary at all. + +.. _what-does-the-intstaller-do: + +What does the Installer do? +=================================== + +The Installer script (chapters Quick Install, Running the Installer.) automates the following tasks: + +#. Checks the system for required components; +#. Prompts the user for the following information: + + a) Location of the Glassfish directory; + + b) Access information (host, port, database name, username, password) for PostgresQL; + + c) Access information (host, port, username, password) for Rserve; + +#. Attempts to create the PostgreSQL user (role) and database, from :ref:`prerequisiste PostgreSQL setup step ` above; see the :ref:`"PostgreSQL configuration"` Appendix section for details. +#. Using the :ref:`Glassfish configuration template (section the Appendix) ` and the information collected in step 2.b. above, creates the config file domain.xml and installs it the Glassfish domain directory. +#. Copies additional configuration files (supplied in the dvninstall/config directory of the Installer package) into the config directory of the Glassfish domain. +#. Installs Glassfish Postgres driver (supplied in the dvninstall/pgdriver directory of the Installer package) into the lib directory in the Glassfish installation tree. +#. Attempts to start Glassfish. The config file at this point contains the configuration settings that the DVN will need to run (see section :ref:`Glassfish Configuration, individual settings section` of the Appendix), but otherwise it is a "virgin", fresh config. Glassfish will perform some initialization tasks on this first startup and deploy some internal apps. +#. If step 5. succeeds, the Installer attempts to deploy the DVN application (the Java archive DVN-EAR.ear supplied with the installer). +#. Stops Glassfish, populates the DVN database with the initial content (section :ref:`"PostgreSQL configuration"`" of the Appendix), starts Glassfish. +#. 
Attempts to establish connection to Rserve, using the access information obtained during step 2.c. If this fails, prints a warning message and points the user to the Prerequisites section of this guide where R installation is discussed. +#. Finally, prints a message informing the user that their new DVN should be up and running, provides them with the server URL and suggests that they visit it, to change the default passwords and perhaps start setting up their Dataverse Network. + +Throughout the steps above, the Installer attempts to diagnose any +potential issues and give the user clear error messages when things go +wrong ("version of Postgres too old", "you must run this as root", +etc.). + +Enough information is supplied in this manual to enable a user (a +skilled and rather patient user, we may add) to perform all the steps +above without the use of the script. + +.. _glassfish-configuration-template: + +Glassfish configuration template +==================================== + +The configuration template (``domain.xml.TEMPLATE``) is part of the +installer zip package. The installer replaces the placeholder +configuration tokens (for example, ``%POSTGRES_DATABASE%``) with the +real values provided by the user to create the Glassfish configuration +file ``domain.xml``. + +``[I was thinking of copy-and-pasting the entire template file here; +but it is 30K of XML, so I decided not to. The above explains where it +can be found, if anyone wants to look at it, for reference or +whatever]`` + +.. _glassfish-configuration-individual-settings: + +Glassfish Configuration, individual settings +===================================================== + +As explained earlier in the Appendix, the Installer configures Glassfish +by cooking a complete domain configuration file (``domain.xml``) and +installing it in the domain directory. + +All of the settings and options however can be configured individually +by an operator, using the Glassfish Admin Console. 
+ +The Console can be accessed at the network port 4848 when Glassfish is +running, by pointing a browser at + + ``http://[your host name]:4848/`` + +and logging in as ``admin``. The initial password is ``adminadmin``. It +is of course strongly recommended to log in and change it first thing +after you run the Installer. + +The sections below describe all the configuration settings that would +need to be done through the GUI in order to replicate the configuration +file produced by the Installer. This information is provided for the +benefit of an advanced user who may want to experiment with individual +options. Or to attempt to install DVN on a platform not supported by our +installer; although we wish sincerely that nobody is driven to such +desperate measures ever. + +.. _jvm-options: + +JVM options +----------------------- + +Under Application Server->JVM Settings->JVM Options: + +If you are installing Glassfish in a production environment, follow +these steps: + +#. | Delete the following options: -Dsun.rmi.dgc.server.gcInterval=3600000 + | -Dsun.rmi.dgc.client.gcInterval=3600000 +#. | Add the following options: + | -XX:MaxPermSize=192m + | -XX:+AggressiveHeap + | -Xss128l + | -XX:+DisableExplicitGC + | -Dcom.sun.enterprise.ss.ASQuickStartup=false +#. | To install on a multi-processor machine, add the following: + | ``-XX:+UseParallelOldGC`` +#. | To enable the optional HANDLE.NET installation and provide access to + | study ID registration, add the following (see the "Handles System" + | section in the "Optional Components" for + | details): + | ``-Ddvn.handle.baseUrl=<-Dataverse Network host URL>/dvn/study?globalId=hdl:`` + | ``-Ddvn.handle.auth=`` + | ``-Ddvn.handle.admcredfile=/hs/svr_1/admpriv.bin`` +#. 
| To enable the optional Google Analytics option on the Network Options + | page and provide access to site usage reports, add the following (see + | the "Google Analytics" section in the "Optional Components" for + | details): + | ``-Ddvn.googleanalytics.key=`` +#. | Configure the following option only if you run multiple instances + | of the GlassFish server for load balancing. This option controls + | which GlassFish instance runs scheduled jobs, such as harvest or + | export. + | For the server instance that will run scheduled jobs, include the + | following JVM option: + | ``-Ddvn.timerServer=true`` + | For all other server instances, include this JVM option: + | ``-Ddvn.timerServer=false`` + | If you are installing Glassfish in either a production or development + | environment, follow these steps: + + - | Change the following options’ settings: + | Change ``-client`` to ``-server``. + | Change ``-Xmx512m`` to whatever size you can allot for the maximum + | Java heap space. + | Set `` –Xms512m`` to the same value to which you set ``–Xmx512m``. 
+ - | To configure permanent file storage (data and documentation files + | uploaded to studies) set the following: + | ``-Dvdc.study.file.dir=${com.sun.aas.instanceRoot}/config/files/studies`` + - | To configure the temporary location used in file uploads add the + | following: + | ``-Dvdc.temp.file.dir=${com.sun.aas.instanceRoot}/config/files/temp`` + - | To configure export and import logs (harvesting and importing), + | add the following: + | -Dvdc.export.log.dir=${com.sun.aas.instanceRoot}/logs/export + | -Dvdc.import.log.dir=${com.sun.aas.instanceRoot}/logs/import + - | Add the following: + | -Djhove.conf.dir=${com.sun.aas.instanceRoot}/config + | -Ddvn.inetAddress= + | -Ddvn.networkData.libPath=${com.sun.aas.instanceRoot}/applications/j2ee- + | apps/DVN-EAR + - | To manage calls to RServe and the R host (analysis and file upload), add + | the following: + | ``-Dvdc.dsb.host=`` + | ``-Dvdc.dsb.rserve.user=`` + | ``-Dvdc.dsb.rserve.pwrd=`` + | ``-Dvdc.dsb.rserve.port=`` + + + | For Installing R, see: + | :ref:`R and R-Serve ` + | for information about configuring these values in the ``Rserv.conf`` + | file. + | These settings must be configured for subsetting and analysis to + | work. + - | To configure search index files set the following: + | ``-Ddvn.index.location=${com.sun.aas.instanceRoot}/config`` + - | To use the optional customized error logging and add more information + | to your log files, set the following: + | ``-Djava.util.logging.config.file= ${com.sun.aas.instanceRoot} /config/logging.properties`` + | **Note**: To customize the logging, edit the ``logging.properties`` file + - | The default size limit for file downloads is 100MB. To override this + | default add the following JVM option: + | ``-Ddvn.batchdownload.limit=`` + +EJB Container +----------------------------- + +Under Configuration->EJB Container->EJB Timer Service: + +#. | Set the Timer Datasource to the following: + | ``jdbc/VDCNetDS`` +#. | Save the configuration. 
+
+HTTP Service
+-----------------------------
+
+The HTTP Service configuration settings described in this section are suggested defaults. These settings are very important. There are no right values to define; the values depend on the specifics of your web traffic, how many requests you get, how long they take to process on average, and your hardware. For detailed information, refer to the
+| Sun Microsystems Documentation web site at the following URL:
+
+`http://docs.sun.com/ `_
+
+
+| **Note**: If your server becomes so busy that it drops connections,
+| adjust the Thread Counts to improve performance.
+
+#. Under Configuration->HTTP Service->HTTP
+   Listeners->\ ``http-listener-1``:
+
+   - Listener Port: 80
+   - Acceptor Threads: The number of CPUs (cores) on your server
+
+#. Under Configuration->HTTP Service, in the RequestProcessing tab:
+
+   - Thread Count: Four times the number of CPUs (cores) on your server
+   - Initial Thread Count: The number of CPUs (cores)
+
+#. Under Configuration->HTTP Service->Virtual Servers->server: add new property ``allowLinking`` with the value ``true``.
+
+   #. | Under Configuration->HTTP Service, configure Access Logging:
+
+      | format=%client.name% %auth-user-name% %datetime% %request% %status%
+      | %response.length%
+      | rotation-enabled=true
+      | rotation-interval-in-minutes=15
+      | rotation-policy=time
+      | rotation-suffix=yyyy-MM-dd
+
+JavaMail Session
+------------------------------------
+
+Under Resources->JavaMail Sessions\ ``->mail/notifyMailSession:``
+
+- | Mail Host: ````
+  | **Note**: The Project recommends that you install a mail server on the same machine as GlassFish and use ``localhost`` for this entry. Since email notification is used for workflow events such as creating a dataverse or study, these functions may not work properly if a valid mail server is not configured.
+- Default User: ``dataversenotify``
+  This does not need to be a real mail account.
+
+- Default Return Address: ``do-not-reply@``
+
+JDBC Resources
+------------------------------------
+
+**Under Resources->JDBC->Connection Pools:**
+
+
+| Add a new Connection Pool entry:
+
+- entryName: ``dvnDbPool``
+- Resource Type: ``javax.sql.DataSource``
+- Database Vendor: ``PostgreSQL``
+- DataSource ClassName: ``org.postgresql.ds.PGPoolingDataSource``
+- Additional Properties:
+
+  - ConnectionAttributes: ``;create=true``
+  - User: ``dvnApp``
+  - PortNumber: ``5432`` (Port 5432 is the PostgreSQL default port.)
+  - Password: ````
+  - DatabaseName: ````
+  - ServerName: ````
+  - JDBC30DataSource: ``true``
+
+|
+
+**Under Resources->JDBC->JDBC Resources:**
+
+| Add a new JDBC Resources entry:
+
+- JNDI Name: ``jdbc/VDCNetDS``
+- Pool Name: ``dvnDbPool``
+
+JMS Resources
+-----------------------------------------
+
+Under Resources->JMS Resources:
+
+#. Add a new Connection Factory for the DSB Queue:
+
+   - JNDI Name: ``jms/DSBQueueConnectionFactory``
+   - Resource Type: ``javax.jms.QueueConnectionFactory``
+
+#. Add a new Connection Factory for the Index Message:
+
+   - JNDI Name: ``jms/IndexMessageFactory``
+   - Resource Type: ``javax.jms.QueueConnectionFactory``
+
+#. Add a new Destination Resource for the DSB Queue:
+
+   - JNDI Name: ``jms/DSBIngest``
+   - Physical Destination Name: ``DSBIngest``
+   - Resource Type: ``javax.jms.Queue``
+
+#. Add a new Destination Resource for the Index Message:
+
+   - JNDI Name: ``jms/IndexMessage``
+   - Physical Destination Name: ``IndexMessage``
+   - Resource Type: ``javax.jms.Queue``
+
+.. _postgresql-setup:
+
+PostgreSQL setup
+=======================
+
+The following actions are normally performed by the automated installer
+script. These steps are explained here for reference, and/or in case
+you need to perform them manually:
+
+1. Start as root, then change to user postgres:
+
+   ``su postgres``
+
+   Create DVN database user (role):
+
+   ``createuser -SrdPE [DB_USERNAME]``
+
+   (you will be prompted to choose a user password).
+ + Create DVN database: + + ``createdb [DB_NAME] --owner=[DB_USERNAME]`` + + ``[DB_NAME]`` and ``[USER_NAME]`` are the names you choose for your DVN database and database user. These, together with the password you have assigned, will be used in the Glassfish configuration so that the application can talk to the database. + +2. Before Glassfish can be configured for the DVN app, the Postgres driver needs to be installed in the /lib directory. We supply a version of the driver known to work with the DVN in the dvninstall/pgdriver directory of the Installer bundle. (This is the :ref:`"What does the Installer do?" ` section of this appendix) An example of the installed location of the driver: + + ``/usr/local/glassfish/lib/postgresql-8.3-603.jdbc4.jar`` + +3. Finally, after the DVN application is deployed under Glassfish for the first time, the database needs to be populated with the initial content: + + ``su postgres`` + ``psql -d [DB_NAME] -f referenceData.sql`` + + The file referenceData.sql is provided as part of the installer zip package. + +RedHat startup file for glassfish, example +==================================================== + +Below is an example of a glassfish startup file that you may want to +install on your RedHat (or similar) system to have glassfish start +automatically on boot. + +| Install the file as ``/etc/init.d/glassfish``, then run ``chkconfig glassfish on`` + +Note that the extra configuration steps before the domain start line, +for increasing the file limit and allowing "memory overcommit". These +are useful settings to have on a production server. + +| You may of course add extra custom configuration specific to your + setup. + +.. code-block:: guess + + #! 
/bin/sh
+    # chkconfig: 2345 99 01
+    # description: GlassFish App Server
+    set -e
+    ASADMIN=/usr/local/glassfish/bin/asadmin
+    case "$1" in
+    start)
+        echo -n "Starting GlassFish server: glassfish"
+        # Increase file descriptor limit:
+        ulimit -n 32768
+        # Allow "memory overcommit":
+        # (basically, this allows to run exec() calls from inside the
+        # app, without the Unix fork() call physically hogging 2X
+        # the amount of memory glassfish is already using)
+        echo 1 > /proc/sys/vm/overcommit_memory
+        $ASADMIN start-domain domain1
+        echo "."
+        ;;
+    stop)
+        echo -n "Stopping GlassFish server: glassfish"
+        $ASADMIN stop-domain domain1
+        echo "."
+        ;;
+    *)
+        echo "Usage: /etc/init.d/glassfish {start|stop}"
+
+        exit 1
+    esac
+    exit 0
+
+
+Enabling secure remote access to Asadmin
+========================================
+
+As was mentioned in the Glassfish section of the manual, in version
+3.1.2 the admin interface (asadmin) is configured to be accessible on the
+localhost interface only. If you need to be able to access the admin
+console remotely, you will have to enable secure access to it. (It will
+be accessible over https only, at ``https://:4848``; connections
+to ``http://:4848`` will be automatically redirected to the https
+interface)
+
+The following must be done as root:
+
+#. First you need to configure the admin password:
+
+   ``/glassfish3/bin/asadmin change-admin-password``
+
+   (since you didn't create one when you were installing Glassfish, leave the "current password" blank, i.e., hit ENTER)
+
+#. Enable the secure access:
+
+   ``/glassfish3/bin/asadmin enable-secure-admin``
+
+   (Note that you will need to restart Glassfish after step 2. above)
+
+.. _using-lockss-with-dvn:
+
+Using LOCKSS with DVN
+=======================================
+
+DVN holdings can be crawled by LOCKSS servers (`www.lockss.org `__).
It is made possible by the special plugin developed and maintained by the DVN project, which a LOCKSS daemon utilizes to crawl and access materials served by a Dataverse network. + +The current stable version of the plugin is available at the following location: + +`http://lockss.hmdc.harvard.edu/lockss/plugin/DVNOAIPlugin.jar `__ + + +As of January 2013 and DVN version 3.3, the plugin is compatible with the LOCKSS daemon version 1.55. The plugin sources can be found in the main DVN source tree in `https://dvn.svn.sourceforge.net/svnroot/dvn/dvn-app/trunk/src/DVN-lockss `_ (please note that the DVN project is currently **in the process of moving to gitHub!** The preserved copy of the 3.3 source will be left at the URL above, together with the information on the current location of the source repository). + +In order to crawl a DVN, the following steps need to be performed: + +#. Point your LOCKSS daemon to the plugin repository above. (Refer to the LOCKSS documentation for details); +#. Create a LOCKSS Archival Unit for your target DVN: + + In the LOCKSS Admin Console, go to **Journal Configuration** -> **Manual Add/Edit** and click on **Add Archival Unit**. + + On the next form, select **DVNOAI** in the pull down menu under **Choose a publisher plugin** and click **Continue**. + + Next configure the parameters that define your DVN Archival Unit. LOCKSS daemon can be configured to crawl either the entire holdings of a DVN (no OAI set specified), or a select Dataverse. + +Note that LOCKSS crawling must be authorized on the DVN side. Refer to +the :ref:`"Edit LOCKSS Settings" ` +section of the DVN Network Administrator Guide for the instructions on +enabling LOCKSS crawling on the network level, and/or to the +:ref:`Enabling LOCKSS access to the Dataverse ` +of the Dataverse Administration Guide. Once you allow LOCKSS crawling of +your Dataverse(s), you will need to enter the URL of the "LOCKSS +Manifest" page provided by the DVN in the configuration above. 
For the
+network-wide archival unit this URL will be
+``http``\ ``:///dvn/faces/ManifestPage.xhtml``; for an
+individual dataverse it is
+``http``\ ``:///dvn/dv//faces/ManifestPage.xhtml.``
+
+| The URL of the DVN OAI server is ``http``\ ``:///dvn/OAIHandler``.
+
+Read Only Mode
+===================
+
+A Read Only Mode has been established in DVN to allow the application to remain available while deploying new versions or patches. Users will be able to view data and metadata, but will not be able to add or edit anything. Currently there is no way to switch to Read Only Mode through the application.
+In order to change the application mode you must apply the following queries through ``psql`` or ``pgAdmin``:
+
+To set to Read Only Mode:
+
+    | ``BEGIN;``
+    | ``SET TRANSACTION READ WRITE;``
+    | ``-- Note database and user strings may have to be modified for your particular installation;``
+    | ``-- You may also customize the status notice which will appear on all pages of the application;``
+    | ``update vdcnetwork set statusnotice = 'This network is currently in Read Only state. No saving of data will be allowed.';``
+    | ``ALTER DATABASE "dvnDb" set default_transaction_read_only=on;``
+    | ``Alter user "dvnApp" set default_transaction_read_only=on;``
+    | ``END;``
+
+To return to regular service:
+
+    | ``BEGIN;``
+    | ``SET TRANSACTION READ WRITE;``
+    | ``-- Note database and user strings may have to be modified for your particular installation;``
+    | ``ALTER DATABASE "dvnDb" set default_transaction_read_only=off;``
+    | ``Alter user "dvnApp" set default_transaction_read_only=off;``
+    | ``update vdcnetwork set statusnotice = '';``
+    | ``END;``
+
+Backup and Restore
+================================
+
+**Backup**
+
+| The PostgreSQL database and study files (contained within the Glassfish directory by default but this is :ref:`configurable via JVM options `) are the most critical components to back up.
The use of standard PostgreSQL tools (i.e. pg\_dump) is recommended. + +Glassfish configuration files (i.e. domain.xml, robots.txt) and local +customizations (i.e. images in the docroot) should be backed up as well. +In practice, it is best to simply back up the entire Glassfish directory +as other files such as logs may be of interest. + +| **Restore** + +Restoring DVN consists of restoring the PostgreSQL database and the +Glassfish directory. diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-user-main.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/_sources/dataverse-user-main.txt Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,4744 @@ +==================================== +User Guide +==================================== + +Common Tasks +++++++++++++++++++ + +Here is a list of the most common ways people use the Dataverse Network. +Activities can be grouped into finding and using data or publishing +data. A brief description of each activity follows with more detailed +information available in the Users Guide. + +Finding Data +=============== + +Visitors to the site can browse dataverses looking for data of +interest or they can search by keywords. There are Basic and Advanced +Searches. + +**Browsing the Site** + + +The Network Homepage presents a list of recently released dataverses on the left side of the page. +A dataverse is a container for studies that can be managed as a group by the dataverse administrator. +Most often a dataverse represents a single organization or scholar and so their studies are often related. +On the right side of the page there are lists of both recently released studies and studies that have been +downloaded most often. At the bottom of these lists, the View More link brings the user to a complete list +of released dataverses or studies as applicable. 
The home page also includes a scrolling list of datverse +collections called subnetworks, if applicable. + +Clicking on the name of a dataverse, study or subnetwork displays its home page. + +**Browsing Dataverses** + +If you click the View More link under the recently released dataverse list on the Network Homepage you'll be brought to +the Browse Dataverses page. Here you can sort the dataverses by Name, Affiliation, Release Date and Download Count. You +may also filter the dataverses by typing a filter term in the "filter" text box. The filter will only display those +dataverses whose name or affiliation matches the filter term. Clicking on the name of a dataverse displays its home page. + +**Search** + +For many purposes, Basic Search is sufficient. On the center top of the network homepage enter keywords or +complete sentences and click **Search**. A resulting list of studies is +displayed. Further refinement can be made by clicking facets such as +"Original Dataverse" or "Author" under "Refine Results" on the left side +of the page. After a facet has been clicked, it will appear at the top +of the page under "Search Results for" and clicking the selected facet +will remove it, restoring the previous results. In addition to the +network homepage, Basic Search can be found on the upper right of the +dataverse home pages as well as on the search results and Advanced +Search pages. Be aware that searching from a dataverse limits the scope +of search to studies within that dataverse while searching from the +network home page searches all released studies. + +When a more specific search is needed, use Advanced Search. Advanced +Search allows searching on keywords found in specific cataloging +information fields, in particular collections in a dataverse where +available, or by variable name. The link to Advanced Search is next to +the Basic Search feature on the network and dataverse home pages and the +search results page. 
+ +Using Data +============== + +Data in the Dataverse Network is stored in files. Files of any +type are allowed but some types of tabular and network data files are +supported by additional functionality, including downloading in +different formats, downloading subsets of variables, and analytical +tools. + +**Download Files** + +To download files, click on a study of interest, then select the +data tab. Individual files can be downloaded or groups of files by +checking files of interest or entire file categories and clicking +Download All Selected Files. Groups of files are packaged into a single +``.zip`` file. Group downloads have a download size limit and any selected +files not downloaded will be indicated in the ``.zip`` file. + +Downloading individual files in an alternate format where available is +straightforward. Choose the format from the Download As select box next +to the file and the file will download. + +**Subset or Analyze Files** + +Tabular and Network data files of recognized formats (Stata, SPSS, RData, +Graphml) can be further manipulated through downloading subsets of +variables and by performing various statistical analyses. Where +available these options appear as an additional link, Access +Subset/Analysis, below the Download As format select box next to each +file. The functionality is quite different for tabular versus network +data files so refer to the Users Guide for additional information. + +Publishing Data +==================== + +Publishing data through the Dataverse Network is straightforward: +create an account and a place to store your data, organize your data, +upload files, and release your data for public access. + +**Create a Dataverse and Account** + +The first step to publishing your data is to create a place to +store it that can be managed by you. To do this you need an account. +Create a dataverse and account by clicking on the Create a Dataverse +link on the upper right side of the network homepage. 
This leads you +through a series of steps at the end of which you will have a dataverse +and user account to manage it. + +Newly created dataverses are unreleased and not available for +browsing. Make note of the link to your dataverse at the end of the +process so you can return to it until it becomes released. Another way +to access your unreleased dataverse is to log in, click on your user +name in the upper right of the page, dataverses tab, then the name of +your dataverse. + +**Create Studies** + +Once you have a user account and a place to store your data, you +need to take the first step toward organizing your data into studies. +Many data have been or will be used to publish a study so this step may +be clear. If not, a study should represent a particular thesis or +inquiry with accompanying data. First, log in with your new user account +and navigate to your dataverse home page. Next, click Options in the +upper right of the page. From there click Create a Study and complete +the form. Most of the fields on the study form are optional -only the +title is required. If you are unsure of what these values should be, +enter a title and these fields can be completed later before releasing +the study. + +Be aware that a newly created study is unreleased and not available +for browsing. To access an unreleased study for further editing, click +on Options->Manage Studies and click on your study's name. You can also +click on your username, studies tab, then the study name. + +**Upload Files** + + +Now that you have a place to store and manage your data and a +study to associate it with, you can upload your data and documentation +files. Files are uploaded to a study. Navigate to the study you want to +upload particular files to and click on Add Files on the upper right +side of the page. The add files page requires you to first select a file +type, then browse for the file on your local system. 
Some file types +undergo additional processing to support extended functionality but if +you are unsure which type to choose, select Other. At this time you can +enter a descriptive Category which can be used to group related files +and a file description. If you are unsure of these values they can be +added later. + +Though files are selected individually, several files can be added +to this page at one time. It is recommended to upload only a few files +at a time since this can take some time to complete, depending on file +type. + +An alternative to selecting files individually is to first create an +archive of files in ``.zip`` or ``.tar`` format and then select the +appropriate "multiple files" Data Type when uploading your archive. The +zip file or tarball will be unpacked so that the individual files will +be added to the page. + +If you upload an SPSS (``.por``, ``.sav``), Stata (``.dta``) or R +(``.RData``) file, your study will be temporarily unavailable for +editing until the additional processing on the file is completed. This +can be brief or take some time depending on the size and complexity of +the file. A message at the top of the file indicates it is unavailable +for editing and an email will be sent when finished to the address you +indicate on the add files page. + +**Release Studies** + +Once your study is in a state where it's ready to be published or +shared with others, it should be released. This is done either by +clicking Release on the upper right of the study page or by navigating +to your dataverse, clicking Options, Manage Studies, then clicking +release next to the study you want released. Note that releasing a study +fixes the version number. Additional changes to the study will create a +new draft version. The draft can be repeatedly edited without changing +the version number until it is released. At this point your study is +visible within your dataverse. 
If your dataverse is also released it
+will be searchable and viewable by others. If your dataverse is not yet
+released, it will only be visible to people with access to your
+dataverse.
+
+**Release Dataverse**
+
+Releasing a dataverse makes it appear in the list of dataverses on
+the network home page and makes it viewable by others. This may require
+adding a study or other details to your dataverse depending on site
+policy. By default, releasing a dataverse requires nothing but changing
+the Dataverse Release Settings to Released on the Manage Permissions
+page. To release your dataverse, navigate to the dataverse home page,
+choose Options from the upper right of the page, click on Dataverse
+Settings, then Manage Permissions. At the top of the page, change
+Dataverse Release Settings to Released and click Save Changes.
+
+Any studies that are released are now visible to others. Those
+that are unreleased do not appear in the list of studies on the
+dataverse home page.
+
+At this point you have published one or more studies and their data and
+made them available for browsing or searching.
+
+Things to Consider, Next Steps
+===============================
+
+The above tasks are fundamental activities and may be all that is
+needed for most users. Some situations are more complex and require
+additional consideration. These include publishing and organizing data
+for large organizations, shared research between scholars, and enabling
+contributions by a geographically diverse team while keeping data
+private until ready for publication.
+
+For **large organizations**, a single dataverse may suffice. Collections
+within a dataverse can further organize studies by sub unit or topic.
+The dataverse itself can be **customized** with the organization's own
+website header and footer. In some cases, sub units or organizations
+want to maintain their own distinct branding.
In such cases each can +create and maintain their own dataverse and the parent dataverse can +link to their studies through a link collection. + +For **shared research**, the model is similar: a single dataverse based +on the research project can be created to which both researchers have +administration rights. Additionally, researchers can maintain their own +dataverses for other work and link back to the studies in the shared +project dataverse. + +**Allowing a diverse team to contribute** to an unreleased dataverse is +simply a matter of granting the appropriate level of **permissions** to +each team member. At minimum, each team member would need to be added as +a contributor to the dataverse. By default, they can only contribute to +studies they themselves have created. However, this can be expanded from +the dataverse Manage Permissions page to allow contributors to edit all +studies in the dataverse. Changes made by contributors need to be +approved by a curator or admin before a study can be released. + +How the Guides Are Organized +============================ + +The guides are reference documents that explain how to use +the Dataverse Network functionality: Installers Guide, Developers Guide, APIs Guide, and Users +Guide. The Users Guide is further divided into primary activities: using +data, creating studies, administering dataverses or the network. Details +on all of the above tasks can be found in the Users Guide. The +Installers Guide is for people or organizations who want to host their +own Dataverse Network. The Developers Guide contains instructions for +people who want to contribute to the Open Source Dataverse Network +project or who want to modify the code to suit their own needs. Finally, the +APIs Guide is for people who would like to use our APIs in order to build apps that +can work with the Dataverse Network web application. This `page `__ lists some current apps +which have been developed with our APIs. 
+ +Other Resources +========================= + +**Dataverse Network Project Site** + + +Additional information about the Dataverse Network project itself +including presentations, information about upcoming releases, data +management and citation, and announcements can be found at +`http://thedata.org `__ + +**User Group** + +As the user community grows we encourage people to shares ideas, ask +questions, or offer suggestions for improvement. Go to +`https://groups.google.com/group/dataverse-community `__ to register to our dataverse community group. + +**Follow Us on Twitter** + +For up to date news, information and developments, follow our twitter account: `https://twitter.com/thedataorg `__ + +**Support** + +We maintain an email based support service that's free of charge. We +attempt to respond within one business day to all questions and if it +cannot be resolved immediately, we'll let you know what to expect. + +Contact Us +======================= + +The support email address is +`support@thedata.org `__. + +This is the same address as the Report Issue link. We try to respond +within one business day. + +.. _finding-and-using-data: + +Finding and Using Data ++++++++++++++++++++++++ + +Ends users, without need to login to the Dataverse Network, can browse +dataverses, search studies, view study description and data files for +public studies, and subset, analyze and visualize data for public data +files. If entire studies or individual data files are restricted, end +users need to be given permission from the dataverse administrator to +access the data. + + +Search +======= + +To find a study or data set, you can search or browse studies offered +in any released dataverse on the Network homepage. Each dataverse offers +a hierarchical organization comprising one or more collections of data +sets with a particular theme. 
Most dataverses allow you to search for +data within their files, or you can start browsing through the dataverse +classifications that are closest to your substantive interests. + +**Browse Collections** + +You can browse all public dataverses from the Network homepage. Click +the title of a dataverse to browse that dataverse's collections and +studies. Click the title of a collection to view a list of studies and +subcollections for that selection. Click the title of a study to view +the Cataloging Information and study files for that selection. + +When you select a dataverse to view its contents, the homepage opens to +the \ *root collection*, and the dataverse's studies are displayed +directly under the root collection name. If the root collection contains +other collections, then those collections are listed and not the studies +within them. You must select a collection title to view the studies +contained within it. + +Note: If a dataverse includes links to collections from another +dataverse and the root collection does not contain other collections, +the homepage opens to a list of the root and linked collections. + +**Search - Basic** + +You can search for studies across the entire Dataverse Network from the +Network homepage, or search within a dataverse from the dataverse +homepage. When you search across the Network, studies from restricted +dataverses are not included in the search. Restricted studies are +included in search results, and a lock icon appears beside those studies +in the results list. After your search is complete, you can further +narrow your list of data by searching again in the results. See Search +Tips for search examples and guidelines. + +When you enter more than one term in the search text field, the results +list contains studies that have these terms near each other within the +study fields searched. 
For example, if you enter ``United Nations``,
+the results include studies where the words *United* and *Nations* are
+separated by no more than four words in the same study field, such as
+abstract or title.
+
+It supports a search in any field of the studies' Cataloging
+Information, which includes citation information, abstract and other
+scope-related information, methodology, and Terms of Use. In addition,
+file descriptions also are searched.
+
+**Search - Advanced**
+
+In an advanced search, you can refine your criteria by choosing which
+Cataloging Information fields to search. You also can apply logic to the
+field search. For text fields, you can specify that the field searched
+either *contains* or *does not contain* the text that you enter. For
+date fields, you can specify that the field searched is either *later
+than* or *earlier than* the date that you enter. Refer to
+the `Documentation `__ page for
+the latest version at the Lucene website and look for *Query Syntax* for full details.
+
+To perform an advanced search, click the Advanced Search link at the
+top-right of the Search panel. You can search the following study
+metadata fields by using the Search Scope drop-down list:
+
+- Title - Title field of studies' Cataloging Information.
+- Author - Author fields of studies' Cataloging Information.
+- (Study) Global ID - ID assigned to studies.
+- Other ID - A different ID previously given to the study by another
+  archive.
+- Abstract - Any words in the abstract of the study.
+- Keyword - A term that defines the nature or scope of a study. For
+  example, ``elections``.
+- Keyword Vocabulary - Reference to the standard used to define the
+  keywords.
+- Topic Classification - One or more words that help to categorize the
+  study.
+- Topic Classification Vocabulary - Reference used to define the Topic
+  Classifications.
+- Producer - Institution, group, or person who produced the study.
+- Distributor - Institution that is responsible for distributing the + study. +- Funding Agency - Agency that funded the study. +- Production Date - Date on which the study was created or completed. +- Distribution Date - Date on which the study was distributed to the + public. +- Date of Deposit - Date on which the study was uploaded to the + Network. +- Time Period Cover Start - The beginning of the period covered by the + study. +- Time Period Cover End - The end of the period covered by the study. +- Country/Nation - The country or countries where the study took place. +- Geographic Coverage - The geographical area covered by the study. For + example, ``North America``. +- Geographic Unit - The smallest geographic unit in which the study + took place, such as ``state``. +- Universe - Universe of interest, population of interest, or target + population. +- Kind of Data - The type of data included in the file, such + as ``survey data``, ``census/enumeration data``, + or ``aggregate data``. +- Variable Information - The variable name and description in the + studies' data files, given that the data file is subsettable and + contains tabular data. It returns the studies that contain the file + and the variable name where the search term was found. + +**Sort Results** + +When your search is complete, the results page lists studies that met +the search criteria in order of relevance. For example, a study that +includes your search term within the Cataloging Information in ten +places appears before a study that includes your search term in the +Cataloging Information in only one place. + +You can sort search results by title, study ID, last updated, or number +of downloads (that is, the number of times users downloaded any file +belonging to that study). Click the Sort By drop-down list to choose +your sort order. 
+ +**Search Tips** + +Use the following guidelines to search effectively within a Network or a +dataverse: + +- The default search syntax uses ``AND`` logic within individual + fields. That is, if you enter more than one term, the search engine + looks for all terms within a single field, such as title or abstract. + For example, if you enter ``United Nations report``, the results + list any studies that include the terms *United*, *Nations*, + and *report* within a single metadata field. +- The search logic looks for multiple terms within a specific proximity + to one another, and in the same field. The current proximity criteria + is four words. That is, if you enter two search terms, both terms + must be within four words of each other in the same field to be + returned as a result. + For example, you might enter ``10 year`` in a basic search. If a + study includes the string *10 millions deaths per year* within a + metadata field, such as abstract, that study is not included in the + search results. A study that contains the string *10 per year* within the abstract field is included in the search results. +- During the index process that supports searches, periods are removed + in strings and each term between periods is indexed individually. If + you perform a basic search for a term that contains one or more + periods, the search works because the analyzer applies + the *AND* logic. If you search on a specific field, though, note + that you should specify individually each component of the string + between periods to return your results. +- You can enter one term in the search field, and then search within + those results for another term to narrow the results further. This + might be more effective than searching for both terms at one time, if + those terms do not meet the proximity and field limits specified + previously. + You could search first for an author's name, and then search those + results for a specific term in the title. 
If you try searching for + both terms in the author and title fields together, you might not + find the study for which you are looking. + For example, you can search the Harvard Dataverse Network for the + following study: + + *Gary King; Will Lowe, 2003, "10 Million International Dyadic + Events", hdl:1902.1/FYXLAWZRIA UNF:3:um06qkr/1tAwpS4roUqAiw== + Murray Research Archive [Distributor]* + + If you type ``King, 10 Million`` in the Search field and click + Search, you see ``0 matches were found`` in the Results field. If + you type ``10`` in the Search field and click Search, you see + something like ``1621 matches were found`` in the Results field. + But if you first type ``King`` in the Search field and click + Search, then type ``10 Million`` in the Search field and click + Search again, you see something like ``4 matches were found`` in the + Results field. + + +View Studies / Download Data +============================ + +**Cataloging Information** + +When a study is created, a set of *metadata* is associated with that +study. This metadata is called the *Cataloging Information* for the +study. When you select a study to view it, you first see the Cataloging +Information tab listing the metadata associated with that study. This is +the default view of a study. + +Cataloging Information contains numerous fields that help to describe +the study. The amount of information you find for each study varies, +based on what was entered by the author (Contributor) or Curator of that +study. For example, one study might display the distributor, related +material, and geographic coverage. Another study might display only the +authors and the abstract. Every study includes the *Citation Information* fields in the Cataloging Information. + +Note: A comprehensive list of all Cataloging Information fields is +provided in the :ref:`List of Metadata References ` + +Cataloging Information is divided into four sections. 
These sections and +their details are displayed only when the author (Contributor) or +Curator provides the information when creating the study. Sections +consist of the following: + +- Citation Information - These fields comprise + the `citation `__ for the study, + consisting of a global identifier for all studies and a UNF, or + Universal Numerical Fingerprint, for studies that contain subsettable + data files. It also can include information about authors, producers + and distributors, and references to related studies or papers. +- Abstract and Scope - This section describes the research study, lists + the study's data sets, and defines the study's geographical scope. +- Data Collection/Methodology - This section includes the technical + details of how the author obtained the data. +- Terms of Use - This information explains that the study requires + users to accept a set of conditions or agreements before downloading + or analyzing the data. If any *Terms of Use* text is displayed in + the Cataloging Information section, you are prompted to accept the + conditions when you click the download or analyze icons in the Files + page. + Note: A study might not contain Terms of Use, but in some cases the + original parent dataverse might have set conditions for all studies + owned by that dataverse. In that case, the conditions are inherited + by the study and you must accept these conditions before downloading + files or analyzing the data. + +Study metadata can be downloaded in XML format using a link at the bottom +of the study Cataloging Information tab: `DDI (without variables) `__ +/ `DDI (full) `__. +These links appear for released studies whose metadata has been exported. +Studies are typically exported on a daily basis. + +**List of Study Files** + +When you view a study, click the Documentation, Data and Analysis tab to +view a list of all electronic files associated with the study that were +provided by the author or Curator. 
+ +A study might contain documentation, data, or other files. When the +study contributor uploads data files of the type ``.dta``, ``.sav``, or ``.por`` to the Network, those files are converted +to ``.tab`` tab-delimited files. These ``.tab`` files +are subsettable, and can be subsetted and analyzed online by using the Dataverse Network +application. + +Data files of the type ``.xml`` also are considered to be subsettable, +and can be subsetted and analyzed to a minimal degree online. +An ``.xml`` type file indicates social network data that complies with +the `GraphML `__ file format. + +You can identify a subsettable data file by the *Subsetting* label and +the number of cases and variables listed next to the file name. Other +files that also contain data might be associated with a study, but the +Dataverse Network application does not recognize them as data (or +subsettable) files. + +**Download Study Files** + +You can download any of the following within a study: + +- All or selected data files within a *study* or a *category* (type + of files) +- Individual *data files* +- Individual subsets within a data file (see :ref:`Subset and Analyze + Tabular Data Sets ` + or :ref:`Subset and Analyze Network Data Sets ` for details) + +The default format for subsettable tabular data file downloads +is *tab-delimited*. When you download one or more subsettable files in +tab-delimited format, the file contains a header row. When you download +one subsettable file, you can select from the following formats in +addition to tab-delimited: + +- Original file +- Splus +- Stata +- R + +The default format for subsettable network data file downloads +is *Original file*. In addition, you can choose to download network +data files in *GraphML* format. + +If you select any other format for a tabular data file, the file is +downloaded in a zipped archive. You must unzip the archive to view or +use the individual data file. 
+ +If you download all or a selection of data files within a study, the +files are downloaded in a zipped archive, and the individual files are +in tab-delimited or network format. You must unzip the archive to view +or use the individual data files. + +Note: Studies and data files often have user restrictions applied. If +prompted to accept Terms of Use for a study or file, check the *I Accept* box and then click the Continue button to view or download the +file. + +**User Comments** + +If the User Comment feature is enabled within a dataverse, users are +able to add comments about a study within that dataverse. + +When you view a study, click the User Comments tab to view all comments +associated with the study. Comments can be monitored and abuse reported +to the Network admin, who has permission to remove any comments deemed +inappropriate. Note that the dataverse admin does not have permission to +remove comments, to prevent bias. + +If you choose, you also can add your own comments to a study from the +User Comments tab. See :ref:`Comment on Studies or Data ` for +detailed information. + +Note: To add a comment to a study, you must register and create an +account in the dataverse that owns the study about which you choose to +comment. This helps to prevent abuse and SPAM issues. + +**Versions** + +Upon creating a study, a version is created. This is a way to archive +the *metadata* and *data files* associated with the study citation +or UNF. + +**View Citations** + +You can view a formatted citation for any of the following entities +within the Dataverse Network application: + +- Studies - For every study, you can view a citation for that study. + Go to the Cataloging Information tab for a study and view the *How + to Cite* field. +- Data sets - For any data set, you can view a citation for that set. + Go to the Documentation, Data and Analysis tab for a study to see the + list of study files. 
To view the citation for any data set click + the *View Data Citation* link associated with that subsettable + file. +- Data subsets - If you subset and analyze a data set, you can view a + citation for each subset. + See :ref:`Apply Descriptive Statistics ` or :ref:`Perform Advanced Analysis ` for + detailed information. + Also, when you download a workspace file, a copy + of the citation information for that subset is provided in the + download. + +Note: For individual variables within a subsettable data subset, you can +view the `UNF `__ for that variable. +This is not a full citation for the variable, but it is one component of +that citation. Note also that this does not apply to ``.xml`` data. + +Subset and Analysis +==================== + +Subsetting and analysis can be performed on tabular and network data +files. Refer to the appropriate section for more details. + +.. _tabular-data: + +Tabular Data +-------------- + +Tabular data files (subsettable files) can be subsetted and analyzed +online by using the Dataverse Network application. For analysis, the +Dataverse Network offers a user interface to Zelig, a powerful, R-based +statistical computing tool. A comprehensive set of Statistical Analysis +Models are provided. + +After you find the tablular data set that you want, access the Subset +and Analysis options to use the online tools. Then, you can *subset +data by variables or observations*, translate it into a convenient +format, download subsets, and apply statistics and analysis. + +Network data files (also subsettable) can be subsetted online, and then +downloaded as a subset. Note that network data files cannot be analyzed +online. + +Review the Tabular Data Subset and Recode Tips before you start. + +**Access Subset and Analysis Options** + +You can subset and analyze tabular data files before you download the +file or your subsets. + +To access the Subset and Analysis options for a data set: + +#. 
Click the title of the study from which you choose to analyze or + download a file or subset. +#. Click the Documentation, Data and Analysis tab for the study. +#. In the list of study files, locate the data file that you choose to + download, subset, or analyze. + You can download data sets for a file only if the file entry includes + the subset icon. +#. Click the *Access Subset/Analysis* link associated with the + selected file. + If prompted, check the *I accept* box and click Continue to accept + the Terms of Use. + You see the Data File page listing data for the file that you choose + to subset or analyze. + +**View Variable Quick Summary** + +When a subsettable data file is uploaded for a study, the Dataverse +Network code calculates summary statistics for each variable within that +data file. On any tab of the Data File page, you can view the summary +statistics for each variable in the data file. Information listed +comprises the following: + +- For continuous variables, the application calculates summary + statistics that are listed in the DDI schema. +- For discrete variables, the application tabulates values and their + labels as a frequency table. + Note, however, that if the number of categories is more than 50, the + values are not tabulated. +- The UNF value for each variable is included. + +To view summary statistics for a variable: + +#. In the Data File page, click any tab. +#. In the variable list on the bottom of the page, the right column is + labeled *Quick Summary*. + locate a variable for which you choose to view summary statistics. + Then, click the Quick Summary icon for that variable to toggle the + statistic's information on and off. + You see a small chart that lists information about that variable. The + information provided depends upon the variable selected. + +**Download Tabular Subsets** + +You can download a subset of variables within a tabular-data study file. 
+You also can recode a subset of those variables and download the recoded +subset, if you choose. + +To download a subset of variables in tabular data: + +#. In the Data File page, click the Download Subset tab. +#. Click the radio button for the appropriate File Format in which to + download the variables: Text, R Data, S plus, or Stata. +#. On the right side of the tab, use the Show drop-down list to select + the quantities of variables to list at one time: 10, 20, 50, or All. +#. Scroll down the screen and click the check boxes to select variables + from the table of available values. When you select a variable, it is + added to the Selected Variables box at the top of the tab. + To remove a variable from this box, deselect it from the Variable + Type list at the bottom of the screen. + To select all variables, click the check box beside the column name, + Variable Type. +#. Click the *Create Zip File* button. + The *Create Zip File* button label changes the following + format: ``zipFile_.zip``. +#. Click the ``zipFile_.zip`` button and follow your browser's + prompts to open or save the data file to your computer's disk drive + +.. _apply-descriptive-statistics: + +**Apply Descriptive Statistics** + +When you run descriptive statistics for data, you can do any of the +following with the analysis results: + +- Open the results in a new window to save or print the results. +- Download the R workspace in which the statistics were analyzed, for + replication of the analysis. See Replicate Analysis for more + information. +- View citation information for the data analyzed, and for the full + data set from which you selected variables to analyze. See View + Citations for more information. + +To apply descriptive statistics to a data set or subset: + +#. In the Data File page, click the Descriptive Statistics tab. +#. Click one or both of the Descriptive Statistics options: Univariate + Numeric Summaries and Univariate Graphic Summaries. +#. 
On the right side of the tab, use the Show drop-down list to select
+   one of the following options to show variables in predefined
+   quantities: 10, 20, 50, or All.
+#. Scroll down the screen and click the check boxes to select variables
+   from the table of available values. When you select a variable, it is
+   added to the Selected Variables box at the top of the tab.
+   To remove a variable from this box, deselect it from the Variable
+   Type list at the bottom of the screen.
+   To select all variables, click the check box beside the column name,
+   Variable Type.
+#. Click the Run Statistics button.
+   You see the Dataverse Analysis page.
+#. To save or print the results, scroll to the Descriptive Statistics
+   section and click the link *Open results in a new window*. You then
+   can print or save the window contents.
+   To save the analysis, scroll to the Replication section and click the
+   button *zipFile_.zip*.
+   Review the Citation Information for the data set and for the subset
+   that you analyzed.
+#. Click the link *Back to Analysis and Subsetting* to return to the
+   previous page and continue analysis of the data.
+
+**Recode and Case-Subset Tabular Data**
+
+Review the Tabular Data Recode and Subset Tips before you start work
+with a study's files.
+
+To recode and subset variables within a tabular data set:
+
+#. In the Data File page, click the Recode and Case-Subsetting tab.
+#. On the right side of the variable list, use the Show drop-down list
+   and select one of the following options to show variables in
+   predefined quantities: 10, 20, 50, or All.
+#. Scroll down the screen and click the check boxes to select variables
+   from the table of available values. When you select a variable, it is
+   added to the Selected Variables box at the top of the tab.
+   To remove a variable from this box, deselect it from the Variable
+   Type list at the bottom of the screen.
+   To select all variables, click the check box beside the column name,
+   Variable Type.
+#. 
Select one variable in the Selected Variables box, and then + click *Start*. + The existing name and label of the variable appear in the New + Variable Name and New Variable Label boxes. +#. In the New Variable Label field, change the variable name to a unique + value that is not used in the data file. + The new variable label is optional. +#. In the table below the Variable Name fields, you can check one or + more values to drop them from the subset, or enter new values, + labels, or ranges (as a condition) as needed. Click the Add + Value/Range button to create more entries in the value table. + Note: Click the ``?`` Info buttons to view tips on how to use the + Recode and Subset table. Also, See Tabular Data Recode and Subset + Tips for more information about adding values and ranges. +#. Click the Apply Recodes button. + Your renamed variables appear at the bottom of the page in the List + of Recode Variables. +#. Select another variable in the Selected Variables box, click the + Start button, and repeat the recode action. + Repeat this process for each variable that you choose to recode. +#. To remove a recoded variable, scroll to the List of Recode Variables + at the bottom of the page and click the Remove link for the recoded + variable that you choose to delete from your subset. + +.. _perform-advanced-analysis: + +**Perform Advanced Analysis** + +When you run advanced statistical analysis for data, you can do any of +the following with the analysis results: + +- Open the results in a new window to save or print the results. +- Download the R workspace in which the statistics were analyzed, for + replication of the analysis. See Replicate Analysis for more + information. +- View citation information for the data analyzed, and for the full + data set from which you selected variables to analyze. See View + Citations for more information. + +To run statistical models for selected variables: + +#. 
In the Data File page, click the Advanced Statistical Analysis tab. +#. Scroll down the screen and click the check boxes to select variables + from the table of available values. When you select a variable, it is + added to the Selected Variables box at the top of the tab. + To remove a variable from this box, deselect it from the Variable + Type list at the bottom of the screen. + To select all variables, click the check box beside the column name, + Variable Type. +#. Select a model from the Choose a Statistical Model drop-down list. +#. Select one variable in the Selected Variables box, and then click the + applicable arrow button to assign a function to that variable from + within the analysis model. + You see the name of the variables in the appropriate function box. + Note: Some functions allow a specific type of variable only, while + other functions allow multiple variable types. Types include + Character, Continuous, and Discrete. If you assign an incorrect + variable type to a function, you see an ``Incompatible type`` error + message. +#. Repeat the variable and function assignments until your model is + complete. +#. Select your Output options. +#. Click the Run Model button. + If the statistical model that you defined is incomplete, you first + are prompted to correct the definition. Correct your model, and then + click Run Model again. + You see the Dataverse Analysis page. +#. To save or print the results, scroll to the Advanced Statistical + Analysis section and click the link *Open results in a new window*. + You then can print or save the window contents. + To save the analysis, scroll to the Replication section and click the + button ``zipFile_.zip``. + Review the Citation Information for the data set and for the subset + that you analyzed. +#. Click the link *Back to Analysis and Subsetting* to return the + previous page and continue analysis of the data. 
+
+**Replicate Analysis**
+
+You can save the R workspace in which the Dataverse Network performed an
+analysis. You can download the workspace as a zipped archive that
+contains four files. Together, these files enable you to recreate the
+subset analysis in another R environment:
+
+- ``citationFile..txt`` - The citation for the subset that you analyzed.
+- ``rhistoryFile..R`` - The R code used to perform the analysis.
+- ``tempsubsetfile..tab`` - The subset data that you analyzed.
+- ``tmpRWSfile..RData`` - The R object file used to perform the analysis.
+
+To download this workspace for your analysis:
+
+#. For any subset, Apply Descriptive Statistics or Perform Advanced
+   Analysis.
+#. On the Dataverse Analysis or Advanced Statistical Analysis page,
+   scroll to the Replication section and click the
+   button ``zipFile_.zip``.
+#. Follow your browser's prompts to save the zipped archive.
+   When the archive file is saved to your local storage, extract the
+   contents to use the four files that compose the R workspace.
+ +**Statistical Analysis Models** + +You can apply any of the following advanced statistical models to all or +some variables in a tabular data set: + +Categorical data analysis: Cross tabulation + +Ecological inference model: Hierarchical mulitnomial-direct ecological +inference for R x C tables + +Event count models, for event count dependent variables: + +- Negative binomial regression +- Poisson regression + +Models for continuous bounded dependent variables: + +- Exponential regression for duration +- Gamma regression for continuous positives +- Log-normal regression for duration +- Weibull regression for duration + +Models for continuous dependent variables: + +- Least squares regression +- Linear regression for left-censoreds + +Models for dichotomous dependent variables: + +- Logistic regression for binaries +- Probit regression for binaries +- Rare events logistic regression for binaries + +Models for ordinal dependent variables: + +- Ordinal logistic regression for ordered categoricals +- Ordinal probit regression for ordered categoricals + +**Tabular Data Recode and Subset Tips** + +Use the following guidelines when working with tabular data files: + +- Recoding: + + - You must fill at least the first (new value) and last (condition) + columns of the table; the second column is optional and for a new + value label. + - If the old variable you chose for recoding has information about + its value labels, you can prefill the table with these data for + convenience, and then modify these prefilled data. + - To exclude a value from your recoding scheme, click the Drop check + box in the row for that value. + +- Subsetting: + + - If the variable you chose for subsetting has information about its + value labels, you can prefill the table with these data for + convenience. + - To exclude a value in the last column of the table, click the Drop + check box in row for that value. 
+ - To include a particular value or range, enter it in the last + column whose header shows the name of the variable for subsetting. + +- Entering a value or range as a condition for subsetting or recoding: + + - Suppose the variable you chose for recoding is x. + If your condition is x==3, enter ``3``. + If your condition is x < -3, enter ``(--3``. + If your condition is x > -3, enter ``-3-)``. + If your condition is -3 < x < 3, enter ``(-3, 3)``. + - Use square brackets (``[]``) for closed ranges. + - You can enter non-overlapping values and ranges separated by a + comma, such as ``0,[7-9]``. + +.. _network-data: + +Network Data +-------------- + +Network data files (subsettable files) can be subsetted and analyzed +online by using the Dataverse Network application. For analysis, the +Dataverse Network offers generic network data analysis. A list of +Network Analysis Models are provided. + +Note: All subsetting and analysis options for network data assume a +network with undirected edges. + +After you find the network data set that you want, access the Subset and +Analysis options to use the online tools. Then, you can subset data +by *vertices* or *edges*, download subsets, and apply network +measures. + +**Access Network Subset and Analyze Options** + +You can subset and analyze network data files before you download the +file or your subsets. To access the Subset and Analysis options for a +network data set: + +#. Click the title of the study from which you choose to analyze or + download a file or subset. +#. Click the Documentation, Data and Analysis tab for the study. +#. In the list of study files, locate the network data file that you + choose to download, subset, or analyze. You can download data sets + for a file only if the file entry includes the subset icon. +#. Click the \ *Access Subset/Analysis* link associated with the + selected file. If prompted, check the \ *I accept* box and click + Continue to accept the Terms of Use. 
+ You see the Data File page listing data for the file that you choose + to subset or analyze. + +**Subset Network Data** + +There are two ways in which you can subset network data. First, you can +run a manual query, and build a query of specific values for edge or +vertex data with which to subset the data. Or, you can select from among +three automatically generated queries with which to subset the data: + +- Largest graph - Subset the largest connected component of the + network. That is, the largest group of nodes that can reach one + another by walking across edges. +- Neighborhood - Subset the neighborhood of the selected + vertices. That is, generate a subgraph of the original network + composed of all vertices that are positioned at most steps away + from the currently selected vertices in the original network, plus + all of the edges that connect them. + +You also can successively subset data to isolate specific values +progressively. + +Continue to the next topics for detailed information about subsetting a +network data set. + +**Subset Manually** + +Perform a manual query to slice a graph based on the attributes of its +vertices or edges. You choose whether to subset the graph based on +vertices or edges, then use the Manual Query Builder or free-text Query +Workspace fields to construct a query based on that element's +attributes. A single query can pertain only to vertices or only to +edges, never both. You can perform separate, sequential vertex or edge +queries. + +When you perform a vertex query, all vertices whose attributes do not +satisfy the query are dropped from the graph, in addition to all edges +that touch them. When you perform an edge query, all edges whose +attributes do not satisfy the criteria are dropped, but all vertices +remain *unless* you enable the *Eliminate disconnected vertices* check box. Note that enabling this option drops all +disconnected vertices whether or not they were disconnected before the +edge query. 
+
+Review the Network Data Tips before you start work with a study's files.
+
+To subset variables within a network data set by using a manually
+defined query:
+
+#. In the Data File page, click the Manual Query radio button near the
+   top of the page.
+#. Use the Attribute Set drop-down list and select Vertex to subset by
+   node or vertex values.
+   Select Edge to subset by edge values.
+#. Build the first attribute selection value in the Manual Query Builder
+   panel:
+
+   #. Select a value in the Attributes list to assign values on which to
+      subset.
+   #. Use the Operators drop-down list to choose the function by which
+      to define attributes for selection in this query.
+   #. In the Values field, type the specific values to use for selection
+      of the attribute.
+   #. Click *Add to Query* to complete the attribute definition for
+      selection.
+      You see the query string for this attribute in the Query Workspace
+      field.
+
+   Alternatively, you can enter your query directly by typing it into
+   the Query Workspace field.
+
+#. Continue to add selection values to your query by using the Manual
+   Query Builder tools.
+#. To remove any vertices that do not connect with other data in the
+   set, check the \ *Eliminate disconnected vertices* check box.
+#. When you complete construction of your query string, click \ *Run* to
+   perform the query.
+#. Scroll to the bottom of the window, and when the query is processed
+   you see a new entry in the Subset History panel that defines your
+   query.
+
+Continue to build a successive subset or download a subset.
+
+**Subset Automatically**
+
+Perform an Automatic Query to select a subgraph of the network based on
+structural properties of the network. Remember to review the Network
+Data Tips before you start work with a study's files.
+
+To subset variables within a network data set by using an automatically
+generated query:
+
+#. In the Data File page, click the Automatic Query radio button near
+   the middle of the page.
+#. 
Use the Function drop-down list and select the type of function with + which to select your subset: + + - Largest graph - Subset the largest group of nodes that can + reach one another by walking across edges. + - Neighborhood - Generate a subgraph of the original network + composed of all vertices that are positioned at most steps + away from the currently selected vertices in the original network, + plus all of the edges that connect them. This is the only query + that can (and generally does) increase the number of vertices and + edges selected. + +#. In the Nth field, enter the degree with which to select data + using that function. +#. Click \ *Run* to perform the query. +#. Scroll to the bottom of the window, and when the query is processed + you see a new entry in the Subset History panel that defines your + query. + +Continue to build a successive subset or download a subset. + +**Build or Restart Subsets** + +**Build a Subset** + +To build successive subsets and narrow your data selection +progressively: + +#. Perform a manual or automatic subset query on a selected data set. +#. Perform a second query to further narrow the results of your previous + subset activity. +#. When you arrive at the subset with which you choose to work, continue + to analyze or download that subset. + +**Undo Previous Subset** + +You can reset, or undo, the most recent subsetting action for a data +set. Note that you can do this only one time, and only to the most +recent subset. + +Scroll to the Subset History panel at the bottom of the page and +click \ *Undo* in the last row of the list of successive subsets. +The last subset is removed, and the previous subset is available for +downloading, further subsetting, or analysis. + +**Restart Subsetting** + +You can remove all subsetting activity and restore data to the original +set. + +Scroll to the Subset History panel at the bottom of the page and +click \ *Restart* in the row labeled \ *Initial State*. 
+The data set is restored to the original condition, and is available +for downloading, subsetting, or analysis. + +**Run Network Measures** + +When you finish selecting the specific data that you choose to analyze, +run a Network Measure analysis on that data. Review the Network Data +Tips before you start your analysis. + +#. In the Data File page, click the Network Measure radio button near + the bottom of the page. +#. Use the Attributes drop-down list and select the type of analysis to + perform: + + - Page Rank - Determine how much influence comes from a specific + actor or node. + - Degree - Determine the number of relationships or collaborations + exist within a network data set. + - Unique Degree - Determine the number of collaborators that exist. + - In Largest Component - Determine the largest component of a + network. + - Bonacich Centrality - Determine the importance of a main actor or + node. + +#. In the Parameters field, enter the specific value with which to + subset data using that function: + + - Page Rank - Enter a value for the parameter , a proportion, + between 0 and 1. + - Degree - Enter the number of relationships to extract from a + network data set. + - Unique Degree - Enter the number of unique relationships to + extract. + - In Largest Component - Enter the number of components to extract + from a network data set, starting with the largest. + +#. Click *Run* to perform the analysis. +#. Scroll to the bottom of the window, and when the analysis is + processed you see a new entry in the Subset History panel that + contains your analyzed data. + +Continue to download the analyzed subset. + +**Download Network Subsets or Measures** + +When you complete subsetting and analysis of a network data set, you can +download the final set of data. Network data subsets are downloaded in a +zip archive, which has the name ``subset_.zip``. 
+This archive contains three files: + +- ``subset.xml`` - A GraphML formatted file that contains the final + subsetted or analyzed data. +- ``verticies.tab`` - A tabular file that contains all node data for + the final set. +- ``edges.tab`` - A tabular file that contains all relationship data + for the final set. + +Note: Each time you download a subset of a specific network data set, a +zip archive is downloaded that has the same name. All three zipped files +within that archive also have the same names. Be careful not to +overwrite a downloaded data set that you choose to keep when you perform +sucessive downloads. + +To download a final set of data: + +#. Scroll to the Subset History panel on the Data File page. +#. Click *Download Latest Results* at the bottom of the history list. +#. Follow your browser's prompts to open or save the data file to your + computer's disk drive. Be sure to save the file in a unique location + to prevent overwritting an existing downloaded data file. + +**Network Data Tips** + +Use these guidelines when subsetting or analyzing network data: + +- For a Page rank network measure, the value for the parameter is a + proportion and must be between 0 and 1. Higher values of increase + dispersion, while values of closer to zero produce a more uniform + distribution. PageRank is normalized so that all of the PageRanks sum + to 1. +- For a Bonacich Centrality network measure, the alpha parameter is a + proportion that must be between -1 and +1. It is normalized so that + all alpha centralities sum to 1. +- For a Bonacich Centrality network measure, the exo parameter must be + greater than 0. A higher value of exo produces a more uniform + distribution of centrality, while a lower value allows more + variation. +- For a Bonacich Centrality network measure, the original alpha + parameter of alpha centrality takes values only from -1/lambda to + 1/lambda, where lambda is the largest eigenvalue of the adjacency + matrix. 
In this Dataverse Network implementation, the alpha parameter + is rescaled to be between -1 and 1 and represents the proportion of + 1/lambda to be used in the calculation. Thus, entering alpha=1 sets + alpha to be 1/lambda. Entering alpha=0.5 sets alpha to be + 1/(2\*lambda). + +Data Visualization +================== + +Data Visualization allows contributors to make time series +visualizations available to end users. These visualizations may be +viewable and downloadable as graphs or data tables. Please see the +appropriate guide for more information on setting up a visualization or +viewing one. + +Explore Data +-------------- + +The study owner may make a data visualization interface available to +those who can view a study.  This will allow you to select various data +variables and see a time series graph or data table.  You will also be +able to download your custom graph for use in your own reports or +articles. + +The study owner will at least provide a list of data measures from which +to choose.   These measures may be divided into types.  If they are you +will be able to narrow the list of measures by first selecting a measure +type.  Once you have selected a measure, if there are multiple variables +associated with the measure you will be able to select one or more +filters to uniquely identify a variable. By default any filter assigned +to a variable will become the label associated with the variable in the +graph or table.   By pressing the Add Line button you will add the +selected variable to your custom graph. + +  |image0| + +Once you have added data to your graph you will be able to customize it +further.  You will be given a choice of display options made available +by the study owner.  These may include an interactive flash graph, a +static image graph and a numerical data table.   You will also be +allowed to edit the graph title, which by default is the name of the +measure or measures selected. You may also edit the Source Label.  
+Other customizable features are the height and the legend location of +the image graph.  You may also select a subset of the data by selecting +the start and end points of the time series.  Finally, on the display +tab you may opt to display the series as indices in which case a single +data point known as the reference period will be designated as 100 and +all other points of the series will be calculated relative to the +reference period.  If you select data points that do not have units in +common (i.e. one is in percent while the other is in dollars) then the +display will automatically be set to indices with the earliest common +data point as the default reference period. + +|image1| + +On the Line Details tab you will see additional information on the data +you have selected.  This may include links to outside web pages that +further explain the data.  On this tab you will also be able to edit the +label or delete the line from your custom graph. + +On the Export tab you will be given the opportunity to export your +custom graph and/or data table.   If you select multiple files for +download they will be bound together in a single zip file.  + +The Refresh button clears any data that you have added to your custom +graph and resets all of the display options to their default values. + +Set Up +-------- + +This feature allows you to make time series visualizations available to +your end users.   These visualizations may be viewable and downloadable +as graphs or data tables.  In the current beta version of the feature +your data file must be subsettable and must contain at least one date +field and one or more measures.  You will be able to associate data +fields from your file to a time variable and multiple measures and +filters.  + +When you select Set Up Exploration from within a study, you must first +select the file for which you would like to set up the exploration.  The +list of files will include all subsettable data files within the study. 
+ +Once you have selected a file you will go to a screen that has 5 tabs to +guide you through the data visualization set-up. (In general, changes +made to a visualization on the individual tabs are not saved to the +database until the form’s Save button is pressed.  When you are in add +or edit mode on a tab, the tab will have an update or cancel button to +update the “working copy” of a visualization or cancel the current +update.) + +If you have a previously set up an exploration for a data file you may copy that exploration to a new file. +When you select a file for set up you will be asked if you want to copy an exploration from another data file +and will be presented a list of files from which to choose. Please note that the data variable names must +be identical in both files for this migration to work properly. + +**Time Variable** + +On the first tab you select the time variable of your data file.  The +variable list will only include those variables that are date or time +variables.  These variables must contain a date in each row.  You may +also enter a label in the box labeled Units.  This label will be +displayed under the x-axis of the graph created by the end user. + +|image2| + +**Measures** + +On the Measures tab you may assign measures to the variables in your +data file.  First you may customize the label that the end user will see +for measures.  Next you may add measures by clicking the “Add Measure” +link.  Once you click that link you must give your measure a unique +name.  Then you may assign Units to it.  Units will be displayed as the +y-axis label of any graph produced containing that measure.  In order to +assist in the organizing of the measures you may create measure types +and assign your measures to one or more measure types.  Finally, the +list of variables for measures will include all those variables that are +entered as numeric in your data file.  
If you assign multiple variables +to the same measure you will have to distinguish between them by +assigning appropriate filters.   For the end user, the measure will be +the default graph name. + +|image3|   + +**Filters** + +On the filters tab you may assign filters to the variables in your data +file.  Generally filters contain demographic, geographic or other +identifying information about the variables.  For a given group of +filters only one filter may be assigned to a single variable.  The +filters assigned to a variable must be sufficient to distinguish among +the variables assigned to a single measure.   Similar to measures, +filters may be assigned to one or more types.   For the end user the +filter name will be the default label of the line of data added to a +graph. + +|image4| + +| + +**Sources** + +On the Sources tab you can indicate the source of each of the variables +in your data file.  By default, the source will be displayed as a note +below the x-axis labels.  You may assign a single source to any or all +of your data variables.  You may also assign multiple sources to any of +your data variables. + +|image5| + +| + +**Display** + +On the Display tab you may customize what the end user sees in the Data +Visualization interface.  Options include the data visualization formats +made available to the end user and default view, the Measure Type label, +and the Variable Info Label. + +| +| |image6|   + +**Validate Button** + +When you press the “Validate” button the current state of your +visualization data will be validated.  In order to pass validation your +data must have one time variable defined.  There must also be at least +one measure variable assigned.  If more than one variable is assigned to +a given measure then filters must be assigned such that each single +variable is defined by the measure and one or more filters.  If the data +visualization does not pass validation a detailed error message +enumerating the errors will be displayed. 
+ +**Release Button** + +Once the data visualization has been validated you may release it to end +users by pressing the “Release” button.  The release button will also +perform a validation.  Invalid visualizations will not be released, but +a detailed error message will not be produced.  + +**Save Button** + +The “Save” button will save any changes made to a visualization on the +tabs to the database.   If a visualization has been released and changes +are saved that would make it invalid the visualization will be set to +“Unreleased”. + +**Exit Button** + +To exit the form press the “Exit” button.  You will be warned if you +have made any unsaved changes. + +**Examples** + +Simplest case – a single measure associated with a single variable. + +Data variable contains information on average family income for all +Americans.  The end user of the visualization will see an interface as +below: + +|image7| + +Complex case - multiple measures and types along with multiple filters +and filter types.  If you have measures related to both income and +poverty rates you can set them up as measure types and associate the +appropriate measures with each type.  Then, if you have variables +associated with multiple demographic groups you can set them up as +filters.  You can set up filter types such as age, gender, race and +state of residence.  Some of your filters may belong to multiple types +such as males age 18-34. + +|image8| + +.. |image0| image:: ./datausers-guides_files/measure_selected.png +.. |image1| image:: ./datausers-guides_files/complex_graph_screenshot.png +.. |image2| image:: ./datausers-guides_files/edittimevariablescreenshot.png +.. |image3| image:: ./datausers-guides_files/editmeasuresscreenshot.png +.. |image4| image:: ./datausers-guides_files/editfiltersscreenshot.png +.. |image5| image:: ./datausers-guides_files/sourcetabscreenshot.png +.. |image6| image:: ./datausers-guides_files/displaytabscreenshot.png +.. 
|image7| image:: ./datausers-guides_files/simple_explore_data.png +.. |image8| image:: ./datausers-guides_files/complex_exploration.png + + +Dataverse Administration +++++++++++++++++++++++++++++ + +Once a user creates a dataverse becomes its owner and therefore is the +administrator of that dataverse. The dataverse administrator has access +to manage the settings described in this guide. + +Create a Dataverse +===================== + +A dataverse is a container for studies and is the home for an individual +scholar's or organization's data. + +Creating a dataverse is easy but first you must be a registered user. +Depending on site policy, there may be a "Create a Dataverse" link on +the Network home page. This first walks you through creating an account, +then a dataverse.  + +1. Fill in the required information: + + * **Type of Dataverse**: Choose Scholar if it represents an individual's work otherwise choose Basic. + * **Dataverse Name**: This will be displayed on the network and dataverse home pages. If this is a Scholar dataverse it will automatically be filled in with the scholar's first and last name. + * **Dataverse Alias**: This is an abbreviation, usually lower-case, that becomes part of the URL for the new dataverse. + + The required fields to create a dataverse are configurable in the Network Options, so fields that are required may also include + Affiliation, Network Home Page Description, and Classification. + +2. Click "Save" and you're done! An email will be sent to you with more information, including the URL to access you new dataverse. + +\*Required information can vary depending on site policy. Required fields are noted with a **red asterisk**. + +Edit General Settings +===================== + +Use the General Settings tab on the Options page to release your +dataverse, change the name, alias, and classification of your +dataverse. The classifications are used to browse to your dataverse from +the Network home page. 
+ +Navigate to the General Settings from the Options page: + +Dataverse home page > Options page > Settings tab > General subtab + +To edit release your dataverse: + +Select *Released* from the drop-down list when your dataverse is ready +to go public. Select *Not Released* if you wish to block public access +to your dataverse. + +Your dataverse cannot be released if it does not contain any released +studies. Create a study or define a collection with studies from other +dataverses before you attempt to make your dataverse public. + +To edit the affiliation, name, or alias settings of your dataverse: + +If you edit a Scholar dataverse type, you can edit the following fields: + +- First Name - Edit your first name, which appears with your last name + on the Network home page in the Scholar Dataverse group. +- Last Name - Edit your last name, which appears with your first name + on the Network home page in the Scholar Dataverse group. + +If you edit either Scholar or basic types, you can edit any of the +following fields: + +- Affiliation - Edit your institutional identity. +- Dataverse Name - Edit the title for your dataverse, which appears on + your dataverse home page. There are no naming restrictions. +- Dataverse Alias - Edit your dataverse's URL. Special characters + (~,\`, !, @, #, $, %, ^, &, and \*) and spaces are not allowed. + **Note**: if you change the Dataverse Alias field, the URL for your + Dataverse changes (http//.../dv/'alias'), which affects links to this + page. +- Network Home Page Description - Edit the text that appears beside the + name of your dataverse on the Network home page. +- Classification - Check the classifications, or groups, in which you + choose to include your dataverse. Remove the check for any + classifications that you choose not to join. + +.. 
_edit-layout-branding: + +Edit Layout Branding +==================== + +**Customize Layout Branding (header/footer) to match your website** + +The Layout Branding allows you to customize your dataverse, by +**adding HTML to the default banner and footer**, such as that used on +your personal website. If your website has such layout elements as a +navigation menu or images, you can add them here. Each dataverse is +created with a default customization added, which you can leave as is, +edit to change the background color, or add your own customization. + +Navigate to the Layout Branding from the Options page: + +``Dataverse home page > Options page > Settings tab > Customization subtab`` + +To edit the banner and footer of your dataverse: + +#. In the Custom Banner field, enter your plain text, and HTML to define + your custom banner. +#. In the Custom Footer field, enter your plain text, and HTML to define + your custom footer. + +**Embed your Dataverse into your website (iframes)** + +Want to embed your Dataverse on an OpenScholar site? Follow :ref:`these special instructions `. + +For dataverse admins that are more advanced HTML developers, or that +have HTML developers available to assist them, you can create a page on +your site and add the dataverse with an iframe. + +1. Create a new page, that you will host on your site. +2. Add the following HTML code to the content area of that new + page. + + + | ```` + +3. Edit that code by adding the URL of your dataverse (replace the + SAMPLE\_ONLY URL in the example, including the brackets “[ ]”), and + adjusting the height.  We suggest you keep the height at or under + 600px in order to fit the iframe into browser windows on computer + monitor of all sizes, with various screen resolutions. +#. The dataverse is set to have a min-width of 724px, so try give the + page a width closer to 800px. +#. 
Once you have the page created on your site, with the iframe code, go + to the Setting tab, then the Customization subtab on your dataverse + Options page, and click the checkbox that disables customization for + your dataverse. +#. Then enter the URL of the new page on your site. That will redirect + all users to the new page on your site. + +**Layout Branding Tips** + +- HTML markup, including ``script`` tags for JavaScript, and ``style`` + tags for an internal style sheet, are permitted. The ``html,`` + ``head`` and ``body`` element tags are not allowed. +- When you use an internal style sheet to insert CSS into your + customization, it is important to avoid using universal ("``*``\ ") + and type ("``h1``\ ") selectors, because these can overwrite the + external style sheets that the dataverse is using, which can break + the layout, navigation or functionality in the app. +- When you link to files, such as images or pages on a web server + outside the network, be sure to use the full URL (e.g. + ``http://www.mypage.com/images/image.jpg``). +- If you recreate content from a website that uses frames to combine + content on the sides, top, or bottom, then you must substitute the + frames with ``table`` or ``div`` element types. You can open such an + element in the banner field and close it in the footer field. +- Each time you click "Save", your banner and footer automatically are + validated for HTML and other code errors. If an error message is + displayed, correct the error and then click "Save" again. +- You can use the banner or footer to house a link from your homepage + to your personal website. Be sure to wait until you release your + dataverse to the public before you add any links to another website. + And, be sure to link back from your website to your homepage. +- If you are using an OpenScholar or iframe site and the redirect is + not working, you can edit your branding settings by adding a flag to + your dataverse URL: disableCustomization=true. 
For example: + ``dvn.iq.harvard.edu/dvn/dv/mydv?disableCustomization=true``. To + reenable: ``dvn.iq.harvard.edu/dvn/dv/mydv?disableCustomization=false``. + Disabling the customization lasts for the length of the user session. + +Edit Description +================== + +The Description is displayed on your dataverse Home page. Utilize this +field to display announcements or messaging. + +Navigate to the Description from the Options page: + +``Dataverse home page > Options page > Settings tab > General subtab >Home Page Description`` + +To change the content of this description: + +- Enter your description or announcement text in the field provided. + Note: A light blue background in any form field indicates HTML, JavaScript, and style tags are permitted. The ``html,``, ``head`` and ``body`` element types are not allowed. + +Previous to the Version 3.0 release of the Dataverse Network, the +Description had a character limit set at 1000, which would truncate +longer description with a **more >>** link. This functionality has been +removed, so that you can add as much text or code to that field as you +wish. If you would like to add the character limit and truncate +functionality back to your dataverse, just add this snippet of +Javascript to the end of your description. + + + | ```` + +.. _edit-study-comments-settings: + +Edit Study Comments Settings +============================ + +You can enable or disable the Study User Comments feature in your +dataverse. If you enable Study User Comments, any user has the option to +add a comment to a study in this dataverse. By default, this feature is +enabled in all new dataverses. Note that you should ensure there are +terms of use at the network or dataverse level that define acceptable +use of this feature if it is enabled. 
+ +Navigate to the Study User Comments from the Options page: + +``Dataverse home page > Options page > Settings tab > General subtab >Allow Study Comments`` + +A user must create an account in your dataverse to use the comment +feature. When you enable this feature, be aware that new accounts will +be created in your dataverse when users add comments to studies. In +addition, the Report Abuse function in the comment feature is managed by +the network admin. If a user reads a comment that might be +inappropriate, that user can log in or register an account and access +the Report Abuse option. Comments are reported as abuse to the network +admin. + +To manage the Study User Comments feature in your dataverse: + +- Click the "Allow Study Comments" check box to enable comments. +- Click the checked box to remove the check and disable comments. + +Manage E-Mail Notifications +=========================== + +You can edit the e-mail address used on your dataverse’s Contact Us page +and by the network when sending notifications on processes and errors. +By default, the e-mail address used is from the user account of the +dataverse creator. + +Navigate to the E-Mail Notifications from the Options page: + +``Dataverse home page > Options page > Settings tab > General subtab >E-Mail Address(es)`` + +To edit the contact and notification e-mail address for your dataverse: + +- Enter one or more e-mail addresses in the **E-Mail Address** field. + Provide the addresses of users who you choose to receive notification + when contacted from this dataverse. Any time a user submits a request + through your dataverse, including the Request to Contribute link and + the Contact Us page, e-mail is sent to all addresses that you enter + in this field. Separate each address from others with a comma. Do not + add any spaces between addresses. 
+ +Add Fields to Search Results +============================ + +Your dataverse includes the network's search and browse features to +assist your visitors in locating the data that they need. By default, +the Cataloging Information fields that appear in the search results or +in studies' listings include the following: study title, authors, ID, +production date, and abstract. You can customize other Cataloging +Information fields to appear in search result listings after the default +fields. Additional fields appear only if they are populated for the +study. + +Navigate to the Search Results Fields from the Options page: + +``Dataverse home page > Options page > Settings tab > Customization subtab > Search Results Fields`` + +To add more Cataloging Information fields listed in the Search or Browse +panels: + +- Click the check box beside any of the following Cataloging + Information fields to include them in your results pages: Production + Date, Producer, Distribution Date, Distributor, Replication For, + Related Publications, Related Material, and Related Studies. + +Note: These settings apply to your dataverse only. + +Set Default Study Listing Sort Order +==================================== + +Use the drop-down menu to set the default sort order of studies on the +Study Listing page. By default, they are sorted by Global ID, but you +can also sort by Title, Last Released, Production Date, or Download +Count. + +Navigate to the Default Study Listing Sort Order from the Options page: + +``Dataverse home page > Options page > Settings tab > Customization subtab > Default Sort Order`` + +Enable Twitter +============== + +If your Dataverse Network has been configured for Automatic Tweeting, +you will see an option listed as "Enable Twitter." When you click this, +you will be redirected to Twtter to authorize the Dataverse Network +application to send tweets for you. + +Once authorized, tweets will be sent for each new study or study version +that is released. 
+ +To disable Automatic Tweeting, go to the Options page, and click +"Disable Twitter." + +Navigate to Enable Twitter from the Options page: + +``Dataverse home page > Options page > Settings tab > Promote Your Dataverse subtab > Sync Dataverse With Twitter`` + +Get Code for Dataverse Link or Search Box +========================================= + +Add a dataverse promotional link or dataverse search box on your +personal website by copying the code for one of the sample links on this +page, and then pasting it anywhere on your website to create the link. + +Navigate to the Code for Dataverse Link or Search Box from the Options +page: + +``Dataverse home page > Options page > Settings tab > Promote Your Dataverse subtab`` + +Edit Terms for Study Creation +============================= + +You can set up Terms of Use for the dataverse that require users to +acknowledge your terms and click "Accept" before they can contribute to +the dataverse. + +Navigate to the Terms for Study Creation from the Options page: + +``Dataverse home page > Options page > Permissions tab > Terms subtab > Deposit Terms of Use`` + +To set Terms of Use for creating or uploading to the dataverse: + +#. Click the Enable Terms of Use check box. +#. Enter a description of your terms to which visitors must agree before + they can create a study or upload a file to an existing study. + Note: A light blue background in any form field indicates HTML, + JavaScript, and style tags are permitted. The ``html`` and ``body`` + element types are not allowed. + +Edit Terms for File Download +============================ + +You can set up Terms of Use for the network that require users to +acknowledge your terms and click "Accept" before they can download or +subset contents from the network. 
+ +Navigate to the Terms for File Download from the Options page: + +``Dataverse home page > Options page > Permissions tab > Terms subtab > Download Terms of Use`` + +To set Terms of Use for downloading or subsetting contents from any +dataverse in the network: + +#. Click the Enable Terms of Use check box. +#. Enter a description of your terms to which visitors must agree before + they can download or analyze any file. + Note: A light blue background in any form field indicates HTML, + JavaScript, and style tags are permitted. The ``html`` and ``body`` + element types are not allowed. + +Manage Permissions +================== + +Enable contribution invitation, grant permissions to users and groups, +and manage dataverse file permissions. + +Navigate to Manage Permissions from the Options page: + +``Dataverse home page > Options page > Permissions tab > Permissions subtab`` + +**Contribution Settings** + +Choose the access level contributors have to your dataverse. Whether +they are allowed to edit only their own studies, all studies, or whether +all registered users can edit their own studies (Open dataverse) or all +studies (Wiki dataverse). In an Open dataverse, users can add studies by +simply creating an account, and can edit their own studies any time, +even after the study is released. In a Wiki dataverse, users cannot only +add studies by creating an account, but also edit any study in that +dataverse. Contributors cannot, however, release a study directly. After +their edits, they submit it for review and a dataverse administrator or +curator will release it. 
+ +**User Permission Settings** + +There are several roles defined for users of a Dataverse Network +installation: + +- Data Users - Download and analyze all types of data +- Contributors - Distribute data and receive recognition and citations + to it +- Curators - Summarize related data, organize data, or manage multiple + sets of data +- Administrators - Set up and manage contributions to your dataverse, + manage the appearance of your dataverse, organize your dataverse + collections + +**Privileged Groups** + +Enter group name to allow a group access to the dataverse. Groups are +created by network administrators. + +**Dataverse File Permission Settings** + +Choose 'Yes' to restrict ALL files in this dataverse. To restrict files +individually, go to the Study Permissions page of the study containing +the file. + +Create User Account +=================== + +As a registered user, you can: + +- Add studies to open and wiki dataverses, if available +- Contribute to existing studies in wiki dataverses, if available +- Add user comments to studies that have this option +- Create your own dataverse + +**Network Admin Level** + +Navigate to Create User Account from the Options page: + +``Network home page > Options page > Permissions tab > Users subtab > Create User link`` + +To create an account for a new user in your Network: + +#. Complete the account information page. + Enter values in all required fields. Note: an email address can also be used as a username +#. Click Create Account to save your entries. +#. Go to the Permissions tab on the Options page to give the user + Contributor, Curator or Admin access to your dataverse. + +**Dataverse Admin Level** + +Navigate to Create User Account from the Options page: + +``Dataverse home page > Options page > Permissions tab > Permissions subtab > Create User link`` + +To create an account for a new user in your Dataverse: + +#. Complete the account information page. + Enter values in all required fields. 
Note: an email address can also be used as a username +#. Click Create Account to save your entries. +#. Go to the Permissions tab on the Options page to give the user + Contributor, Curator or Admin access to your dataverse. + + +**New User: Network Homepage** + +As a new user, to create an account at the **Dataverse Network homepage**, select "Create Account" +at the top-right hand side of the page. + +Complete the required information denoted by the red asterisk and save. + +**New User: Dataverse Level** + +As a new user, to create an account at the **Dataverse level**, select "Create Account" +at the top-right hand side of the page. Note: For Open Dataverses select "Create Account" in the orange box +on the top right hand side of the page labelled: "OPEN DATAVERSE". + +Complete the required information denoted by the red asterisk and save. + +Download Tracking Data +====================== + +You can view any guestbook responses that have been made in your +dataverse. Beginning with version 3.2 of Dataverse Network, if the +guestbook is not enabled, data will be collected silently based on the +logged-in user or anonymously. The data displayed includes user account +data or the session ID of an anonymous user, the global ID, study title +and file name of the file downloaded, the time of the download, the type +of download and any custom questions that have been answered. The +username/session ID and download type were not collected in the 3.1 +version of Dataverse Network. A comma separated values file of all +download tracking data may be downloaded by clicking the Export Results +button. + +Navigate to the Download Tracking Data from the Options page: + +``Dataverse home page > Options page > Permissions tab > Download Tracking Data subtab`` + +Edit File Download Guestbook +============================ + +You can set up a guestbook for your dataverse to collect information on +all users before they can download or subset contents from the +dataverse. 
The guestbook is independent of Terms of Use. Once it has +been enabled it will be shown to any user for the first file a user +downloads from a given study within a single session. If the user +downloads additional files from the study in the same session a record +will be created in the guestbook response table using data previously +entered. Beginning with version 3.2 of Dataverse Network, if the +dataverse guestbook is not enabled in your dataverse, download +information will be collected silently based on logged-in user +information or session ID. + +Navigate to the File Download Guestbook from the Options page: + +``Dataverse home page > Options page > Permissions tab > Guestbook subtab`` + +To set up a Guestbook for downloading or subsetting contents from any study in the dataverse: + +#. Click the Enable File Download Guestbook check box. +#. Select or unselect required for any of the user account identifying + data points (First and last name, E-Mail address, etc.) +#. Add any custom questions to collect additional data. These questions + may be marked as required and set up as free text responses or + multiple choice. For multiple choice responses select Radio Buttons + as the Custom Field Type and enter the possible answers. +#. Any custom question may be removed at any time, so that it won’t show + for the end user. If there are any responses associated with question + that has been removed they will continue to appear in the Guestbook + Response data table. + +.. _openscholar: + +OpenScholar +=========== + +**Embed your Dataverse easily on an OpenScholar site** + +Dataverse integrates seamlessly with +`OpenScholar `__, a self-service site builder for higher education. + +To embed your dataverse on an OpenScholar site: + +#. On your Dataverse Options page, Go to the Setting tab +#. Go to the Customization subtab +#. Click the checkbox that disables customization for your dataverse +#. Make note of your Dataverse alias URL (i.e. 
+ `http://thedata.harvard.edu/dvn/dv/myvalue `__) +#. Follow the `OpenScholar Support Center + instructions `__ to + enable the Dataverse App + +.. _enabling-lockss-access-to-the-dataverse: + +Enabling LOCKSS access to the Dataverse +======================================= + +**Summary:** + +`LOCKSS Project `__ or *Lots +of Copies Keeps Stuff Safe* is an international initiative based at +Stanford University Libraries that provides a way to inexpensively +collect and preserve copies of authorized e-content. It does so using an +open source, peer-to-peer, decentralized server infrastructure. In order +to make a LOCKSS server crawl, collect and preserve content from a DVN, +both the server (the LOCKSS daemon) and the client (the DVN) sides must +be properly configured. In simple terms, the LOCKSS server needs to be +pointed at the DVN, given its location and instructions on what to +crawl, the entire network, or a particular Dataverse; on the DVN side, +access to the data must be authorized for the LOCKSS daemon. The section +below describes the configuration tasks that the administrator of a +Dataverse will need to do on the client side. It does not describe how +LOCKSS works and what it does in general; it's a fairly complex system, +so please refer to the documentation on the `LOCKSS +Project `__\ site for more +information. Some information intended to a LOCKSS server administrator +is available in the :ref:`"Using LOCKSS with DVN" +` of the :ref:`DVN Installers Guide ` +(our primary sysadmin-level manual). + +**Configuration Tasks:** + +In order for a LOCKSS server to access, crawl and preserve any data on a +given Dataverse Network, it needs to be granted an authorization by the +network administrator. (In other words, an owner of a dataverse cannot +authorize LOCKSS access to its files, unless LOCKSS access is configured +on the Dataverse Network level). 
By default, LOCKSS crawling of the +Dataverse Network is not allowed; check with the administrator of +your Dataverse Network for details.  + +But if enabled on the Dataverse Network level, the dataverse owner can +further restrict LOCKSS access. For example, if on the network level all +LOCKSS servers are allowed to crawl all publicly available data, the +owner can limit access to the materials published in his or her +dataverse to select servers only; specified by network address or +domain. + +In order to configure LOCKSS access, navigate to the Advanced tab on the +Options page: + +``Dataverse home page > Options page > Settings tab > Advanced subtab`` + +It's important to understand that when a LOCKSS daemon is authorized to +"crawl restricted files", this does not by itself grant the actual +access to the materials! This setting only specifies that the daemon +should not be skipping such restricted materials outright. If it is +indeed desired to have non-public materials collected and preserved by +LOCKSS, in addition to selecting this option, it will be the +responsibility of the DV Administrator to give the LOCKSS daemon +permission to actually access the files. As of DVN version 3.3, this can +only be done based on the IP address of the LOCKSS server (by creating +an IP-based user group with the appropriate permissions). + +Once LOCKSS crawling of the Dataverse is enabled, the Manifest page +URL will be + +``http``\ ``:///dvn/dv//faces/ManifestPage.xhtml``. + + +Study and Data Administration ++++++++++++++++++++++++++++++ + +Study Options are available for Contributors, Curators, and +Administrators of a Dataverse. + +Create New Study +==================== + +Brief instructions for creating a study: + +Navigate to the dataverse in which you want to create a study, then +click Options->Create New Study. + +Enter at minimum a study title and click Save. Your draft study is now +created. Add additional cataloging information and upload files as +needed. 
Release the study when ready to make it viewable by others. 
+
+**Data Citation widget**
+
+At the top of the edit study form, there is a data citation widget that
+allows a user to quickly enter fields that appear in the data citation,
+i.e., title, author, date, distributor. Otherwise, the information can be
+entered as the fields appear in the data entry form.
+
+See the information below for more details and recommendations for
+creating a study.
+
+**Steps to Create a Study**
+
+#. Enter Cataloging Information, including an abstract of the study.
+   Set Terms of Use for the study in the Cataloging fields, if you choose.
+#. Upload files associated with the study.
+#. Set permissions to access the study, all of the study files, or some
+   of the study files.
+#. Delete your study if you choose, before you submit it for review.
+#. Submit your study for review, to make it available to the public.
+
+There are several guidelines to creating a study:
+
+- You must create a study by performing steps in the specified order.
+- If multiple users edit a study at one time, the first user to click
+   Save assumes control of the file. Only that user's changes are
+   effective.
+- When you save the study, any changes that you make after that do not
+   affect the study's citation.
+
+**Enter Cataloging Information**
+
+To enter the Cataloging Information for a new study:
+
+#. Prepopulate Cataloging Information fields based on a study template
+   (if a template is available), use the Select Study Template pull-down
+   list to select the appropriate template.
+
+   A template provides default values for basic fields in the
+   Cataloging Information fields. The default template prepopulates the
+   Deposit Date field only.
+#. Enter a title in the Title field.
+#. Enter data in the remaining Cataloging Information fields.
+   To list all fields, including the Terms of Use fields, click the Show
+   All Fields button after you enter a title. 
Use the following + guidelines to complete these fields: + + - A light blue background in any form field indicates that HTML, + JavaScript, and style tags are permitted. You cannot use the + ``html`` and ``body`` element types. + - To use the inline help and view information about a field, roll + your cursor over the field title. + - Be sure to complete the Abstract field. + - To set Terms of Use for your study, scroll to the bottom of the Cataloging Information tab. + Eight fields appear under the Terms of Use label. You must + complete at least one of these fields to enable Terms for this + study. +#. Click the *Save* button and then add comments or a brief description + in the Study Version Notes popup. Then click the *Continue* button + and your study draft version is saved. + +**Upload Study Files** + +To upload files associated with a new study: + +#. For each file that you choose to upload to your study, first select + the Data Type from the drop-down list. Then click the Browse button + to select the file, and then click Upload to add each file at a time. + + When selecting a CSV (character-separated values) data type, an SPSS Control Card file is first required. + + When selecting a TAB (tab-delimited) data type, a DDI Control Card file is first required. There is no restriction to the number or types of files that you can upload to the Dataverse Network. + + + There is a maximum file size of 2 gigabytes for each file that you upload. + +#. After you upload one file, enter the type of file in the *Category* + field and then click Save. + If you do not enter a category and click Save, the Category + drop-down list does not have any value. You can create any category + to add to this list. +#. For each file that you upload, first click the check box in front of + the file's entry in the list, and then use the Category drop-down + list to select the type of file that you uploaded. + + Every checked file is assigned the category that you select. 
Be sure 
+   to click the checked box to remove the check before you select a new
+   value in the Category list for another file.
+#. In the Description field, enter a brief message that identifies the
+   contents of your file.
+#. Click Save when you are finished uploading files. **Note:** If you upload a subsettable file, that process takes a few
+   moments to complete. During the upload, the study is not available for editing. When you receive e-mail notification that the
+   subsettable file upload is complete, click *Refresh* to continue editing the study.
+
+   You see the Documentation, Data and Analysis tab of the study page
+   with a list of the uploaded files. For each *subsettable tabular*
+   data set file that you upload, the number of cases and variables and
+   a link to the Data Citation information for that data set are
+   displayed. If you uploaded an SPSS (``.sav`` or ``.por``) file, the
+   Type for that file is changed to *Tab delimited* and the file
+   extension is changed to ``.tab`` when you click Save.
+
+   For each *subsettable network* data set file that you upload, the number of edges and vertices and a link to the Data Citation
+   information for that data set are displayed.
+#. Continue to the next step and set file permissions for the study or
+   its files.
+
+**Study File Tips**
+
+
+Keep in mind these tips when uploading study files to your dataverse:
+
+- The following subsettable file types are supported:
+
+   - SPSS ``sav`` and ``por`` - Versions 7.x to 20.x (See the :ref:`Note on SPSS ingest ` in the Appendix)
+   - STATA ``dta`` - Versions 4 to 12
+   - R ``RData`` - All versions (NEW as of DVN v.3.5! See :ref:`Ingest of R data files ` in the Appendix)
+   - GraphML ``xml`` - All versions
+   - CSV data file with a :ref:`control card `
+   - TAB-delimited data file with a :ref:`DDI XML control card `
+
+- A custom ingest for FITS Astronomical data files has been added in v.3.4. 
(see :ref:`FITS File format Ingest ` in the Appendix) + +- You can add information for each file, including: + + - File name + - Category (documentation or data) + - Description + +- If you upload the wrong file, click the Remove link before you click + Save. + To replace a file after you upload it and save the study, first + remove the file and then upload a new one. +- If you upload a STATA (``.dta``), SPSS (``.sav`` or ``.por``), or + network (``.xml``) file, the file automatically becomes subsettable + (that is, subset and analysis tools are available for that file in + the Network). In this case, processing the file might take some time + and you will not see the file listed immediately after you click + Save. +- When you upload a *subsettable* data file, you are prompted to + provide or confirm your e-mail address for notifications. One e-mail + lets you know that the file upload is in progress; a second e-mail + notifies you when the file upload is complete. +- While the upload of the files takes place, your study is not + available for editing. When you receive e-mail notification that the + upload is completed, click *Refresh* to continue editing the study. + +**Set Study and File Permissions** + +You can restrict access to a study, all of its files, or some of its +files. This restriction extends to the search and browse functions. + +To permit or restrict access: + +#. On the study page, click the Permissions link. +#. To set permissions for the study: + + A. Scroll to the Entire Study Permission Settings panel, and click + the drop-down list to change the study to Restricted or Public. + #. In the *User Restricted Study Settings* field, enter a user or + group to whom you choose to grant access to the study, then click + Add. + + To enable a request for access to restricted files in the study, + scroll to the File Permission Settings panel, and click the + Restricted File Settings check box. 
This supplies a request link on
+   the Data, Documentation and Analysis tab for users to request access
+   to restricted files by creating an account.
+
+
+   To set permission for individual files in the study:
+
+   A. Scroll to the Individual File Permission Settings panel, and enter
+      a user or group in the Restricted File User Access *Username*
+      field to grant permissions to one or more individual files.
+   #. Use the File Permission pull-down list and select the permission
+      level that you choose to apply to selected files: Restricted or
+      Public.
+   #. In the list of files, click the check box for each file to which
+      you choose to apply permissions.
+      To select all files, click the check box at the top of the list.
+   #. Click Update.
+      The users or groups to which you granted access privileges appear
+      in the File Permissions list after the selected files.
+
+Note: You can edit or delete your study if you choose, but only until
+you submit the study for review. After you submit your study for review,
+you cannot edit or delete it from the dataverse.
+
+
+**Delete Studies**
+
+You can delete a study that you contribute, but only until you submit
+that study for review. After you submit your study for review, you
+cannot delete it from the dataverse.
+
+If a study is no longer valid, it can now be deaccessioned so it's
+unavailable to users but still has a working citation. A reference to a
+new study can be provided when deaccessioning a study. Only Network
+Administrators can now permanently delete a study once it has been
+released.
+
+To delete a draft version:
+
+#. Click the Delete Draft Version link in the top-right area of the
+   study page.
+
+   You see the Delete Draft Study Version popup.
+#. Click the Delete button to remove the draft study version from the
+   dataverse.
+
+To deaccession a study:
+
+#. Click the Deaccession link in the top-right area of the study page.
+   You see the Deaccession Study page.
+#. 
You have the option to add your comments about why the study was + deaccessioned, and a link reference to a new study by including the + Global ID of the study. +#. Click the Deaccession button to remove your study from the + dataverse. + +**Submit Study for Review** + +When you finish setting options for your study, click *Submit For +Review* in the top-right corner of the study page. The page study +version changes to show *In Review*. + +You receive e-mail after you click *Submit For Review*, notifying you +that your study was submitted for review by the Curator or Dataverse +Admin. When a study is in review, it is not available to the public. You +receive another e-mail notifying you when your study is released for +public use. + +After your study is reviewed and released, it is made available to the +public, and it is included in the search and browse functions. The +Cataloging Information tab for your study contains the Citation +Information for the complete study. The Documentation, Data and Analysis +tab lists the files associated with the study. For each subsettable file +in the study, a link is available to show the Data Citation for that +specific data set. + + +**UNF Calculation** + +When a study is created, a UNF is calculated for each subsettable file +uploaded to that study. All subsettable file UNFs then are combined to +create another UNF for the study. If you edit a study and upload new +subsettable files, a new UNF is calculated for the new files and for the +study. + +If the original study was created before version 2.0 of the Dataverse +Network software, the UNF calculations were performed using version 3 of +that standard. If you upload new subsettable files to an existing study +after implementation of version 2.0 of the software, the UNFs are +recalculated for all subsettable files and for the study using version 5 +of that standard. This prevents incompatibility of UNF version numbers +within a study. 
+ +Manage Studies +================== + +You can find all studies that you uploaded to the dataverse, or that +were submitted by a Contributor for review. Giving you access to view, +edit, release, or delete studies. + + +**View, Edit, and Delete/Deaccession Studies** + +To view and edit studies that you uploaded: + +#. Click a study Global ID, title, or *Edit* link to go to the study + page. +#. From the study page, do any of the following: + + - Edit Cataloging Information + - Edit/Delete File + Information + - Add File(s) + - Edit Study Version Notes + - Permissions + - Create Study Template + - Release + - Deaccession + - Destroy Study + +To delete or deaccession studies that you uploaded: + +#. If the study has not been released, click the *Delete* link to open + the Delete Draft Study Version popup. +#. If the study has been released, click the *Deaccession* link to open + the Deaccession Study page. +#. Add your comments about why the study was deaccessioned, and a + reference link to another study by including the Global ID, then + click the *Deaccession* button. + +**Release Studies** + +When you release a study, you make it available to the public. Users can +browse it or search for it from the dataverse or Network homepage. + +You receive e-mail notification when a Contributor submits a study for +review. You must review each study submitted to you and release that +study to the public. You receive a second e-mail notification after you +release a study. + +To release a study draft version: + +#. Review the study draft version by clicking the Global ID, or title, + to go to the Study Page, then click Release in the upper right + corner. For a quick release, click *Release* from the Manage Studies + page. +#. If the study draft version is an edit of an existing study, you will + see the Study Version Differences page. The table allows you to view + the changes compared to the current public version of the study. + Click the *Release* button to continue. 
+#. Add comments or a brief description in the Study Version Notes popup. + Then click the *Continue* button and your study is now public. + +Manage Study Templates +====================== + +You can set up study templates for a dataverse to prepopulate any of +the Cataloging Information fields of a new study with default values. +When a user adds a new study, that user can select a template to fill in +the defaults. + + +**Create Template** + +Study templates help to reduce the work needed to add a study, and to +apply consistency to studies within a dataverse. For example, you can +create a template to include the Distributor and Contact details so that +every study has the same values for that metadata. + +To create a new study template: + +#. Click Clone on any Template. +#. You see the Study Template page. +#. In the Template Name field, enter a descriptive name for this + template. +#. Enter generic information in any of the Cataloging Information + metadata fields.  You may also change the input level of any field to + make a certain field required, recommended, optional or hidden. +  Hidden fields will not be visible to the user creating studies from + the template. +#. After you complete entry of generic details in the fields that you + choose to prepopulate for new studies, click Save to create the + template. + +Note: You also can create a template directly from the study page to +use that study's Cataloging Information in the template. + + +**Enable a template** + +Click the Enabled link for the given template. Enabled templates are +available to end users for creating studies. + + +**Edit Template** + +To edit an existing study template: + +#. In the list of templates, click the Edit link for the template that + you choose to edit. +#. You see the Study Template page, with the template setup that you + selected. +#. Edit the template fields that you choose to change, add, or remove. + +Note: You cannot edit any Network Level Template. 
+ + +**Make a Template the Default** + +To set any study template as the default template that applies +automatically to new studies: +In the list of templates, click the Make Default link next to the name +of the template that you choose to set as the default. +| The Current Default Template label is displayed next to the name of +the template that you set as the default. + +| **Remove Template** +| To delete a study template from a dataverse: + +#. In the list of templates, click the Delete link for the template that + you choose to remove from the dataverse. +#. You see the Delete Template page. +#. Click Delete to remove the template from the dataverse. + +Note:  You cannot delete any network template, default template or +template in use by any study. + +Data Uploads +================ + +**Troubleshooting Data Uploads:** + +Though the add files page works for the majority of our users, there can +be situations where uploading files does not work. Below are some +troubleshooting tips, including situations where uploading a file might +fail and things to try. + + +**Situations where uploading a file might fail:** + +#. File is too large, larger than the maximum size, should fail immediately with an error. +#. File takes too long and connection times out (currently this seems to happen after 5 mins) Failure behavior is vague, depends + on browser. This is probably an IceFaces issue. +#. User is going through a web proxy or firewall that is not passing through partial submit headers. There is specific failure + behavior here that can be checked and it would also affect other web site functionality such as create account link. See + redmine ticket `#2352 `__. +#. AddFilesPage times out, user begins adding files and just sits there idle for a long while until the page times out, should + see the red circle slash. +#. For subsettable files, there is something wrong with the file + itself and so is not ingested. 
In these cases they should upload as other and we can test here. +#. For subsettable files, there is something wrong with our ingest code that can't process something about that particular file, + format, version. +#. There is a browser specific issue that is either a bug in our + software that hasn't been discovered or it is something unique to their browser such as security settings or a conflict with a + browser plugin like developer tools. Trying a different browser such as Firefox or Chrome would be a good step. +#. There is a computer or network specific issue that we can't determine such as a firewall, proxy, NAT, upload versus download + speed, etc. Trying a different computer at a different location might be a good step. +#. They are uploading a really large subsettable file or many files and it is taking a really long time to upload. +#. There is something wrong with our server such as it not responding. +#. Using IE 8, if you add 2 text or pdf files in a row it won't upload but if you add singly or also add a subsettable file they + all work. Known issue, reported previously, `#2367 `__ + + +**So, general information that would be good to get and things to try would be:** + +#. Have you ever been able to upload a file? +#. Does a small text file work? +#. Which browser and operating system are you using? Can you try Firefox or Chrome? +#. Does the problem affect some files or all files? If some files, do they work one at a time? Are they all the same type such as + Stata or SPSS? Which version? Can they be saved as a supported version, e.g. Stata 12 or SPSS 20? Upload them as type "other" + and we'll test here. +#. Can you try a different computer at a different location? +#. Last, we'll try uploading it for you (may need DropBox to facilitate upload). + +.. _manage-collections: + +Manage Collections +=================== + +Collections can contain studies from your own dataverse or another, +public dataverse in the Network. 
+ + +**Create Collection** + +You can create new collections in your dataverse, but any new collection +is a child of the root collection except for Collection Links. When you +create a child in the root collection, you also can create a child +within that child to make a nested organization of collections. The root +collection remains the top-level parent to all collections that are not +linked from another dataverse. + +There are three ways in which you can create a collection: + +- Static collection - You assign specific studies to this type of + collection. +- Dynamic collection - You can create a query that gathers studies into + a collection based on matching criteria, and keep the contents + current. If a study matches the query selection criteria one week, + then is changed and no longer matches the criteria, that study is + only a member of the collection as long as it's criteria matches the + query. +- Linked collection - You can link an existing collection from another + dataverse to your dataverse homepage. Note that the contents of that + collection can be edited only in the originating dataverse. + +**Create Static Collection by Assigning Studies** + +To create a collection by assigning studies directly to it: + +#. Locate the root collection to create a direct subcollection in the + root, or locate any other existing collection in which you choose + create a new collection. Then, click the *Create* link in the Create + Child field for that collection. + + You see the Study Collection page. +#. In the Type field, click the Static option. +#. Enter your collection Name. +#. Select the Parent in which you choose to create the collection. + The default is the collection in which you started on the *Manage + Collections* page. You cannot create a collection in another + dataverse unless you have permission to do so. +#. 
Populate the Selected Studies box:
+
+   - Click the *Browse* link to use the Dataverse and Collection
+     pull-down lists to create a list of studies.
+   - Click the *Search* link to select a query field and search for
+     specific studies, enter a term to search for in that query field,
+     and then click Search.
+
+   A list of available studies is displayed in the Studies to Choose
+   from box.
+
+#. In the Studies to Choose from box, click a study to assign it to your
+   collection.
+
+
+   You see the study you clicked in the Selected Studies box.
+#. To remove studies from the list of Selected Studies, click the study
+   in that box.
+
+   The study is removed from the Selected Studies box.
+#. If needed, repopulate the Studies to Choose from box with new
+   studies, and add additional studies to the Studies Selected list.
+
+**Create Linked Collection**
+
+You can create a collection as a link to one or more collections from
+other dataverses, thereby defining your own collections for users to
+browse in your dataverse.
+
+Note: A collection created as a link to a collection from another
+dataverse is editable only in the originating dataverse. Also,
+collections created by use of this option might not adhere to the
+policies for adding Cataloging Information and study files that you
+require in your own dataverse.
+
+To create a collection as a link to another collection:
+
+#. In the Linked Collections field, click Add Collection Link.
+
+   You see the Add Collection Link window.
+#. Use the Dataverse pull-down list to select the dataverse from which
+   you choose to link a collection.
+#. Use the Collection pull-down list to select a collection from your
+   selected dataverse to add a link to that collection in your
+   dataverse.
+
+   The collection you select will be displayed in your dataverse
+   homepage, and will be included in your dataverse searches. 
+ +**Create Dynamic Collection as a Query** + +When you create a collection by assigning the results of a query to it, +that collection is dynamic and is updated regularly based on the query +results. + +To create a collection by assigning the results of a query: + +#. Locate the root collection to create a direct subcollection in the + root, or locate any other existing collection in which you choose + to create a new collection. Then, click the *Create* link in the Create + Child field for that collection. + + You see the Study Collection page. +#. In the Type field, click the Dynamic option. +#. Enter your collection Name. +#. Select the Parent in which you choose to create the collection. + + The default is the collection in which you started on the *Manage Collections* page. You cannot create a collection in another + dataverse unless you have permission to do so. +#. Enter a Description of this collection. +#. In the Enter query field, enter the study field terms for which to + search to assign studies with those terms to this collection. + Use the following guidelines: + + - Almost all study fields can be used to build a collection query. + + The study fields must be entered in the appropriate format to + search the fields' contents. + - Use the following format for your query: + ``title:Elections AND keywordValue:world``. + + For more information on query syntax, refer to the + `Documentation `__ page at + the Lucene website and look for *Query Syntax*. See the + `cataloging fields `__ + document for field query names. + - For each study in a dataverse, the Study Global Id field in the + Cataloging Information consists of three query terms: + ``protocol``, ``authority``, and ``globalID``. + + If you build a query using ``protocol``, your collection can + return any study that uses the ``protocol`` you specified. + + If you build a query using all three terms, your collection + returns only one study. + +#. 
To limit this collection to search for results in your own dataverse, + click the *Only your dataverse* check box. + +**Edit Collections** + +#. Click a collection title to edit the contents or setup of that + collection. + + You see the Collection page, with the current collection settings + applied. +#. Change, add, or delete any settings that you choose, and then click + Save Collection to save your edits. + +**Delete Collections or Remove Links** + +To delete existing static or dynamic collections: + +#. For the collection that you choose to delete, click the Delete link. +#. Confirm the delete action to remove the collection from your + dataverse. + +To remove existing linked collections: + +#. For the linked collection that you choose to remove, click the + *Remove* link. (Note: There is no confirmation for a Remove action. + When you click the Remove link, the Dataverse Network removes the linked collection immediately.) + +Managing User File Access +========================== + +User file access is managed through a set of access permissions that +together determine whether or not a user can access a particular file, +study, or dataverse. Generally speaking, there are three places where +access permissions can be configured: at the dataverse level, at the +study level, and at the file level. Think of each of these as a security +perimeter or lock with dataverse being the outermost perimeter, study +the next, and finally the file level. When configuring user file access, +it might be helpful to approach this from the dataverse access level +first and so on. + +For example, a user would like access to a particular file. Since files +belong to studies and studies belong to dataverses, first determine +whether the user has access to the dataverse. If the dataverse is +released, all users have access to it. If it is unreleased, the user +must appear in the User Permissions section on the dataverse permissions +page. 
+ +Next, they would need access to the study. If the study is public, then +everyone has access. If it is restricted, the user must appear in the +User Restricted Study Settings section on the study permissions page. + +Last, they would need access to the file. If the file is public, +everyone has access. If the file is restricted, then the user must be +granted access. + +**There are two ways a file can be restricted.** + +First, on the dataverse permissions page, all files in the dataverse +could be restricted using Restrict ALL files in this Dataverse. To +enable user access in this case, add the username to the Restricted File +User Access section on this page. + +Second, individual files can be restricted at the study level on the +study permissions page in the "Files" subtab. These can be restricted on a file-by-file basis. +If this is the case, the file(s) will be displayed +as restricted in the Individual File Permission Settings section. To +enable user access to a particular file in this case, check the file to +grant access to, type the username in the Restricted File User Access +section, click update so their name appears next to the file, then click +save. + +Another option at the study level when restricting files is to allow users the ability to +request access to restricted files. This can be done in the study Permissions page in the "Files" subtab where +you must first select the files you want to restrict, click on "update permissions" to restrict, and then under +"File Permission Settings" check off the box to "Allow users to request access..." and click on Save at the bottom +of the page. The contact(s) set for the Dataverse (``Dataverse Options > Settings > General``) will get an email +notification each time a user sends a request. The request access email will display a list of the file(s) +requested and a DOI or Handle for the study. 
To approve or deny access to these file(s) go back to the study +permissions page under the "Files" subtab and Approve or Deny the specific files that were requested. If you +choose to deny any files you will have the option to add a reason why. Be sure to remember to click on the "update" +button and then select Save so that your selections are saved and an email is sent to the requestor granting or +denying them access. The email then sent to the requestor will list out which files were approved with a DOI or +Handle URL, and any files which were denied along with any reasons that may have been provided. + +Finally, a somewhat unusual configuration could exist where both +Restrict all files in a dataverse is set and an individual file is +restricted. In this case access would need to be granted in both places +— think of it as two locks. This last situation is an artifact of +integrating these two features and will be simplified in a future +release. + +Network Administration ++++++++++++++++++++++++ + +The Dataverse Network provides several options for configuring and +customizing your application. To access these options, login to the +Dataverse Network application with an account that has Network +Administrator privileges. By default, a brand new installation of the +application will include an account of this type - the username and +password is 'networkAdmin'. + +After you login, the Dataverse Network home page links to the Options +page from the "Options" gear icon, in the menu bar. Click on the icon to +view all the options available for customizing and configuring the +applications, as well as some network administrator utilities. + +The following tasks can be performed from the Options page: + +- Manage dataverses, harvesting, exporting, and OAI sets - Create, + edit, and manage standard and harvesting dataverses, manage + harvesting schedules, set study export schedules, and manage OAI + harvesting sets. 
+- Manage subnetworks - Create, edit, and manage subnetworks, manage network and subnetwork level study templates. +- Customize the Network pages and description - Brand your Network and + set up your Network e-mail contact. +- Set and edit Terms of Use - Apply Terms of Use at the Network level + for accounts, uploads, and downloads. +- Create and manage user accounts and groups and Network privileges, + and enable option to create a dataverse - Manage logins, permissions, + and affiliate access to the Network. +- Use utilities and view software information - Use the administrative + utilities and track the current Network installation. + +Dataverses Section +==================== + +Create a New Dataverse +------------------------- + +A dataverse is a container for studies and is the home for an individual +scholar's or organization's data. + +Creating a dataverse is easy but first you must be a registered user. +Depending on site policy, there may be a link on the Network home page, +entitled "Create a Dataverse". This first walks you through creating an +account, then a dataverse. If this is not the case on your site, log in, +then navigate to the Create a New Dataverse page and complete the +required information. That's it! + +#. Navigate to the Create a New Dataverse page: + Network home page > Options page >Dataverses tab > Dataverse subtab > "Create Dataverse" link. +#. Fill in the required information: + + + **Type of Dataverse** + + + Choose Scholar if it represents an individual's work otherwise choose Basic. + + + **Dataverse Name** + + + This will be displayed on the network and dataverse home + pages. If this is a Scholar dataverse it will automatically be + filled in with the scholar's first and last name. + + + **Dataverse Alias** + + + This is an abbreviation, usually lower-case, that becomes part of the URL for the new dataverse. +#. Click Save and you're done! 
+ + An email will be sent to you with more information, including + the URL to access your new dataverse. + +**Required information** can vary depending on site policy. Required fields are noted with a red asterisk. + +Note: If "Allow users to create a new Dataverse when they create an account" is enabled, there is a Create a Dataverse link on the Network home page. + +Manage Dataverses +-------------------- + +As dataverses increase in number it's useful to view summary information +in table form and quickly locate a dataverse of interest. The Manage +Dataverse table does just that. + +Navigate to Network home page > Options page > Dataverses tab > +Dataverses subtab > Manage Dataverse table: + +- Dataverses are listed in order of most recently created. +- Clicking on a column name sorts the list by that column such as Name + or Affiliation. +- Clicking on a letter in the alpha selector displays only those + dataverses beginning with that letter. +- Move through the list of dataverses by clicking a page number or the + forward and back buttons. +- Click Delete to remove a dataverse. + +Subnetwork Section +====================== + +A subnetwork is a container for a group of dataverses. Users will be able to create their dataverses in a particular subnetwork. It may include its own branding and its own custom study templates. + +Create a New Subnetwork +------------------------ + +You must be a network admin in order to create a subnetwork. These are the steps to create a subnetwork: + +#. Navigate to Create a New Subnetwork Page: + Network home page > Options page > Subnetworks tab > Create Subnetwork Link + +#. Fill in required information: + + **Subnetwork Name** + + The name to be displayed in the menubar. Please use a short name. + + **Subnetwork Alias** + + Short name used to build the URL for this Subnetwork. It is case sensitive. + + **Subnetwork Short Description** + + This short description is displayed on the Network Home page + +#. 
Fill in Optional Branding + These fields include a logo file, Subnetwork affiliation, description, and custom banner and footer. + +#. Click Save and you’re done! + + +Manage Subnetworks +-------------------- + +The Manage Subnetworks page gives summary information about all of the subnetworks in your installation. + +Navigate to Network home page > Options Page > Subnetworks tab: + +- Subnetworks are listed alphabetically +- Clicking on a column name sorts the list by that column +- Click Edit to edit the subnetwork’s information or branding +- Click Delete to remove a subnetwork. Note: this will not remove the dataverses assigned to the subnetwork. The dataverses will remain and may be reassigned to another subnetwork. + + +Manage Classifications +------------------------ + +Classifications are a way to organize dataverses on the network home +page so they are more easily located. They appear on the left side of +the page and clicking on a classification causes corresponding +dataverses to be displayed. An example classification might be +Organization, Government. + +Classifications typically form a hierarchy defined by the network +administrator to be what makes sense for a particular site. A top level +classification could be Organization, the next level Association, +Business, Government, and School. + +The classification structure is first created on the Options page, from +the Manage Classifications table. Once a classification is created, +dataverses can be assigned to it either when the dataverse is first +created or later from the Options page: Network home page > (Your) +Dataverse home page > Options page > Settings tab > General subtab. 
+ +To manage classifications, navigate to the Manage Classifications table: + +Network home page > Options page > Classifications tab > Manage +Classifications table + +From here you can view the current classification hierarchy, create a +classification, edit an existing classification including changing its +place in the hierarchy, and delete a classification. + +Manage Study Comments Notifications +--------------------------------------- + +Dataverse admins can enable or disable a User Comment feature within +their dataverses. If this feature is enabled, users are able to add +comments to studies within that dataverse. Part of the User Comment +feature is the ability for users to report comments as abuse if they +deem that comment to be inappropriate in some way. + +Note that it is a best practice to explicitly define terms of use +regarding comments when the User Comments feature is enabled. If you +define those terms at the Network level, then any study to which +comments are added include those terms. + +When a user reports another's comment as abuse, that comment is listed +on the Manage Study Comment Notifications table on the Options page. For +each comment reported as abuse, you see the study's Global ID, the +comment reported, the user who posted the comment, and the user who +reported the comment as abuse. + +There are two ways to manage abuse reports: In the Manage Study Comment +Notifications table on the Options page, and on the study page User +Comments tab. In both cases, you have the options to remove the comment +or to ignore the abuse report. 
+ +The Manage Study Comments Notifications table can be found here: + +Network home page > Options page > Dataverses tab > Study Comments +subtab > Manage Study Comment Notifications table + +Manage Controlled Vocabulary +---------------------------------- + +You can set up controlled vocabulary for a dataverse network to give the +end user a set list of choices to select from for most fields in a study +template. Study fields which do not allow controlled vocabulary include +the study title and subtitle, certain date fields and geographic +boundaries. + +To **manage controlled vocabulary**, navigate to the Manage Controlled +Vocabulary table: + +``Network home page > Options page > Vocabulary tab > Manage Controlled Vocabulary table`` + + +**To create a new controlled vocabulary:** + +#. Click Create New Controlled Vocabulary. +#. You see the Edit Controlled Vocabulary page. +#. In the Name field, enter a descriptive name for this Controlled + Vocabulary. In the Description field enter any additional information + that will make it easier to identify a particular controlled + vocabulary item to assign to a given custom field. In the Values + field enter the controlled vocabulary values that you want to make + available to users for a study field. Here you can submit an entire list of terms at once. Use the "add" and "remove" buttons + to add or subtract values from the list. You may also copy and paste a list of values separated by carriage returns. +#. After you complete entry of values, click Save to create the + controlled vocabulary. + +**Edit Controlled Vocabulary** + + +To edit an existing controlled vocabulary: + +#. In the list of controlled vocabulary, click the Edit link for the + controlled vocabulary that you choose to edit. You see the Edit + Controlled Vocabulary page, with the controlled vocabulary setup that + you selected. +#. Edit the controlled vocabulary items that you choose to change, add, + or remove. 
You may also copy and paste a list of values separated by carriage returns. + +Manage Network Study Templates +------------------------------------- + +You can set up study templates for a dataverse network to prepopulate +any of the Cataloging Information fields of a new study with default +values. Dataverse administrators may clone a Network template and modify +it for users of that dataverse. You may also change the input level of +any field to make a certain field required, recommended, optional, +hidden or disabled. Hidden fields will not be available to the user, but +will be available to the dataverse administrator for update in cloned +templates. Disabled field will not be available to the dataverse +administrator for update. You may also add your own custom fields. When +a user adds a new study, that user can select a template to fill in the +defaults. + +To manage study templates, navigate to the Manage Study Templates table: + +``Network home page > Options page > Templates tab > Manage Study Templates table`` + + +**Create Template** + +Study templates help to reduce the work needed to add a study, and to +apply consistency to studies across a dataverse network. For example, +you can create a template to include the Distributor and Contact details +so that every study has the same values for that metadata. + +To create a new study template: + +#. Click Create New Network Template. +#. You see the Study Template page. +#. In the Template Name field, enter a descriptive name for this + template. +#. Enter generic information in any of the Cataloging Information + metadata fields. You can also add your own custom fields to the Data + Collection/Methodology section of the template. Each custom field + must be assigned a Name, Description and Field Type. You may also + apply controlled vocabulary to any of the custom fields that are set + to Plain Text Input as Field Type. +#. 
After you complete entry of generic details in the fields that you + choose to prepopulate for new studies, click Save to create the + template. + +**Enable a template** + +Click the Enabled link for the given template. Enabled templates are +available to database administrators for cloning and end users for +creating studies. + + +**Edit Template** + +To edit an existing study template: + +#. In the list of templates, click the Edit link for the template that + you choose to edit. +#. You see the Study Template page, with the template setup that you + selected. +#. Edit the template fields that you choose to change, add, or remove. + +**Make a Template the Default** + +To set any study template as the default template that applies +automatically to the creation of new network templates: + + +In the list of templates, click the Make Default Selection link next to the name +of the template that you choose to set as the default for a subnetwork(s). A pop-up window with the names of the subnetworks will appear and you may select the appropriate subnetworks. The subnetwork name(s) is displayed in the Default column of the template that you set as the +default for each given subnetwork. + +**Remove Template** + +To delete a study template from a dataverse: + +#. In the list of templates, click the Delete link for the template that + you choose to remove from the network. +#. You see the Delete Template page. +#. Click Delete to remove the template from the network. Note that you + cannot delete any template that is in use or is a default template at + the network or dataverse level. + +Harvesting Section +======================= + +Create a New Harvesting Dataverse +------------------------------ + +A harvesting dataverse allows studies from another site to be imported +so they appear to be local, though data files remain on the remote site. 
+ +This makes it possible to access content from data repositories and +other sites with interesting content as long as they support the OAI or +Nesstar protocols. + +Harvesting dataverses differ from ordinary dataverses in that study +content cannot be edited since it is provided by a remote source. Most +dataverse functions still apply including editing the dataverse name, +branding, and setting permissions. + +Aside from providing the usual name, alias, and affiliation information, +creating a harvesting dataverse involves specifying the harvest +protocol, OAI or Nesstar, the remote server URL, possibly format and set +information, whether or how to register incoming studies, an optional +harvest schedule, and permissions settings. + +To create a harvesting dataverse navigate to the Create a New Harvesting +Dataverse page: + +``Network home page > Options page > Harvesting tab > Harvesting Dataverses subtab > "Create Harvesting Dataverse" link`` + +Complete the form by entering required information and click Save. + +An example dataverse to harvest studies native to the Harvard dataverse: + +- **Harvesting Type:** OAI Server +- **Dataverse Name:** Test IQSS Harvest +- **Dataverse Alias:** testiqss +- **Dataverse Affiliation:** Our Organization +- **Server URL:** `http://dvn.iq.harvard.edu/dvn/OAIHandler `__ +- **Harvesting Set:** No Set (harvest all) +- **Harvesting Format:** DDI +- **Handle Registration:** Do not register harvested studies (studies must already have a handle) + +Manage Harvesting +-------------------- + +Harvesting is a background process meaning once initiated, either +directly or via a timer, it conducts a transaction with a remote server +and exits without user intervention. Depending on site policy and +considering the update frequency of remote content this could happen +daily, weekly, or on-demand. How does one determine what happened? By +using the Manage Harvesting Dataverses table on the Options page. 
+ +To manage harvesting dataverses, navigate to the **Manage Harvesting +Dataverses** table: + +``Network home page > Options page > Harvesting tab > Harvesting Dataverses subtab > Manage Harvesting Dataverses table`` + +The Manage Harvesting table displays all harvesting dataverses, their +schedules, and harvest results in table form. The name of each +harvesting dataverse is a link to that harvesting dataverse's +configuration page. The schedule, if configured, is displayed along with +a button to enable or disable the schedule. The last attempt and result +is displayed along with the last non-zero result. It is possible for the +harvest to check for updates and there are none. A Run Now button +provides on-demand harvesting and a Remove link deletes the harvesting +dataverse. + +Note: the first time a dataverse is harvested the entire catalog is +harvested. This may take some time to complete depending on size. +Subsequent harvests check for additions and changes or updates. + +Harvest failures can be investigated by examining the import and server +logs for the timeframe and dataverse in question. + +Schedule Study Exports +------------------------ + +Sharing studies programmatically or in batch such as by harvesting +requires information about the study or metadata to be exported in a +commonly understood format. As this is a background process requiring no +user intervention, it is common practice to schedule this to capture +updated information. + +Our export process generates DDI, Dublin Core, Marc, and FGDC formats +though DDI and Dublin Core are most commonly used. Be aware that +different formats contain different amounts of information with DDI +being most complete because it is our native format. 
+ +To schedule study exports, navigate to the Harvesting Settings subtab: + +``Network home page > Options page > Harvesting tab > Settings subtab > Export Schedule`` + +First enable export then choose frequency: daily using hour of day or +weekly using day of week. Click Save and you are finished. + +To disable, just choose Disable export and Save. + +Manage OAI Harvesting Sets +----------------------------- + +By default, a client harvesting from the Dataverse Network that does not +specify a set would fetch all unrestricted, locally owned +studies - in other words public studies that were not harvested +from elsewhere. For various reasons it might be desirable to define sets +of studies for harvest such as by owner, or to include a set that was +harvested from elsewhere. This is accomplished using the Manage OAI +Harvesting Sets table on the Options page. + +The Manage OAI Harvesting Sets table lists all currently defined OAI +sets, their specifications, and edit, create, and delete functionality. + +To manage OAI harvesting sets, navigate to the Manage OAI Harvesting +Sets table: + +``Network home page > Options page > Harvesting tab > OAI Harvesting Sets subtab > Manage OAI Harvesting Sets table`` + +To create an OAI set, click Create OAI Harvesting Set, complete the +required fields and Save. The essential parameter that defines the set +is the Query Definition. This is a search query using `Lucene +syntax `__ +whose results populate the set. + +Once created, a set can later be edited by clicking on its name. + +To delete a set, click the appropriately named Delete Set link. + +To test the query results before creating an OAI set, a recommended +approach is to create a :ref:`dynamic study +collection ` using the +proposed query and view the collection contents. Both features use the +same `Lucene +syntax `__ +but a study collection provides a convenient way to confirm the results. 
+ +Generally speaking, basic queries take the form of study metadata +field:value. Examples include: + +- ``globalId:"hdl 1902 1 10684" OR globalId:"hdl 1902 1 11155"``: Include studies with global ids hdl:1902.1/10684 and + hdl:1902.1/11155 +- ``authority:1902.2``: Include studies whose authority is 1902.2. Different authorities usually represent different sources such + as IQSS, ICPSR, etc. +- ``dvOwnerId:184``: Include all studies belonging to dataverse with database id 184 +- ``studyNoteType:"DATAPASS"``: Include all studies that were tagged with or include the text DATAPASS in their study note field. + +**Study Metadata Search Terms:** + +| title +| subtitle +| studyId +| otherId +| authorName +| authorAffiliation +| producerName +| productionDate +| fundingAgency +| distributorName +| distributorContact +| distributorContactAffiliation +| distributorContactEmail +| distributionDate +| depositor +| dateOfDeposit +| seriesName +| seriesInformation +| studyVersion +| relatedPublications +| relatedMaterial +| relatedStudy +| otherReferences +| keywordValue +| keywordVocabulary +| topicClassValue +| topicClassVocabulary +| abstractText +| abstractDate +| timePeriodCoveredStart +| timePeriodCoveredEnd +| dateOfCollection +| dateOfCollectionEnd +| country +| geographicCoverage +| geographicUnit +| unitOfAnalysis +| universe +| kindOfData +| timeMethod +| dataCollector +| frequencyOfDataCollection +| samplingProcedure +| deviationsFromSampleDesign +| collectionMode +| researchInstrument +| dataSources +| originOfSources +| characteristicOfSources +| accessToSources +| dataCollectionSituation +| actionsToMinimizeLoss +| controlOperations +| weighting +| cleaningOperations +| studyLevelErrorNotes +| responseRate +| samplingErrorEstimate +| otherDataAppraisal +| placeOfAccess +| originalArchive +| availabilityStatus +| collectionSize +| studyCompletion +| confidentialityDeclaration +| specialPermissions +| restrictions +| contact +| citationRequirements +| 
depositorRequirements +| conditions +| disclaimer +| studyNoteType +| studyNoteSubject +| studyNoteText + +.. _edit-lockss-harvest-settings: + +Edit LOCKSS Harvest Settings +----------------------------- + +**Summary:** + +`LOCKSS Project `__ or *Lots +of Copies Keeps Stuff Safe* is an international initiative based at +Stanford University Libraries that provides a way to inexpensively +collect and preserve copies of authorized e-content. It does so using an +open source, peer-to-peer, decentralized server infrastructure. In order +to make a LOCKSS server crawl, collect and preserve content from a Dataverse Network, +both the server (the LOCKSS daemon) and the client (the Dataverse Network) sides must +be properly configured. In simple terms, the LOCKSS server needs to be +pointed at the Dataverse Network, given its location and instructions on what to +crawl; the Dataverse Network needs to be configured to allow the LOCKSS daemon to +access the data. The section below describes the configuration tasks +that the Dataverse Network administrator will need to do on the client side. It does +not describe how LOCKSS works and what it does in general; it's a fairly +complex system, so please refer to the documentation on the `LOCKSS Project `__\ site for more +information. Some information intended to a LOCKSS server administrator +is available in the `"Using LOCKSS with Dataverse Network (DVN)" +`__ of the +`Dataverse Network Installers Guide `__ + (our primary sysadmin-level manual).  + +**Configuration Tasks:** + +Note that neither the standard LOCKSS Web Crawler, nor the OAI plugin +can properly harvest materials from a Dataverse Network.  A custom LOCKSS plugin +developed and maintained by the Dataverse Network project is available here: +`http://lockss.hmdc.harvard.edu/lockss/plugin/DVNOAIPlugin.jar `__. +For more information on the plugin, please see the `"Using LOCKSS with +Dataverse Network (DVN)" `__ section of +the Dataverse Network Installers Guide. 
In order for a LOCKSS daemon to collect DVN +content designated for preservation, an Archival Unit must be created +with the plugin above. On the Dataverse Network side, a Manifest must be created that +gives the LOCKSS daemon permission to collect the data. This is done by +completing the "LOCKSS Settings" section of the: +``Network Options -> Harvesting -> Settings tab.`` + +For the Dataverse Network, LOCKSS can be configured at the network level +for the entire site and also locally at the dataverse level. The network +level enables LOCKSS harvesting but more restrictive policies, including +disabling harvesting, can be configured by each dataverse. A dataverse +cannot enable LOCKSS harvesting if it has not first been enabled at the +network level. + +This "Edit LOCKSS Harvest Settings" section refers to the network level +LOCKSS configuration. + +To enable LOCKSS harvesting at the network level do the following: + +- Navigate to the LOCKSS Settings page: ``Network home page -> Network Options -> Harvesting -> Settings``. +- Fill in the harvest information including the level of harvesting allowed (Harvesting Type, Restricted Data Files), the scope + of harvest by choosing a predefined OAI set, then if necessary a list of servers or domains allowed to harvest. +- It's important to understand that when a LOCKSS daemon is authorized + to "crawl restricted files", this does not by itself grant the actual + access to the materials! This setting only specifies that the daemon + should not be skipping such restricted materials outright. (The idea + behind this is that in an archive with large amounts of + access-restricted materials, if only public materials are to be + preserved by LOCKSS, lots of crawling time can be saved by instructing + the daemon to skip non-public files, instead of having it try to access + them and get 403/Permission Denied). 
If it is indeed desired to have + non-public materials collected and preserved by LOCKSS, it is the + responsibility of the DVN Administrator to give the LOCKSS daemon + permission to access the files. As of DVN version 3.3, this can only be + done based on the IP address of the LOCKSS server (by creating an + IP-based user group with the appropriate permissions). +- Next select any licensing options or enter additional terms, and click "Save Changes".  +- Once LOCKSS harvesting has been enabled, the LOCKSS Manifest page will + be provided by the application. This manifest is read by LOCKSS servers + and constitutes agreement to the specified terms. The URL for the + network-level LOCKSS manifest is + ``http``\ ``:///dvn/faces/ManifestPage.xhtml`` (it will be + needed by the LOCKSS server administrator in order to configure an + *Archive Unit* for crawling and preserving the DVN). + +Settings Section +================== + +Edit Name +----------------- + +The name of your Dataverse Network installation is displayed at the top +of the Network homepage, and as a link at the top of each dataverse +homepage in your Network. + +To create or change the name of your Network, navigate to the Settings +tab on the Options page: + +``Network home page > Options page > Settings tab > General subtab > Network Name`` + +Enter a descriptive title for your Network. There are no naming +restrictions, but it appears in the heading of every dataverse in your +Network, so a short name works best. + +Click Save and you are done! + +Edit Layout Branding +------------------------- + +When you install a Network, there is no banner or footer on any page in +the Network. You can apply any style to the Network pages, such as that +used on your organization's website. You can use plain text, HTML, +JavaScript, and style tags to define your custom banner and footer. If +your website has such elements as a navigation menu or images, you can +add them to your Network pages. 
+ +To customize the layout branding of your Network, navigate to the +Customization subtab on the Options page: + +Network home page > Options page > Settings tab > Customization subtab > +Edit Layout Branding + +Enter your banner and footer content in the Custom Banner and Custom +Footer fields and Save. + +See :ref:`Layout Branding Tips ` for guidelines. + +Edit Description +--------------------- + +By default your Network homepage has the following description: +``A description of your Dataverse Network or announcements may be added here. Use Network Options to edit or remove this text.`` +You can edit that text to describe or announce such things as new +Network features, new dataverses, or maintenance activities. You also +can disable the description to not appear on the homepage. + +To manage the Network description, navigate to: + +``Network home page > Options page > Settings tab > General subtab > Network Description`` + +Create a description by entering your desired content in the text box. +HTML, JavaScript, and style tags are permitted. The ``html`` and +``body`` element types are not allowed. Next enable the description +display by checking the Enable Description in Homepage checkbox. Click +Save and you're done. You can disable the display of the description but +keep the content by unchecking and saving. + +Edit Dataverse Requirements +---------------------------- + +Enforcing a minimum set of requirements can help ensure content +consistency. + +When you enable dataverse requirements, newly created dataverses cannot +be made public or released until the selected requirements are met. +Existing dataverses are not affected until they are edited. Edits to +existing dataverses cannot be saved until requirements are met. 
+ +To manage the requirements, navigate to: + +``Network home page > Options page > Settings tab > Advanced subtab > Release Dataverse Requirements`` + +Available requirements include: + +- Require Network Homepage Dataverse Description +- Require Dataverse Affiliation +- Require Dataverse Classification +- Require Dataverse Studies included prior to release + +Manage E-Mail Notifications +--------------------------- + +The Dataverse Network sends notifications via email for a number of +events on the site, including workflow events such as creating a +dataverse, uploading files, releasing a study, etc. Many of these +notifications are sent to the user initiating the action as well as to +the network administrator. Additionally, the Report Issue link on the +network home page sends email to the network administrator. By default, +this email is sent to +`support@thedata.org `. + +To change this email address navigate to the Options page: + +``Network home page > Options page > Settings tab > General subtab > E-Mail Address(es)`` + +Enter the address of network administrators who should receive these +notifications and Save. + +Please note the Report Issue link when accessed within a dataverse gives +the option of sending notification to the network or dataverse +administrator. Configuring the dataverse administrator address is done +at the dataverse level: +``(Your) Dataverse home page > Options page > Settings tab > General subtab > E-Mail Address(es)`` + +Enable Twitter +--------------------- + +If your Dataverse Network has been configured for Automatic Tweeting, +you will see an option listed as "Enable Twitter." When you click this, +you will be redirected to Twitter to authorize the Dataverse Network +application to send tweets for you. 
+ +To manage the Dataverse Twitter configuration, navigate to: + +``Dataverse home page > Options page > Settings tab > Promote Your Dataverse subtab > Sync Dataverse With Twitter`` + +Once authorized, tweets will be sent for each new dataverse that is +released. + +To disable Automatic Tweeting, go to the options page, and click +"Disable Twitter." + +Terms Section +================= + +Edit Terms for Account Creation +-------------------------------- + +You can set up Terms of Use that require users with new accounts to +accept your terms before logging in for the first time. + +To configure these terms navigate to the Options page: + +``Network home page > Options page > Permissions tab > Terms subtab > Account Term of Use`` + +Enter your required terms as you would like them to appear to users. +HTML, JavaScript, and style tags are permitted. The ``html`` and +``body`` element types are not allowed. Check Enable Terms of Use to +display these terms. Click Save and you are finished. To disable but +preserve your current terms, uncheck the Enable checkbox and save. + +Edit Terms for Study Creation +------------------------------- + +You can set up Terms of Use for the Network that require users to accept +your terms before they can create or modify studies, including adding +data files. These terms are defined at the network level so they apply +across all dataverses. Users will be presented with these terms the +first time they attempt to modify or create a study during each session. + +To configure these terms of use navigate to the Options page: + +``Network home page > Options page > Permissions tab > Terms subtab > Deposit Term of Use`` + +Enter your terms as you would like to display them to the user. HTML, +JavaScript, and style tags are permitted. The ``html`` and ``body`` +element types are not allowed. Check Enable Terms of Use and save. +Uncheck Enable Terms of Use and save to disable but preserve existing +terms of use. 
+ +Edit Terms for File Download +----------------------------- + +You can set up Terms of Use for the Network that require users to accept +your terms before they can download or subset files from the Network. +Since this is defined at the network level it applies to all dataverses. +Users will be presented with these terms the first time they attempt to +download a file or access the subsetting and analysis page each session. + +To configure these terms, navigate to the Options page: + +``Network home page > Options page > Permissions tab > Terms subtab > Download Term of Use`` + +Enter the terms as you want them to appear to the user. HTML, +JavaScript, and style tags are permitted. The ``html`` and ``body`` +element types are not allowed. Check Enable Terms of Use and save. +Unchecking the checkbox and saving disables the display of the terms but +preserves the current content. + +Download Tracking Data +---------------------------- + +You can view any guestbook responses that have been made in all +dataverses. Beginning with version 3.2 of Dataverse Network, for any +dataverse where the guestbook is not enabled data will be collected +silently based on the logged in user or anonymously. The data displayed +includes user account data or the session id of an anonymous user, the +global ID, study title and filename of the file downloaded, the time of +the download, the type of download and any custom questions that have +been answered. The username/session ID and download type were not +collected in the 3.1 version of DVN. A comma separated values file of +all download tracking data may be downloaded by clicking the Export +Results button. 
+ +To manage the Network download tracking data, navigate to: + +``Network home page > Options page > Permissions tab > Download Tracking Data subtab > Manage Download Tracking Data table`` + +Permissions and Users Section +============================== + +Manage Network Permissions +--------------------------------------- + +Permissions that are configured at the network level include: + +- Enabling users to create an account when they create a dataverse. +- Granting privileged roles to existing users including network + administrator and dataverse creator. +- Changing and revoking privileged roles of existing users. + +Enabling users to create an account when they create a dataverse +displays a "Create a Dataverse" link on the network home page. New and +unregistered users coming to the site can click on this link, create an +account and a dataverse in one workflow rather than taking two separate +steps involving the network administrator. + +Granting a user account network administrator status gives that user +full control over the application as managed through the UI. + +Granting a user account dataverse creator status is somewhat a legacy +function since any user who creates a dataverse has this role. + +To manage these permissions, navigate to the Manage Network Permissions +table on the Options page: + +``Network home page > Options page > Permissions tab > Permissions subtab > Manage Network Permissions table`` + +Enable account with dataverse creation by checking that option and +saving. + +Granting privileged status to a user requires entering a valid, existing +user name, clicking add, choosing the role, then saving changes. 
+
+Roles by Version State Table
+------------------------------
+
++---------------------+-----------+----------------+------------------+------------------+---------------------+
+|                     | **Role**  |                |                  |                  |                     |
++=====================+===========+================+==================+==================+=====================+
+| **Version State**   | None      | Contributor +, | Curator          | Admin            | Network Admin**     |
+|                     |           | ++             |                  |                  |                     |
++---------------------+-----------+----------------+------------------+------------------+---------------------+
+| Draft               |           | E,E2,D3,S,V    | E,E2,P,T,D3,R,V  | E,E2,P,T,D3,R,V  | E,E2,P,T,D3,D2,R,V  |
++---------------------+-----------+----------------+------------------+------------------+---------------------+
+| In Review           |           | E,E2,D3,V      | E,E2,P,T,D3,R,V  | E,E2,P,T,D3,R,V  | E,E2,P,T,D3,R,D2,V  |
++---------------------+-----------+----------------+------------------+------------------+---------------------+
+| Released            | V         | E,V            | E,P,T,D1,V       | E,P,T,D1,V       | E,P,T,D2,D1,V       |
++---------------------+-----------+----------------+------------------+------------------+---------------------+
+| Archived            | V         | V              | P,T,V            | P,T,V            | P,T,D2,V            |
++---------------------+-----------+----------------+------------------+------------------+---------------------+
+| Deaccessioned       |           |                | P,T,R2,V         | P,T,R2,V         | P,T,R2,D2,V         |
++---------------------+-----------+----------------+------------------+------------------+---------------------+
+
+
+**Legend:**
+
+E = Edit (Cataloging info, File meta data, Add files)
+
+E2 = Edit Study Version Notes
+
+D1 = Deaccession
+
+P = Permission
+
+T = Create Template
+
+D2 = Destroy
+
+D3 = Delete Draft, Delete Review Version
+
+S = Submit for Review
+
+R = Release
+
+R2 = Restore
+
+V = View
+
+ 
+
+**Notes:**
+
+*\Same as Curator
+
+**\Same as Curator + D2
+
++\ Contributor actions (E,D3,S,V) depend on new DV permission settings. 
A +contributor role can act on their own studies (default) or all studies +in a dv, and registered users can become contributors and act on their +own studies or all studies in a dv. + +++ A contributor is defined either as a contributor role or as any +registered user in a DV that allows all registered users to contribute. + +  + +Authorization to access Terms-protected files via the API +-------------------------------------------------------------------- + +As of DVN v. 3.2, a programmatic API has been provided for accessing DVN +materials. It supports Basic HTTP Auth where the client authenticates +itself as an existing DVN (or anonymous) user. Based on this, the API +determines whether the client has permission to access the requested +files or metadata. It is important to remember however, that in addition +to access permissions, DVN files may also be subject to "Terms of Use" +agreements. When access to such files is attempted through the Web +Download or Subsetting interfaces, the user is presented with an +agreement form. The API however is intended for automated clients, so +the remote party's compliance with the Terms of Use must be established +beforehand. **We advise you to have a written agreement with authorized +parties before allowing them to access data sets, bypassing the Terms of +Use. The authorized party should be responsible for enforcing the Terms +of Use to their end users.**\ Once such an agreement has been +established, you can grant the specified user unrestricted access to +Terms-protected materials on the Network home page > Options page > +PERMISSIONS tab > Permissions subtab, in the "Authorize Users to bypass +Terms of Use" section. + +Please consult the Data Sharing section of the Guide for additional +information on the :ref:`Data Sharing API `. 
+ +Create Account +-------------------- + +There are several ways to create accounts: at the network level by the +network administrator, at the dataverse level by the dataverse +administrator, and by the new user themselves if the option to create an +account when creating a dataverse is enabled. + +Accounts created by all methods are equivalent with the exception of +granting dataverse creator status during the create a dataverse +workflow. That status can be granted afterwards by the network +administrator if necessary. + +To create an account at the **network admin level**, navigate to the Create +Account page from the Options page: + +``Network home page > Options page > Permissions tab > Users subtab > Create User link > Create Account page`` + +Complete the required information denoted by the red asterisk and save. +Note: an email address can also be used as a username. + + +Manage Users +------------------- + +The Manage Users table gives the network administrator a list of all +user accounts in table form. It lists username, full name, roles +including at which dataverse the role is granted, and the current status +whether active or deactivated. + +Usernames are listed alphabetically and clicking on a username takes you +to the account page that contains detailed information on that account. +It also provides the ability to update personal details and change +passwords. + +The Manage Users table also provides the ability to deactivate a user +account. + +To view the Manage Users table navigate to the Options page: + +``Network home page > Options page > Permissions tab > Users subtab > Manage Users table`` + +Manage Groups +-------------------- + +Groups in the Dataverse Network are a way to identify collections of +users so permissions can be applied collectively rather than +individually. This allows controlling permissions for individuals by +altering membership in the group without affecting permissions of other +members. 
Groups can be defined by user names or IP addresses. + +The Manage Groups table lists information about existing groups in table +form including name, display or friendly name, and group membership. + +Clicking on the name takes you to the Edit Group page where the group's +configuration can be changed. It is also possible to create and delete +groups from the Manage Groups table. + +To view the Manage Groups table, navigate to the Options page: + +``Network home page > Options page > Permissions tab > Groups subtab > +Manage Groups table`` + +Once on the Groups subtab, viewing the Manage Groups table, you can +create or delete a group. + +When creating a group you must choose whether to identify users by +username or by IP address with a Username Group or IP User Group. + +With a Username Group, enter an existing username into the edit box, +click the "+" symbol to enter additional users, then save. + +With an IP User Group, enter an IP address or domain name into the edit +box. Wildcards can be used by specifying an asterisk (\*) in place of an +IP address octet (eg. 10.20.30.\*), or for the sub-domain or host +portion of the domain name (eg. \*.mydomain.edu). + +Last, an optional special feature of the IP User Group is to allow for +an Affiliate Login Service. Effectively this allows for the use of a +proxy to access the Dataverse Network on behalf of a group such as a +University Library where identification and authorization of users is +managed by their proxy service. To enable this feature, enter IP +addresses of any proxy servers that will access Dataverse Network, check +This IP group has an affiliate login service, enter the Affiliate Name +as it will appear on the Dataverse Network Login page, and the Affiliate +URL which would go to the proxy server. Save and you are finished. 
+ +Utilities +=========== + +The Dataverse Network provides the network administrator with tools to +manually execute background processes, perform functions in batch, and +resolve occasional operational issues. + +Navigate to the Utilities from the Options page: + +``Network home page > Options page > Utilities tab`` + +Available tools include: + +- **Study Utilities** - Create draft versions of studies, release file locks and delete multiple studies by inputting ID's. +- **Index Utilities** - Create a search index.  +- **Export Utilities** - Select files and export them.  +- **Harvest Utilities** - Harvest selected studies from another Network.  +- **File Utilities** - Select files and apply the JHOVE file validation process to them.  +- **Import Utilities** - Import multiple study files by using this custom batch process. +- **Handle Utilities** - Register and re-register study handles. + +**Study Utilities** + +Curating a large group of studies sometimes requires direct database +changes affecting a large number of studies that may belong to different +dataverses. An example might be changing the distributor name and logo +or the parent dataverse. Since the Dataverse Network employs study +versioning, it was decided that any such backend changes should +increment the affected studies' version. However, incrementing a study's +version is nontrivial as a database update. So, this utility to create a +draft of an existing study was created. + +The practice would involve generating a list of study database ID's that +need changing, use the utility to create drafts of those studies, then +run the database update scripts. The result is new, unreleased draft +versions of studies with modifications made directly through the +database. These studies would then need to be reviewed and released +manually. 
+
+Due to the transactional nature of study updates, particularly when
+uploading large files, it is possible a study update is interrupted such
+as during a system restart. When this occurs, the study lock, created to
+prevent simultaneous updates while one is already in progress, remains
+and the study cannot be edited until it is cleared.
+
+Checking for this condition and clearing it is easy. Open this utility,
+check if any locks are listed and remove them. The user should once
+again be able to edit their study.
+
+The user interface provides a convenient way to delete individual
+studies but when faced with deleting a large number of studies that do
+not conveniently belong to a single dataverse, use the Delete utility.
+
+Specify studies by their database id singly, as a comma-separated list
+(1,7,200, etc.), or as a hyphen-separated range (1-1000, 2005,
+2500-2700).
+
+**Index Utilities**
+
+Indexing is the process of making study metadata searchable. The Lucene
+search engine used by the Dataverse Network uses file-based indexes.
+Normally, any time a study or new study version is released the study
+information is automatically indexed. Harvesting also indexes studies in
+small batches as they are harvested. Sometimes this does not occur, such
+as when the harvest process is interrupted. The index could also become
+corrupt for some reason though this would be extremely rare.
+
+The index utility allows for reindexing of studies, dataverses, and the
+entire site. Studies and dataverses can be specified by their database
+id's alone, in a comma separated list, or in a hyphenated range: 1-1000.
+Use index all sparingly, particularly if you have a large site. This is
+a single transaction and should not be interrupted or you will need to
+start again. A more flexible approach is to determine the lowest and
+highest study ID's and index in smaller ranges: 1-1000, 1001-2000, etc. 
+ +Note: if for some reason a study change was not indexed, there is an +automatic background process that will detect this, inform the +administrator and will be reindexed once every 24 hours so manually +reindexing is not required. + +**Export Utilities** + +Export is a background process that normally runs once every 24 hours. +Its purpose is to produce study metadata files in well known formats +such as DDI, DC, MIF, and FGDC that can be used to import studies to +other systems such as through harvesting. + +Sometimes it's useful to manually export a study, dataverse, any updated +studies, or all studies. Studies and dataverses are specified by +database id rather than global id or handle. + +Export is tied to OAI set creation and Harvesting. To enable harvesting +of a subset of studies by another site, first an OAI set is created that +defines the group of studies. Next, the scheduled export runs and +creates the export files if they're not already available. It also +associates those studies defined by the set with the set name so future +requests for the set receive updates — additions or deletions from the +set. This way remote sites harvesting the set maintain an updated study +list. + +If you do not want to wait 24 hours to test harvest a newly created set, +use the export utility. Click "Run Export" to export any changed studies +and associate studies to the set. Exporting studies or dataverses alone +will not associate studies to a set, in those cases Update Harvest +Studies must also be run. + +**Harvest Utilities** + +The Harvest utility allows for on-demand harvesting of a single study. +First select one of the predefined harvesting dataverses which provide +remote server connection information as well as the local dataverse +where the study will be harvested to. Specify the harvest ID of the +study to be harvested. The harvest id is particular to the study and +server being harvested from. 
It can be obtained from the OAI protocol
+ListIdentifiers command, from the harvest log if previously harvested,
+or if from another DVN it takes the form: ``<OAI set alias>//<study global ID>``.
+A Dataverse Network study with ``globalID: hdl:1902.1/10004``, from the OAI
+set "My Set", having alias "myset", would have a harvest identifier of:
+``myset//hdl:1902.1/10004``
+
+**File Utilities**
+
+The Dataverse Network attempts to identify file types on upload to
+provide more information to an end user. It does this by calling a file
+type identification library called JHOVE. Though JHOVE is a very
+comprehensive library, sometimes a file type may not be recognized or is
+similar to another type and misidentified. For these cases we provide an
+override mechanism — a list of file extensions and a brief text
+description. Since these are created after the files have been uploaded,
+this file utility provides a way to re-identify the file types and
+furthermore limits this process to specific file types or to studies,
+specified by database ID singly, as a comma-separated list, or as a
+hyphen-separated range.
+
+**Import Utilities**
+
+Importing studies usually is done by harvesting study metadata from a
+remote site via the OAI protocol. This causes study metadata to be
+hosted locally but files are served by the remote server. The Import
+utility is provided for cases where an OAI server is unavailable or
+where the intent is to relocate studies and their files to the Dataverse
+Network.
+
+At present this requires the help of the network administrator and can
+be manually intensive. First, study metadata may need to be modified
+slightly then saved in a specific directory structure on the server file
+system. Next, the study metadata import format and destination dataverse
+is chosen. Last, the top level directory where the study metadata and
+files are stored and "Batch Import" is clicked. Because the DDI input
+format can be quite complex and usage varies, verify the results are
+what's intended. 
+ +A single study import function is also provided as a test for importing +your study's metadata syntax but is not meant for actual import. It will +not import associated files. + +Before performing a batch import, you must organize your files in the +following manner: + +#. If you plan to import multiple files or studies, create a master + directory to hold all content that you choose to import. +#. Create a separate subdirectory for each study that you choose to + import. + The directory name is not important. +#. In each directory, place a file called ``study.xml`` and use that + file to hold the XML-formatted record for one study. + Note: Do not include file description elements in + the ``study.xml`` file. Including those fields results in the + addition of multiple blank files to that study. +#. Also place in the directory any additional files that you choose to + upload for that study. + +For an example of a simple study DDI, refer to the :ref:`Metadata References ` +section. + +**Handle Utilities** + +When a study is created, the global ID is first assigned, then +registered with handle.net as a persistent identifier. This identifier +becomes part of the study's citation and is guaranteed to always resolve +to the study. For the study with global ID, hdl:1902.1/16598 or handle +1902.1/16596, the URL in the citation would be: +`http://hdl.handle.net/1902.1/16598 `__. + +If for any reason a study is created and not registered or is registered +in a way that needs to be changed, use the Handle utility to either +register currently unregistered studies or to re-register all registered +studies. + +Web Statistics +=============== + +The Dataverse Network provides the capability to compile and analyze +site usage through Google Analytics. A small amount of code is embedded +in each page so when enabled, any page access along with associated +browser and user information is recorded by Google. 
Later analysis of
+this compiled access data can be performed using the `Google Analytics `__ utility.
+
+Note: Access to Google Analytics is optional. If access to this utility
+is not configured for your network, in place of the Manage Web Usage
+menu option is a message
+stating: ``Google Analytics are not configured for this Network.``
+
+**To enable Google Analytics:**
+
+#. Create a Gmail account.
+#. Go to `Google Analytics `__ and create a profile for the server or website domain. You will
+   be assigned a Web Property ID.
+#. Using the Glassfish Admin console, add a JVM option and assign it the value of the newly assigned Web Property ID:
+   ``-Ddvn.googleanalytics.key=<Web Property ID>``
+#. Restart Glassfish.
+#. It takes about 24 hours after installation and set up of this option for tracking data to become available for use.
+
+Note: Google provides the code necessary for tracking. This has already
+been embedded into the Dataverse Network but not the Web Property ID.
+That is configured as a JVM option by the network admin when enabling
+this feature.
+
+**To view Web Statistics, navigate to:**
+
+- Network home page > Options page > Settings tab > General subtab > Web Statistics
+- You will be redirected to `Google Analytics `__. Log in using your Gmail account used to
+  create the profile.
+
+
+Appendix
+++++++++
+
+Additional documentation complementary to Users Guides.
+
+Control Card-Based Data Ingest
+==============================
+
+As of version 2.2 the DVN supports ingesting plain text data files, in
+addition to SPSS and STATA formats. This allows users and institutions
+to ingest raw data into Dataverse Networks without having to purchase
+and maintain proprietary, commercial software packages.
+
+Tab-delimited and CSV files are supported. In order to ingest a plain
+data file, an additional file containing the variable metadata needs to
+be supplied.
+
+**Two Metadata Types Are Supported**
+
+#. 
A simplified format based on the classic SPSS control card syntax;
+   this appears as "CSV/SPSS" in the menu on the Add Files page.
+#. DDI, an xml format from the Data Documentation Initiative
+   consortium. Choose "TAB/DDI" to ingest a tab file with a DDI metadata sheet.
+
+The specifics of the formats are documented in the 2 sections below.
+
+
+
+.. _controlcard-datafile-ingest:
+
+CSV Data, SPSS-style Control Card
+---------------------------------
+
+Unlike other supported “subsettable” formats, this ingest mechanism
+requires 2 files: the CSV raw data file proper and an SPSS Setup file
+("control card") with the data set metadata. In the future, support for
+other data definition formats may be added (STATA, SAS, etc.). As
+always, user feedback is welcome.
+
+**The supported SPSS command syntax:**
+
+Please note that it is not our goal to attempt to support any set of
+arbitrary SPSS commands and/or syntax variations. The goal is to enable
+users who do not own proprietary statistical software to prepare their
+raw data for DVN ingest, using a select subset of SPSS data definitional
+syntax.
+
+(In addition to its simplicity and popularity, we chose to use the SPSS
+command syntax because Dataverse Network already has support for the SPSS ``.SAV`` and ``.POR`` formats, so we have a good working knowledge of the SPSS formatting
+conventions.)
+
+The following SPSS commands are supported:
+
+| ``DATA LIST ``
+| ``VARIABLE LABELS ``
+| ``NUMBER OF CASES``
+| ``VALUE LABELS``
+| ``FORMATS`` (actually, not supported as of now -- see below)
+| ``MISSING VALUES``
+
+We support mixed cases and all the abbreviations of the above commands
+that are valid under SPSS. For example, both "var labels" and "Var Lab"
+are acceptable commands.
+
+Individual command syntax.
+
+**1. DATA LIST**
+
+An explicit delimiter definition is required. For example:
+
+``DATA LIST LIST(',')``
+
+specifies ``','`` as the delimiter. 
This line is followed by the ``'/'``
+separator and variable definitions. Explicit type definitions are
+required. Each variable is defined by a name/value pair ``VARNAME``
+
+``(VARTYPE)`` where ``VARTYPE`` is a standard SPSS fortran-type
+definition.
+
+**Note** that this is the only **required** section. The minimum
+amount of metadata required to ingest a raw data file is the delimiter
+character, the names of the variables and their data type. All of these
+are defined in the ``DATA LIST`` section. Here’s an example of a
+complete, valid control card:
+
+``DATA LIST LIST(',')``
+``CASEID (f) NAME (A) RATIO (f)``
+``.``
+
+It defines a comma-separated file with 3 variables named ``CASEID``,
+``NAME`` and ``RATIO``, two of them of the types numeric and one character
+string.
+
+Examples of valid type definitions:
+
+| **A8** 8 byte character string;
+| **A** character string;
+| **f10.2** numeric value, 10 decimal digits, with 2 fractional digits;
+| **f8** defaults to F8.0
+| **F** defaults to F.0, i.e., numeric integer value
+| **2** defaults to F.2, i.e., numeric float value with 2 fractional digits.
+
+The following SPSS date/time types are supported:
+
+type                            format
+
+``DATE``                       ``yyyy-MM-dd``
+
+``DATETIME``                ``yyyy-MM-dd HH:mm:ss``
+
+The variable definition pairs may be separated by any combination of
+white space characters and newlines. **Wrapped-around lines must start
+with white spaces** (i.e., newlines must be followed by spaces). The
+list must be terminated by a line containing a single dot.
+
+Please note, that the actual date values should be stored in the CSV
+file as strings, in the format above. As opposed to how SPSS stores the
+types of the same name (as integer numbers of seconds).
+
+**2. VARIABLE LABELS**
+
+Simple name/value pairs, separated by any combination of white space
+characters and newlines (as described in section 1 above). The list is
+terminated by a single dot. 
+ +For example: + +| ``VARIABLE LABELS`` +| ``CELLS "Subgroups for sample-see documentation"`` +| ``STRATA "Cell aggregates for sample”`` +| ``.`` + +**3. NUMBER OF CASES (optional)** + +The number of cases may be explicitly specified. For example: + +``num of cases 1000`` + +When the number of cases is specified, it will be checked against the +number of observations actually found in the CSV file, and a mismatch +would result in an ingest error. + +**4. VALUE LABELS** + +Each value label section is a variable name followed by a list of +value/label pairs, terminated by a single "/" character. The list of +value label sections is terminated by a single dot. + +For example, + +| ``VALUE labels`` +| ``FOO 0 "NADA"`` +| ``1 "NOT MUCH"`` +| ``99999999 "A LOT"`` +| ``/`` +| ``BAR 97 "REFUSAL"`` +| ``98 "DONT KNOW"`` +| ``99 "MISSING"`` +| ``/`` +| ``.`` + +**5. FORMATS** + +This command is actually redundant if you explicitly supply the variable +formats in the ``DATA LIST`` section above. + +**NOTE:** It appears that the only reason the``FORMATS`` command exists is +that ``DATA LIST`` syntax does not support explicit fortran-style format +definitions when fixed-field data is defined. So it is in fact redundant +when we're dealing with delimited files only. + +Please supply valid, fortran-style variable formats in the ``DATA +LIST`` section, as described above. + +**6. MISSING VALUES** + +This is a space/newline-separate list of variable names followed by a +comma-separated list of missing values definition, in parentheses. For +example:  + +| ``INTVU4 (97, 98, 99)`` +| The list is terminated with a single dot. + +An example of a valid ``MISSING VALUES`` control card section: + +| ``MISSING VALUES`` +| ``INTVU4 (97, 98, 99)`` +| ``INTVU4A ('97', '98', '99')`` +| ``.`` + +| **An example of a control card ready for ingest:** + +.. 
code-block:: guess + + data list list(',') / + CELLS (2) STRATA (2) WT2517 (2) + SCRNRID (f) CASEID (f) INTVU1 (f) + INTVU2 (f) INTVU3 (f) INTVU4 (f) + INTVU4A (A) + . + VARIABLE LABELS + CELLS "Subgroups for sample-see documentation" + STRATA "Cell aggregates for sample-see documenta" + WT2517 "weight for rep. sample-see documentation" + SCRNRID "SCREENER-ID" + CASEID "RESPONDENT'S CASE ID NUMBER" + INTVU1 "MONTH RESPONDENT BEGAN INTERVIEW" + INTVU2 "DAY RESPONDENT BEGAN INTERVIEW" + INTVU3 "HOUR RESPONDENT BEGAN INTERVIEW" + INTVU4 "MINUTE RESPONDENT BEGAN INTERVIEW" + INTVU4A "RESPONDENT INTERVIEW BEGAN AM OR PM" + . + VALUE labels + CASEID 99999997 "REFUSAL" + 99999998 "DONT KNOW" + 99999999 "MISSING" + / + INTVU1 97 "REFUSAL" + 98 "DONT KNOW" + 99 "MISSING" + / + INTVU2 97 "REFUSAL" + 98 "DONT KNOW" + 99 "MISSING" + / + INTVU3 97 "REFUSAL" + 98 "DONT KNOW" + 99 "MISSING" + / + INTVU4 97 "REFUSAL" + 98 "DONT KNOW" + 99 "MISSING" + / + INTVU4A "97" "REFUSAL" + "98" "DONT KNOW" + "99" "MISSING" + "AM" "MORNING" + "PM" "EVENING" + . + MISSING VALUES + CASEID (99999997, 99999998, 99999999) + INTVU1 (97, 98, 99) + INTVU2 (97, 98, 99) + INTVU3 (97, 98, 99) + INTVU4 (97, 98, 99) + INTVU4A ('97', '98', '99') + . + NUMBER of CASES 2517 + +**DATA FILE.** + +Data must be stored in a text file, one observation per line. Both DOS +and Unix new line characters are supported as line separators. On each +line, individual values must be separated by the delimiter character +defined in the DATA LISTsection. There may only be exactly (``NUMBER OF +VARIABLES - 1``) delimiter characters per line; i.e. character values must +not contain the delimiter character. + +**QUESTIONS, TODOS:** + +Is there any reason we may want to support ``RECODE`` command also? + +--- comments, suggestions are welcome! --- + +.. 
_ddixml-datafile-ingest: + +Tab Data, with DDI Metadata +------------------------ + +As of version 2.2, another method of ingesting raw TAB-delimited data +files has been added to the Dataverse Network. Similarly to the SPSS control +card-based ingest (also added in this release), this ingest mechanism +requires 2 files: the TAB raw data file itself and the data set metadata +in the DDI/XML format. + +**Intended use case:** + +Similarly to the SPSS syntax-based ingest, the goal is to provide +another method of ingesting raw quantitative data into the DVN, without +having to first convert it into one of the proprietary, commercial +formats, such as SPSS or STATA. Pleaes note, that in our design +scenario, the DDI files supplying the ingest metadata will be somehow +machine-generated; by some software tool, script, etc. In other words, +this design method is targeted towards more of an institutional user, +perhaps another data archive with large quantities of data and some +institutional knowledge of its structure, and with some resources to +invest into developing an automated tool to generate the metadata +describing the datasets. With the final goal of ingesting all the data +into a DVN by another automated, batch process. The DVN project is also +considering developing a standalone tool of our own that would guide +users through the process of gathering the information describing their +data sets and producing properly formatted DDIs ready to be ingested. + +For now, if you are merely looking for a way to ingest a single +“subsettable” data set, you should definitely be able to create a +working DDI by hand to achieve this goal. However, we strongly recommend +that you instead consider the CSV/SPSS control card method, which was +designed with this use case in mind. If anything, it will take +considerably fewer keystrokes to create an SPSS-syntax control card than +a DDI encoding the same amount of information. 
+ +**The supported DDI syntax:** + +You can consult the DDI project for complete information on the DDI +metadata (`http://icpsr.umich.edu/DDI `__). +However, only a small subset of the published format syntax is used for +ingesting individual data sets. Of the 7 main DDI sections, only 2, +fileDscr and dataDscr are used. Inside these sections, only a select set +of fields, those that have direct equivalents in the DVN data set +structure, are supported. + +These fields are outlined below. All the fields are mandatory, unless +specified otherwise. An XSD schema of the format subset is also +provided, for automated validation of machine-generated XML. + +.. code-block:: guess + + + + + + + NUMBER OF OBSERVATIONS + NUMBER OF VARIABLES + + + + + + + + VARIABLE LABEL + + CATEGORY VALUE + + … + + + + + + + VARIABLE LABEL + + + + + + VARIABLE LABEL + + + + + + + +--- comments, suggestions are welcome! --- + +.. _spss-datafile-ingest: + +SPSS Data File Ingest +===================== + +Ingesting SPSS (.por) files with extended labels +------------------------------------------------ + +This feature has been added to work around the limit on the length of +variable labels in SPSS Portable (.por) files. To use this +feature, select "SPSS/POR,(w/labels)" from the list of file types on +the AddFiles page. You will be prompted to first upload a text file +containing the extended, "long" versions of the labels, and then +upload the .por file. The label text file should contain one +TAB-separated variable name/variable label pair per line. + +.. _r-datafile-ingest: + +Ingest of R (.RData) files +========================== + +Overview. +--------- + +Support for ingesting R data files has been added in version 3.5. R +has been increasingly popular in the research/academic community, +owing to the fact that it is free and open-source (unlike SPSS and +STATA). Consequently, more and more data is becoming available +exclusively in RData format. 
This long-awaited feature makes it +possible to ingest such data into DVN as "subsettable" files. + +Requirements. +------------- + +R ingest relies on R having been installed, configured and made +available to the DVN application via RServe (see the Installers +Guide). This is in contrast to the SPSS and Stata ingest - which can +be performed without R present. (though R is still needed to perform +most subsetting/analysis tasks on the resulting data files). + +The data must be formatted as an R dataframe (using data.frame() in +R). If an .RData file contains multiple dataframes, only the 1st one +will be ingested. + +Data Types, compared to other supported formats (Stat, SPSS) +------------------------------------------------------------ + +Integers, Doubles, Character strings +************************************ + +The handling of these types is intuitive and straightforward. The +resulting tab file columns, summary statistics and UNF signatures +should be identical to those produced by ingesting the same vectors +from SPSS and Stata. + +**A couple of features that are unique to R/new in DVN:** + +R explicitly supports Missing Values for all of the types above; +Missing Values encoded in R vectors will be recognized and preserved +in TAB files (as 'NA'), counted in the generated summary statistics +and data analysis. + +In addition to Missing Values, R recognizes "Not a Number" (NaN) and +positive and negative infinity for floating point values. These +are now properly supported by the DVN. + +Also note that, unlike Stata, where "float" and "double" are supported +as distinct data types, all floating point values in R are double +precision. + +R Factors +********* + +These are ingested as "Categorical Values" in the DVN. + +One thing to keep in mind: in both Stata and SPSS, the actual value of +a categorical variable can be both character and numeric. In R, all +factor values are strings, even if they are string representations of +numbers. 
So the values of the resulting categoricals in the DVN will +always be of string type too. + +| **New:** To properly handle *ordered factors* in R, the DVN now supports the concept of an "Ordered Categorical" - a categorical value where an explicit order is assigned to the list of value labels. + +(New!) Boolean values +********************* + +R Boolean (logical) values are supported. + + +Limitations of R data format, as compared to SPSS and STATA. +************************************************************ + +Most noticeably, R lacks a standard mechanism for defining descriptive +labels for the data frame variables. In the DVN, similarly to +both Stata and SPSS, variables have distinct names and labels; with +the latter reserved for longer, descriptive text. +With variables ingested from R data frames the variable name will be +used for both the "name" and the "label". + +| *Optional R packages exist for providing descriptive variable labels; + in one of the future versions support may be added for such a + mechanism. It would of course work only for R files that were + created with such optional packages*. + +Similarly, R categorical values (factors) lack descriptive labels too. +**Note:** This is potentially confusing, since R factors do +actually have "labels". This is a matter of terminology - an R +factor's label is in fact the same thing as the "value" of a +categorical variable in SPSS or Stata and DVN; it contains the actual +meaningful data for the given observation. It is NOT a field reserved +for explanatory, human-readable text, such as the case with the +SPSS/Stata "label". + +Ingesting an R factor with the level labels "MALE" and "FEMALE" will +produce a categorical variable with "MALE" and "FEMALE" in the +values and labels both. + + +Time values in R +---------------- + +This warrants a dedicated section of its own, because of some unique +ways in which time values are handled in R. 
+ +R makes an effort to treat a time value as a real time instance. This +is in contrast with either SPSS or Stata, where time value +representations such as "Sep-23-2013 14:57:21" are allowed; note that +in the absence of an explicitly defined time zone, this value cannot +be mapped to an exact point in real time. R handles times in the +"Unix-style" way: the value is converted to the +"seconds-since-the-Epoch" Greenwitch time (GMT or UTC) and the +resulting numeric value is stored in the data file; time zone +adjustments are made in real time as needed. + +Things get ambiguous and confusing when R **displays** this time +value: unless the time zone was explicitly defined, R will adjust the +value to the current time zone. The resulting behavior is often +counter-intuitive: if you create a time value, for example: + + timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS"); + +on a computer configured for the San Francisco time zone, the value +will be differently displayed on computers in different time zones; +for example, as "12:57 PST" while still on the West Coast, but as +"15:57 EST" in Boston. + +If it is important that the values are always displayed the same way, +regardless of the current time zones, it is recommended that the time +zone is explicitly defined. For example: + + attr(timevalue,"tzone")<-"PST" +or + timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS", tz="PST"); + +Now the value will always be displayed as "12:57 PST", regardless of +the time zone that is current for the OS ... **BUT ONLY** if the OS +where R is installed actually understands the time zone "PST", which +is not by any means guaranteed! Otherwise, it will **quietly adjust** +the stored GMT value to **the current time zone**, yet still +display it with the "PST" tag attached! 
One way to rephrase this is +that R does a fairly decent job **storing** time values in a +non-ambiguous, platform-independent manner - but gives no guarantee that +the values will be displayed in any way that is predictable or intuitive. + +In practical terms, it is recommended to use the long/descriptive +forms of time zones, as they are more likely to be properly recognized +on most computers. For example, "Japan" instead of "JST". Another possible +solution is to explicitly use GMT or UTC (since it is very likely to be +properly recognized on any system), or the "UTC+" notation. Still, none of the above +**guarantees** proper, non-ambiguous handling of time values in R data +sets. The fact that R **quietly** modifies time values when it doesn't +recognize the supplied timezone attribute, yet still appends it to the +**changed** time value does make it quite difficult. (These issues are +discussed in depth on R-related forums, and no attempt is made to +summarize it all in any depth here; this is just to made you aware of +this being a potentially complex issue!) + +An important thing to keep in mind, in connection with the DVN ingest +of R files, is that it will **reject** an R data file with any time +values that have time zones that we can't recognize. This is done in +order to avoid (some) of the potential issues outlined above. + +It is also recommended that any vectors containing time values +ingested into the DVN are reviewed, and the resulting entries in the +TAB files are compared against the original values in the R data +frame, to make sure they have been ingested as expected. + +Another **potential issue** here is the **UNF**. The way the UNF +algorithm works, the same date/time values with and without the +timezone (e.g. "12:45" vs. "12:45 EST") **produce different +UNFs**. 
Considering that time values in Stata/SPSS do not have time +zones, but ALL time values in R do (yes, they all do - if the timezone +wasn't defined explicitely, it implicitly becomes a time value in the +"UTC" zone!), this means that it is **impossible** to have 2 time +value vectors, in Stata/SPSS and R, that produce the same UNF. + +**A pro tip:** if it is important to produce SPSS/Stata and R versions of +the same data set that result in the same UNF when ingested, you may +define the time variables as **strings** in the R data frame, and use +the "YYYY-MM-DD HH:mm:ss" formatting notation. This is the formatting used by the UNF +algorithm to normalize time values, so doing the above will result in +the same UNF as the vector of the same time values in Stata. + +Note: date values (dates only, without time) should be handled the +exact same way as those in SPSS and Stata, and should produce the same +UNFs. + +.. _fits-datafile-ingest: + +FITS File format Ingest +======================= + +This custom ingest is an experiment in branching out into a discipline +outside of the Social Sciences. It has been added in v.3.4 as part of the +collaboration between the IQSS and the Harvard-Smithsonian Center for +Astrophysics. FITS is a multi-part file format for storing +Astronomical data (http://fits.gsfc.nasa.gov/fits_standard.html). DVN +now offers an ingest plugin that parses FITS file headers for +key-value metadata that are extracted and made searchable. + +FITS is now listed on the DVN AddFiles page as a recognized file +format. The same asynchronous process is used as for "subsettable" +files: the processing is done in the background, with an email +notification sent once completed. + +Unlike with the "subsettable" file ingest, no format conversion takes +place and the FITS file is ingested as is, similarly to "other +materials" files. The process is limited to the extaction of the +searchable metadata. 
Once the file is ingested and the study is +re-indexed, these file-level FITS metadata fields can be searched on +from the Advanced Search page, on either the Dataverse or Network +level. Choose one of the FITS file Information listed in the drop +down, and enter the relevant search term. Search results that match +the query will show individual files as well as studies. + +The ingest also generates a short summary of the file contents (number +and type of Header-Data Units) and adds it to the file description. + + +.. _metadata-references: + +Metadata References +=================== + +The Dataverse Network metadata is compliant with the `DDI schema +version 2 `__. The Cataloging +Information fields associated with each study contain most of the fields +in the study description section of the DDI. That way the Dataverse +Network metadata can be mapped easily to a DDI, and be exported into XML +format for preservation and interoperability. + +Dataverse Network data also is compliant with `Simple Dublin +Core `__ (DC) requirements. For imports +only, Dataverse Network data is compliant with the `Content Standard +for Digital Geospatial Metadata (CSDGM), Vers. 2 (FGDC-STD-001-1998) `__ (FGDC). + +Attached is a PDF file that defines and maps all Dataverse Network +Cataloging Information fields. Information provided in the file includes +the following: + +- Field label - For each Cataloging Information field, the field label + appears first in the mapping matrix. + +- Description - A description of each field follows the field label. + +- Query term - If a field is available for use in building a query, the + term to use for that field is listed. + +- Dataverse Network database element name - The Dataverse Network + database element name for the field is provided. + +- Advanced search - If a field is available for use in an advanced + search, that is indicated. 
+ +- DDI element mapping for imports - For harvested or imported studies, + the imported DDI elements are mapped to Dataverse Network fields. + +- DDI element mapping for exports - When a study or dataverse is + harvested or exported in DDI format, the Dataverse Network fields are + mapped to DDI elements. + +- DC element mapping for imports - For harvested or imported studies, + the imported DC elements are mapped to specific Dataverse Network + fields. + +- DC element mapping for exports - When a study or dataverse is + harvested or exported in DC format, specific Dataverse Network fields + are mapped to the DC elements. + +- FGDC element mapping for imports - For harvested or imported studies, + the imported FGDC elements are mapped to specific Dataverse Network fields. + +Also attached is an example of a DDI for a simple study containing +title, author, description, keyword, and topic classification cataloging +information fields suitable for use with batch import. + +|image9| +`catalogingfields11apr08.pdf `__ + +|image10| +`simple\_study.xml `__ + +Zelig Interface +========== + +Zelig is statistical software for everyone: researchers, instructors, +and students. It is a front-end and back-end for R (Zelig is written in +R). The Zellig software: + +- Unifies diverse theories of inference + +- Unifies different statistical models and notation + +- Unifies R packages in a common syntax + +Zelig is distributed under the GNU General Public License, Version 2. +After installation, the source code is located in your R library +directory. You can download a tarball of the latest Zelig source code +from \ `http://projects.iq.harvard.edu/zelig `__. + +The Dataverse Network software uses Zelig to perform advanced +statistical analysis functions. 
The current interface schema used by the +Dataverse Network for Zelig processes is in the following location: + +**Criteria for Model Availability** + +Three factors determine which Zelig models are available for analysis in +the Dataverse Network:  + +- Some new models require data structures and modeling parameters that + are not compatible with the current framework of the Dataverse Network + and other web-driven applications. These types of models are not + available in the Dataverse Network. + +- Models must be explicitly listed in the Zelig packages to be used in + the Dataverse Network, and all models must be disclosed fully, including + runtime errors. Zelig models that do not meet these specifications are + excluded from the Dataverse Network until they are disclosed with a + complete set of information. + +- An installation-based factor also can limit the Zelig models available + in the Dataverse Network. A minimum version of the core software package + GCC 4.0 must be installed on any Linux OS-based R machine used with the + Dataverse Network, to install and run a key Zelig package, MCMCpack. If + a Linux machine that is designated to R is used for DSB services and + does not have the minimum version of the GCC package installed, the + Dataverse Network looses at least eight models from the available + advanced analysis models. + +|image11| +`configzeliggui.xml `__ + +.. |image9| image:: ./appendix-0_files/application-pdf.png +.. |image10| image:: ./appendix-0_files/application-octet-stream.png +.. |image11| image:: ./appendix-0_files/application-octet-stream.png + diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_sources/index.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/_sources/index.txt Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,38 @@ +.. The Dataverse Network documentation master file, created by + sphinx-quickstart on Thu Aug 1 10:00:58 2013. 
+ You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Dataverse Network Guides +=============================== + +Contents: + +.. toctree:: + :maxdepth: 2 + + dataverse-user-main + dataverse-installer-main + dataverse-developer-main + dataverse-api-main + +.. index:: + single: execution; context + module: __main__ + module: sys + triple: module; search; path + + +The execution context +--------------------- + +.. index:: BNF, grammar, syntax, notation + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/agogo.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/_static/agogo.css Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,489 @@ +/* + * agogo.css_t + * ~~~~~~~~~~~ + * + * Sphinx stylesheet -- agogo theme. + * + * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +* { + margin: 0px; + padding: 0px; +} + +body { + font-family: "Verdana", Arial, sans-serif; + line-height: 1.4em; + color: black; + background-color: #eeeeec; +} + + +/* Page layout */ + +div.header, div.content, div.footer { + width: 70em; + margin-left: auto; + margin-right: auto; +} + +div.header-wrapper { + background: white; + padding-top: 10px; + border-top: 40px solid #000; + border-bottom: 4px solid #000; +} + + +/* Default body styles */ +a { + color: #ce5c00; +} + +div.bodywrapper a, div.footer a { + text-decoration: underline; +} + +.clearer { + clear: both; +} + +.left { + float: left; +} + +.right { + float: right; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +h1, h2, h3, h4 { + font-family: "Georgia", "Times New Roman", serif; + font-weight: normal; + color: #3465a4; + margin-bottom: .8em; +} + +h1 { + color: #204a87; +} + +h2 { + padding-bottom: .5em; + border-bottom: 1px solid #3465a4; +} + +a.headerlink { + visibility: hidden; + color: #dddddd; + padding-left: .3em; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink { + visibility: visible; +} + +img { + border: 0; +} + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 2px 7px 1px 7px; + border-left: 0.2em solid black; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +dt:target, .highlighted { + background-color: #fbe54e; +} + +/* Header */ + +div.header { + padding-top: 10px; + padding-bottom: 10px; + padding-left: 220px; + background: url(logo.png) no-repeat 20px 0; + background-size: 183px 80px; +} + +div.header .headertitle { + font-family: "Georgia", "Times New Roman", serif; + font-weight: normal; + font-size: 180%; + margin-bottom: .8em; +} + +div.header 
.headertitle a { + color: #000; +} + +div.header div.rel { + margin-top: 1em; +} + +div.header div.rel a { + color: #fcaf3e; + letter-spacing: .1em; + text-transform: uppercase; +} + +p.logo { + float: right; +} + +img.logo { + border: 0; +} + + +/* Content */ +div.content-wrapper { + background-color: white; + padding-top: 20px; + padding-bottom: 20px; +} + +div.document { + width: 50em; + float: left; +} + +div.body { + padding-right: 2em; + text-align: justify; +} + +div.document h1 { + line-height: 120%; +} + +div.document ul { + margin: 1.5em; + list-style-type: square; +} + +div.document dd { + margin-left: 1.2em; + margin-top: .4em; + margin-bottom: 1em; +} + +div.document .section { + margin-top: 1.7em; +} +div.document .section:first-child { + margin-top: 0px; +} + +div.document div.highlight { + padding: 3px; + background-color: #eeeeec; + border-top: 2px solid #dddddd; + border-bottom: 2px solid #dddddd; + margin-top: .8em; + margin-bottom: .8em; +} + +div.document h2 { + margin-top: .7em; +} + +div.document p { + margin-bottom: .5em; +} + +div.document li.toctree-l1 { + margin-bottom: 1em; +} + +div.document .descname { + font-weight: bold; +} + +div.document .docutils.literal { + background-color: #eeeeec; + padding: 1px; +} + +div.document .docutils.xref.literal { + background-color: transparent; + padding: 0px; +} + +div.document blockquote { + margin: 1em; +} + +div.document ol { + margin: 1.5em; +} + +div.document pre { + white-space: pre-wrap; /* css-3 */ + white-space: -moz-pre-wrap; /* Mozilla, since 1999 */ + white-space: -pre-wrap; /* Opera 4-6 */ + white-space: -o-pre-wrap; /* Opera 7 */ + word-wrap: break-word; /* Internet Explorer 5.5+ */ +} + +/* Sidebar */ + +div.sidebar { + width: 20em; + float: right; + font-size: .9em; +} + +div.sidebar a, div.header a { + text-decoration: none; +} + +div.sidebar a:hover, div.header a:hover { + text-decoration: underline; +} + +div.sidebar h3 { + color: #2e3436; + text-transform: uppercase; + 
font-size: 130%; + letter-spacing: .1em; +} + +div.sidebar ul { + list-style-type: none; +} + +div.sidebar li.toctree-l1 a { + display: block; + padding: 1px; + border: 1px solid #dddddd; + background-color: #eeeeec; + margin-bottom: .4em; + padding-left: 3px; + color: #2e3436; +} + +div.sidebar li.toctree-l2 a { + background-color: transparent; + border: none; + margin-left: 1em; + border-bottom: 1px solid #dddddd; +} + +div.sidebar li.toctree-l3 a { + background-color: transparent; + border: none; + margin-left: 2em; + border-bottom: 1px solid #dddddd; +} + +div.sidebar li.toctree-l4 a { + background-color: transparent; + border: none; + margin-left: 3em; + border-bottom: 1px solid #dddddd; +} + +div.sidebar li.toctree-l5 a { + background-color: transparent; + border: none; + margin-left: 4em; + border-bottom: 1px solid #dddddd; +} + +div.sidebar li.toctree-l2:last-child a { + border-bottom: none; +} + +div.sidebar li.toctree-l1.current a { + border-right: 5px solid #fcaf3e; +} + +div.sidebar li.toctree-l1.current li.toctree-l2 a { + border-right: none; +} + +div.sidebar input[type="text"] { + width: 170px; +} + +div.sidebar input[type="submit"] { + width: 30px; +} + + +/* Footer */ + +div.footer-wrapper { + background: url(bgfooter.png) top left repeat-x; + border-top: 4px solid #babdb6; + padding-top: 10px; + padding-bottom: 10px; + min-height: 80px; +} + +div.footer, div.footer a { + color: #888a85; +} + +div.footer .right { + text-align: right; +} + +div.footer .left { + text-transform: uppercase; +} + + +/* Styles copied from basic theme */ + +img.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + 
text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li div.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable dl, table.indextable dd { + margin-top: 0; + margin-bottom: 0; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +/* -- viewcode extension ---------------------------------------------------- */ + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family:: "Verdana", Arial, sans-serif; +} + +div.viewcode-block:target { + margin: -1px -3px; + padding: 0 3px; + background-color: #f4debf; + border-top: 1px solid #ac9; + border-bottom: 1px solid #ac9; +} diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/ajax-loader.gif Binary file DVN-web/installer/dvninstall/doc/guides/_static/ajax-loader.gif has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/basic.css --- /dev/null Thu Jan 01 00:00:00 
1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/_static/basic.css Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,540 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 0px; + margin-left: -100%; + font-size: 90%; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox input[type="text"] { + width: 170px; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + width: 30px; +} + +img { + border: 0; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li div.context { + color: #888; + margin: 2px 0 0 
30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable dl, table.indextable dd { + margin-top: 0; + margin-bottom: 0; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- general body styles --------------------------------------------------- */ + +a.headerlink { + visibility: hidden; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.field-list ul { + padding-left: 1em; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, .figure.align-center, 
object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px 7px 0 7px; + background-color: #ffe; + width: 40%; + float: right; +} + +p.sidebar-title { + font-weight: bold; +} + +/* -- topics ---------------------------------------------------------------- */ + +div.topic { + border: 1px solid #ccc; + padding: 7px 7px 0 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +div.admonition dl { + margin-bottom: 0; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + border: 0; + border-collapse: collapse; +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +table.field-list td, table.field-list th { + border: 0 !important; +} + +table.footnote td, table.footnote th { + border: 0 !important; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: 
upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +dl { + margin-bottom: 15px; +} + +dd p { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +dt:target, .highlighted { + background-color: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.refcount { + color: #060; +} + +.optional { + font-size: 1.3em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +td.linenos pre { + padding: 5px 0px; + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + margin-left: 0.5em; +} + +table.highlighttable td { + padding: 0 0.5em 0 0.5em; +} + +tt.descname { + background-color: transparent; + font-weight: bold; + font-size: 1.2em; +} + +tt.descclassname { + background-color: transparent; +} + +tt.xref, a tt { + background-color: transparent; + font-weight: bold; +} + +h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + 
+div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/bgfooter.png Binary file DVN-web/installer/dvninstall/doc/guides/_static/bgfooter.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/bgtop.png Binary file DVN-web/installer/dvninstall/doc/guides/_static/bgtop.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/comment-bright.png Binary file DVN-web/installer/dvninstall/doc/guides/_static/comment-bright.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/comment-close.png Binary file DVN-web/installer/dvninstall/doc/guides/_static/comment-close.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/comment.png Binary file DVN-web/installer/dvninstall/doc/guides/_static/comment.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/doctools.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/_static/doctools.js Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,235 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for all documentation. + * + * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. 
+ * :license: BSD, see LICENSE for details. + * + */ + +/** + * select a different prefix for underscore + */ +$u = _.noConflict(); + +/** + * make the code below compatible with browsers without + * an installed firebug like debugger +if (!window.console || !console.firebug) { + var names = ["log", "debug", "info", "warn", "error", "assert", "dir", + "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", + "profile", "profileEnd"]; + window.console = {}; + for (var i = 0; i < names.length; ++i) + window.console[names[i]] = function() {}; +} + */ + +/** + * small helper function to urldecode strings + */ +jQuery.urldecode = function(x) { + return decodeURIComponent(x).replace(/\+/g, ' '); +}; + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. + */ +jQuery.getQueryParameters = function(s) { + if (typeof s == 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. 
+ */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node) { + if (node.nodeType == 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) { + var span = document.createElement("span"); + span.className = className; + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this); + }); + } + } + return this.each(function() { + highlight(this); + }); +}; + +/** + * Small JavaScript module for the documentation. + */ +var Documentation = { + + init : function() { + this.fixFirefoxAnchorBug(); + this.highlightSearchWords(); + this.initIndexTable(); + }, + + /** + * i18n support + */ + TRANSLATIONS : {}, + PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; }, + LOCALE : 'unknown', + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext : function(string) { + var translated = Documentation.TRANSLATIONS[string]; + if (typeof translated == 'undefined') + return string; + return (typeof translated == 'string') ? translated : translated[0]; + }, + + ngettext : function(singular, plural, n) { + var translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated == 'undefined') + return (n == 1) ? 
singular : plural; + return translated[Documentation.PLURALEXPR(n)]; + }, + + addTranslations : function(catalog) { + for (var key in catalog.messages) + this.TRANSLATIONS[key] = catalog.messages[key]; + this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); + this.LOCALE = catalog.locale; + }, + + /** + * add context elements like header anchor links + */ + addContextElements : function() { + $('div[id] > :header:first').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this headline')). + appendTo(this); + }); + $('dt[id]').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this definition')). + appendTo(this); + }); + }, + + /** + * workaround a firefox stupidity + */ + fixFirefoxAnchorBug : function() { + if (document.location.hash && $.browser.mozilla) + window.setTimeout(function() { + document.location.href += ''; + }, 10); + }, + + /** + * highlight the search words provided in the url in the text + */ + highlightSearchWords : function() { + var params = $.getQueryParameters(); + var terms = (params.highlight) ? 
params.highlight[0].split(/\s+/) : []; + if (terms.length) { + var body = $('div.body'); + window.setTimeout(function() { + $.each(terms, function() { + body.highlightText(this.toLowerCase(), 'highlighted'); + }); + }, 10); + $('') + .appendTo($('#searchbox')); + } + }, + + /** + * init the domain index toggle buttons + */ + initIndexTable : function() { + var togglers = $('img.toggler').click(function() { + var src = $(this).attr('src'); + var idnum = $(this).attr('id').substr(7); + $('tr.cg-' + idnum).toggle(); + if (src.substr(-9) == 'minus.png') + $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); + else + $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); + }).css('display', ''); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { + togglers.click(); + } + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords : function() { + $('#searchbox .highlight-link').fadeOut(300); + $('span.highlighted').removeClass('highlighted'); + }, + + /** + * make the url absolute + */ + makeURL : function(relativeURL) { + return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; + }, + + /** + * get the current relative url + */ + getCurrentURL : function() { + var path = document.location.pathname; + var parts = path.split(/\//); + $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { + if (this == '..') + parts.pop(); + }); + var url = parts.join('/'); + return path.substring(url.lastIndexOf('/') + 1, path.length - 1); + } +}; + +// quick alias for translations +_ = Documentation.gettext; + +$(document).ready(function() { + Documentation.init(); +}); diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/down-pressed.png Binary file DVN-web/installer/dvninstall/doc/guides/_static/down-pressed.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/down.png Binary file DVN-web/installer/dvninstall/doc/guides/_static/down.png has changed diff -r 
dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/file.png Binary file DVN-web/installer/dvninstall/doc/guides/_static/file.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/jquery.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/_static/jquery.js Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,4 @@ +/*! jQuery v1.7.1 jquery.com | jquery.org/license */ +(function(a,b){function cy(a){return f.isWindow(a)?a:a.nodeType===9?a.defaultView||a.parentWindow:!1}function cv(a){if(!ck[a]){var b=c.body,d=f("<"+a+">").appendTo(b),e=d.css("display");d.remove();if(e==="none"||e===""){cl||(cl=c.createElement("iframe"),cl.frameBorder=cl.width=cl.height=0),b.appendChild(cl);if(!cm||!cl.createElement)cm=(cl.contentWindow||cl.contentDocument).document,cm.write((c.compatMode==="CSS1Compat"?"":"")+""),cm.close();d=cm.createElement(a),cm.body.appendChild(d),e=f.css(d,"display"),b.removeChild(cl)}ck[a]=e}return ck[a]}function cu(a,b){var c={};f.each(cq.concat.apply([],cq.slice(0,b)),function(){c[this]=a});return c}function ct(){cr=b}function cs(){setTimeout(ct,0);return cr=f.now()}function cj(){try{return new a.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}}function ci(){try{return new a.XMLHttpRequest}catch(b){}}function cc(a,c){a.dataFilter&&(c=a.dataFilter(c,a.dataType));var d=a.dataTypes,e={},g,h,i=d.length,j,k=d[0],l,m,n,o,p;for(g=1;g0){if(c!=="border")for(;g=0===c})}function S(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function K(){return!0}function J(){return!1}function n(a,b,c){var d=b+"defer",e=b+"queue",g=b+"mark",h=f._data(a,d);h&&(c==="queue"||!f._data(a,e))&&(c==="mark"||!f._data(a,g))&&setTimeout(function(){!f._data(a,e)&&!f._data(a,g)&&(f.removeData(a,d,!0),h.fire())},0)}function m(a){for(var b in a){if(b==="data"&&f.isEmptyObject(a[b]))continue;if(b!=="toJSON")return!1}return!0}function l(a,c,d){if(d===b&&a.nodeType===1){var 
e="data-"+c.replace(k,"-$1").toLowerCase();d=a.getAttribute(e);if(typeof d=="string"){try{d=d==="true"?!0:d==="false"?!1:d==="null"?null:f.isNumeric(d)?parseFloat(d):j.test(d)?f.parseJSON(d):d}catch(g){}f.data(a,c,d)}else d=b}return d}function h(a){var b=g[a]={},c,d;a=a.split(/\s+/);for(c=0,d=a.length;c)[^>]*$|#([\w\-]*)$)/,j=/\S/,k=/^\s+/,l=/\s+$/,m=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,n=/^[\],:{}\s]*$/,o=/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,p=/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,q=/(?:^|:|,)(?:\s*\[)+/g,r=/(webkit)[ \/]([\w.]+)/,s=/(opera)(?:.*version)?[ \/]([\w.]+)/,t=/(msie) ([\w.]+)/,u=/(mozilla)(?:.*? rv:([\w.]+))?/,v=/-([a-z]|[0-9])/ig,w=/^-ms-/,x=function(a,b){return(b+"").toUpperCase()},y=d.userAgent,z,A,B,C=Object.prototype.toString,D=Object.prototype.hasOwnProperty,E=Array.prototype.push,F=Array.prototype.slice,G=String.prototype.trim,H=Array.prototype.indexOf,I={};e.fn=e.prototype={constructor:e,init:function(a,d,f){var g,h,j,k;if(!a)return this;if(a.nodeType){this.context=this[0]=a,this.length=1;return this}if(a==="body"&&!d&&c.body){this.context=c,this[0]=c.body,this.selector=a,this.length=1;return this}if(typeof a=="string"){a.charAt(0)!=="<"||a.charAt(a.length-1)!==">"||a.length<3?g=i.exec(a):g=[null,a,null];if(g&&(g[1]||!d)){if(g[1]){d=d instanceof e?d[0]:d,k=d?d.ownerDocument||d:c,j=m.exec(a),j?e.isPlainObject(d)?(a=[c.createElement(j[1])],e.fn.attr.call(a,d,!0)):a=[k.createElement(j[1])]:(j=e.buildFragment([g[1]],[k]),a=(j.cacheable?e.clone(j.fragment):j.fragment).childNodes);return e.merge(this,a)}h=c.getElementById(g[2]);if(h&&h.parentNode){if(h.id!==g[2])return f.find(a);this.length=1,this[0]=h}this.context=c,this.selector=a;return this}return!d||d.jquery?(d||f).find(a):this.constructor(d).find(a)}if(e.isFunction(a))return f.ready(a);a.selector!==b&&(this.selector=a.selector,this.context=a.context);return e.makeArray(a,this)},selector:"",jquery:"1.7.1",length:0,size:function(){return 
this.length},toArray:function(){return F.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this[this.length+a]:this[a]},pushStack:function(a,b,c){var d=this.constructor();e.isArray(a)?E.apply(d,a):e.merge(d,a),d.prevObject=this,d.context=this.context,b==="find"?d.selector=this.selector+(this.selector?" ":"")+c:b&&(d.selector=this.selector+"."+b+"("+c+")");return d},each:function(a,b){return e.each(this,a,b)},ready:function(a){e.bindReady(),A.add(a);return this},eq:function(a){a=+a;return a===-1?this.slice(a):this.slice(a,a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(F.apply(this,arguments),"slice",F.call(arguments).join(","))},map:function(a){return this.pushStack(e.map(this,function(b,c){return a.call(b,c,b)}))},end:function(){return this.prevObject||this.constructor(null)},push:E,sort:[].sort,splice:[].splice},e.fn.init.prototype=e.fn,e.extend=e.fn.extend=function(){var a,c,d,f,g,h,i=arguments[0]||{},j=1,k=arguments.length,l=!1;typeof i=="boolean"&&(l=i,i=arguments[1]||{},j=2),typeof i!="object"&&!e.isFunction(i)&&(i={}),k===j&&(i=this,--j);for(;j0)return;A.fireWith(c,[e]),e.fn.trigger&&e(c).trigger("ready").off("ready")}},bindReady:function(){if(!A){A=e.Callbacks("once memory");if(c.readyState==="complete")return setTimeout(e.ready,1);if(c.addEventListener)c.addEventListener("DOMContentLoaded",B,!1),a.addEventListener("load",e.ready,!1);else if(c.attachEvent){c.attachEvent("onreadystatechange",B),a.attachEvent("onload",e.ready);var b=!1;try{b=a.frameElement==null}catch(d){}c.documentElement.doScroll&&b&&J()}}},isFunction:function(a){return e.type(a)==="function"},isArray:Array.isArray||function(a){return e.type(a)==="array"},isWindow:function(a){return a&&typeof a=="object"&&"setInterval"in a},isNumeric:function(a){return!isNaN(parseFloat(a))&&isFinite(a)},type:function(a){return 
a==null?String(a):I[C.call(a)]||"object"},isPlainObject:function(a){if(!a||e.type(a)!=="object"||a.nodeType||e.isWindow(a))return!1;try{if(a.constructor&&!D.call(a,"constructor")&&!D.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}var d;for(d in a);return d===b||D.call(a,d)},isEmptyObject:function(a){for(var b in a)return!1;return!0},error:function(a){throw new Error(a)},parseJSON:function(b){if(typeof b!="string"||!b)return null;b=e.trim(b);if(a.JSON&&a.JSON.parse)return a.JSON.parse(b);if(n.test(b.replace(o,"@").replace(p,"]").replace(q,"")))return(new Function("return "+b))();e.error("Invalid JSON: "+b)},parseXML:function(c){var d,f;try{a.DOMParser?(f=new DOMParser,d=f.parseFromString(c,"text/xml")):(d=new ActiveXObject("Microsoft.XMLDOM"),d.async="false",d.loadXML(c))}catch(g){d=b}(!d||!d.documentElement||d.getElementsByTagName("parsererror").length)&&e.error("Invalid XML: "+c);return d},noop:function(){},globalEval:function(b){b&&j.test(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(w,"ms-").replace(v,x)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,c,d){var f,g=0,h=a.length,i=h===b||e.isFunction(a);if(d){if(i){for(f in a)if(c.apply(a[f],d)===!1)break}else for(;g0&&a[0]&&a[j-1]||j===0||e.isArray(a));if(k)for(;i1?i.call(arguments,0):b,j.notifyWith(k,e)}}function l(a){return function(c){b[a]=arguments.length>1?i.call(arguments,0):c,--g||j.resolveWith(j,b)}}var b=i.call(arguments,0),c=0,d=b.length,e=Array(d),g=d,h=d,j=d<=1&&a&&f.isFunction(a.promise)?a:f.Deferred(),k=j.promise();if(d>1){for(;c
a",d=q.getElementsByTagName("*"),e=q.getElementsByTagName("a")[0];if(!d||!d.length||!e)return{};g=c.createElement("select"),h=g.appendChild(c.createElement("option")),i=q.getElementsByTagName("input")[0],b={leadingWhitespace:q.firstChild.nodeType===3,tbody:!q.getElementsByTagName("tbody").length,htmlSerialize:!!q.getElementsByTagName("link").length,style:/top/.test(e.getAttribute("style")),hrefNormalized:e.getAttribute("href")==="/a",opacity:/^0.55/.test(e.style.opacity),cssFloat:!!e.style.cssFloat,checkOn:i.value==="on",optSelected:h.selected,getSetAttribute:q.className!=="t",enctype:!!c.createElement("form").enctype,html5Clone:c.createElement("nav").cloneNode(!0).outerHTML!=="<:nav>",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0},i.checked=!0,b.noCloneChecked=i.cloneNode(!0).checked,g.disabled=!0,b.optDisabled=!h.disabled;try{delete q.test}catch(s){b.deleteExpando=!1}!q.addEventListener&&q.attachEvent&&q.fireEvent&&(q.attachEvent("onclick",function(){b.noCloneEvent=!1}),q.cloneNode(!0).fireEvent("onclick")),i=c.createElement("input"),i.value="t",i.setAttribute("type","radio"),b.radioValue=i.value==="t",i.setAttribute("checked","checked"),q.appendChild(i),k=c.createDocumentFragment(),k.appendChild(q.lastChild),b.checkClone=k.cloneNode(!0).cloneNode(!0).lastChild.checked,b.appendChecked=i.checked,k.removeChild(i),k.appendChild(q),q.innerHTML="",a.getComputedStyle&&(j=c.createElement("div"),j.style.width="0",j.style.marginRight="0",q.style.width="2px",q.appendChild(j),b.reliableMarginRight=(parseInt((a.getComputedStyle(j,null)||{marginRight:0}).marginRight,10)||0)===0);if(q.attachEvent)for(o in{submit:1,change:1,focusin:1})n="on"+o,p=n in q,p||(q.setAttribute(n,"return;"),p=typeof q[n]=="function"),b[o+"Bubbles"]=p;k.removeChild(q),k=g=h=j=q=i=null,f(function(){var 
a,d,e,g,h,i,j,k,m,n,o,r=c.getElementsByTagName("body")[0];!r||(j=1,k="position:absolute;top:0;left:0;width:1px;height:1px;margin:0;",m="visibility:hidden;border:0;",n="style='"+k+"border:5px solid #000;padding:0;'",o="
"+""+"
",a=c.createElement("div"),a.style.cssText=m+"width:0;height:0;position:static;top:0;margin-top:"+j+"px",r.insertBefore(a,r.firstChild),q=c.createElement("div"),a.appendChild(q),q.innerHTML="
t
",l=q.getElementsByTagName("td"),p=l[0].offsetHeight===0,l[0].style.display="",l[1].style.display="none",b.reliableHiddenOffsets=p&&l[0].offsetHeight===0,q.innerHTML="",q.style.width=q.style.paddingLeft="1px",f.boxModel=b.boxModel=q.offsetWidth===2,typeof q.style.zoom!="undefined"&&(q.style.display="inline",q.style.zoom=1,b.inlineBlockNeedsLayout=q.offsetWidth===2,q.style.display="",q.innerHTML="
",b.shrinkWrapBlocks=q.offsetWidth!==2),q.style.cssText=k+m,q.innerHTML=o,d=q.firstChild,e=d.firstChild,h=d.nextSibling.firstChild.firstChild,i={doesNotAddBorder:e.offsetTop!==5,doesAddBorderForTableAndCells:h.offsetTop===5},e.style.position="fixed",e.style.top="20px",i.fixedPosition=e.offsetTop===20||e.offsetTop===15,e.style.position=e.style.top="",d.style.overflow="hidden",d.style.position="relative",i.subtractsBorderForOverflowNotVisible=e.offsetTop===-5,i.doesNotIncludeMarginInBodyOffset=r.offsetTop!==j,r.removeChild(a),q=a=null,f.extend(b,i))});return b}();var j=/^(?:\{.*\}|\[.*\])$/,k=/([A-Z])/g;f.extend({cache:{},uuid:0,expando:"jQuery"+(f.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(a){a=a.nodeType?f.cache[a[f.expando]]:a[f.expando];return!!a&&!m(a)},data:function(a,c,d,e){if(!!f.acceptData(a)){var g,h,i,j=f.expando,k=typeof c=="string",l=a.nodeType,m=l?f.cache:a,n=l?a[j]:a[j]&&j,o=c==="events";if((!n||!m[n]||!o&&!e&&!m[n].data)&&k&&d===b)return;n||(l?a[j]=n=++f.uuid:n=j),m[n]||(m[n]={},l||(m[n].toJSON=f.noop));if(typeof c=="object"||typeof c=="function")e?m[n]=f.extend(m[n],c):m[n].data=f.extend(m[n].data,c);g=h=m[n],e||(h.data||(h.data={}),h=h.data),d!==b&&(h[f.camelCase(c)]=d);if(o&&!h[c])return g.events;k?(i=h[c],i==null&&(i=h[f.camelCase(c)])):i=h;return i}},removeData:function(a,b,c){if(!!f.acceptData(a)){var d,e,g,h=f.expando,i=a.nodeType,j=i?f.cache:a,k=i?a[h]:h;if(!j[k])return;if(b){d=c?j[k]:j[k].data;if(d){f.isArray(b)||(b in d?b=[b]:(b=f.camelCase(b),b in d?b=[b]:b=b.split(" ")));for(e=0,g=b.length;e-1)return!0;return!1},val:function(a){var c,d,e,g=this[0];{if(!!arguments.length){e=f.isFunction(a);return this.each(function(d){var g=f(this),h;if(this.nodeType===1){e?h=a.call(this,d,g.val()):h=a,h==null?h="":typeof h=="number"?h+="":f.isArray(h)&&(h=f.map(h,function(a){return 
a==null?"":a+""})),c=f.valHooks[this.nodeName.toLowerCase()]||f.valHooks[this.type];if(!c||!("set"in c)||c.set(this,h,"value")===b)this.value=h}})}if(g){c=f.valHooks[g.nodeName.toLowerCase()]||f.valHooks[g.type];if(c&&"get"in c&&(d=c.get(g,"value"))!==b)return d;d=g.value;return typeof d=="string"?d.replace(q,""):d==null?"":d}}}}),f.extend({valHooks:{option:{get:function(a){var b=a.attributes.value;return!b||b.specified?a.value:a.text}},select:{get:function(a){var b,c,d,e,g=a.selectedIndex,h=[],i=a.options,j=a.type==="select-one";if(g<0)return null;c=j?g:0,d=j?g+1:i.length;for(;c=0}),c.length||(a.selectedIndex=-1);return c}}},attrFn:{val:!0,css:!0,html:!0,text:!0,data:!0,width:!0,height:!0,offset:!0},attr:function(a,c,d,e){var g,h,i,j=a.nodeType;if(!!a&&j!==3&&j!==8&&j!==2){if(e&&c in f.attrFn)return f(a)[c](d);if(typeof a.getAttribute=="undefined")return f.prop(a,c,d);i=j!==1||!f.isXMLDoc(a),i&&(c=c.toLowerCase(),h=f.attrHooks[c]||(u.test(c)?x:w));if(d!==b){if(d===null){f.removeAttr(a,c);return}if(h&&"set"in h&&i&&(g=h.set(a,d,c))!==b)return g;a.setAttribute(c,""+d);return d}if(h&&"get"in h&&i&&(g=h.get(a,c))!==null)return g;g=a.getAttribute(c);return g===null?b:g}},removeAttr:function(a,b){var c,d,e,g,h=0;if(b&&a.nodeType===1){d=b.toLowerCase().split(p),g=d.length;for(;h=0}})});var z=/^(?:textarea|input|select)$/i,A=/^([^\.]*)?(?:\.(.+))?$/,B=/\bhover(\.\S+)?\b/,C=/^key/,D=/^(?:mouse|contextmenu)|click/,E=/^(?:focusinfocus|focusoutblur)$/,F=/^(\w*)(?:#([\w\-]+))?(?:\.([\w\-]+))?$/,G=function(a){var b=F.exec(a);b&&(b[1]=(b[1]||"").toLowerCase(),b[3]=b[3]&&new RegExp("(?:^|\\s)"+b[3]+"(?:\\s|$)"));return b},H=function(a,b){var c=a.attributes||{};return(!b[1]||a.nodeName.toLowerCase()===b[1])&&(!b[2]||(c.id||{}).value===b[2])&&(!b[3]||b[3].test((c["class"]||{}).value))},I=function(a){return f.event.special.hover?a:a.replace(B,"mouseenter$1 mouseleave$1")}; +f.event={add:function(a,c,d,e,g){var 
h,i,j,k,l,m,n,o,p,q,r,s;if(!(a.nodeType===3||a.nodeType===8||!c||!d||!(h=f._data(a)))){d.handler&&(p=d,d=p.handler),d.guid||(d.guid=f.guid++),j=h.events,j||(h.events=j={}),i=h.handle,i||(h.handle=i=function(a){return typeof f!="undefined"&&(!a||f.event.triggered!==a.type)?f.event.dispatch.apply(i.elem,arguments):b},i.elem=a),c=f.trim(I(c)).split(" ");for(k=0;k=0&&(h=h.slice(0,-1),k=!0),h.indexOf(".")>=0&&(i=h.split("."),h=i.shift(),i.sort());if((!e||f.event.customEvent[h])&&!f.event.global[h])return;c=typeof c=="object"?c[f.expando]?c:new f.Event(h,c):new f.Event(h),c.type=h,c.isTrigger=!0,c.exclusive=k,c.namespace=i.join("."),c.namespace_re=c.namespace?new RegExp("(^|\\.)"+i.join("\\.(?:.*\\.)?")+"(\\.|$)"):null,o=h.indexOf(":")<0?"on"+h:"";if(!e){j=f.cache;for(l in j)j[l].events&&j[l].events[h]&&f.event.trigger(c,d,j[l].handle.elem,!0);return}c.result=b,c.target||(c.target=e),d=d!=null?f.makeArray(d):[],d.unshift(c),p=f.event.special[h]||{};if(p.trigger&&p.trigger.apply(e,d)===!1)return;r=[[e,p.bindType||h]];if(!g&&!p.noBubble&&!f.isWindow(e)){s=p.delegateType||h,m=E.test(s+h)?e:e.parentNode,n=null;for(;m;m=m.parentNode)r.push([m,s]),n=m;n&&n===e.ownerDocument&&r.push([n.defaultView||n.parentWindow||a,s])}for(l=0;le&&i.push({elem:this,matches:d.slice(e)});for(j=0;j0?this.on(b,null,a,c):this.trigger(b)},f.attrFn&&(f.attrFn[b]=!0),C.test(b)&&(f.event.fixHooks[b]=f.event.keyHooks),D.test(b)&&(f.event.fixHooks[b]=f.event.mouseHooks)}),function(){function x(a,b,c,e,f,g){for(var h=0,i=e.length;h0){k=j;break}}j=j[a]}e[h]=k}}}function w(a,b,c,e,f,g){for(var h=0,i=e.length;h+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,d="sizcache"+(Math.random()+"").replace(".",""),e=0,g=Object.prototype.toString,h=!1,i=!0,j=/\\/g,k=/\r\n/g,l=/\W/;[0,0].sort(function(){i=!1;return 0});var m=function(b,d,e,f){e=e||[],d=d||c;var h=d;if(d.nodeType!==1&&d.nodeType!==9)return[];if(!b||typeof b!="string")return e;var 
i,j,k,l,n,q,r,t,u=!0,v=m.isXML(d),w=[],x=b;do{a.exec(""),i=a.exec(x);if(i){x=i[3],w.push(i[1]);if(i[2]){l=i[3];break}}}while(i);if(w.length>1&&p.exec(b))if(w.length===2&&o.relative[w[0]])j=y(w[0]+w[1],d,f);else{j=o.relative[w[0]]?[d]:m(w.shift(),d);while(w.length)b=w.shift(),o.relative[b]&&(b+=w.shift()),j=y(b,j,f)}else{!f&&w.length>1&&d.nodeType===9&&!v&&o.match.ID.test(w[0])&&!o.match.ID.test(w[w.length-1])&&(n=m.find(w.shift(),d,v),d=n.expr?m.filter(n.expr,n.set)[0]:n.set[0]);if(d){n=f?{expr:w.pop(),set:s(f)}:m.find(w.pop(),w.length===1&&(w[0]==="~"||w[0]==="+")&&d.parentNode?d.parentNode:d,v),j=n.expr?m.filter(n.expr,n.set):n.set,w.length>0?k=s(j):u=!1;while(w.length)q=w.pop(),r=q,o.relative[q]?r=w.pop():q="",r==null&&(r=d),o.relative[q](k,r,v)}else k=w=[]}k||(k=j),k||m.error(q||b);if(g.call(k)==="[object Array]")if(!u)e.push.apply(e,k);else if(d&&d.nodeType===1)for(t=0;k[t]!=null;t++)k[t]&&(k[t]===!0||k[t].nodeType===1&&m.contains(d,k[t]))&&e.push(j[t]);else for(t=0;k[t]!=null;t++)k[t]&&k[t].nodeType===1&&e.push(j[t]);else s(k,e);l&&(m(l,h,e,f),m.uniqueSort(e));return e};m.uniqueSort=function(a){if(u){h=i,a.sort(u);if(h)for(var b=1;b0},m.find=function(a,b,c){var d,e,f,g,h,i;if(!a)return[];for(e=0,f=o.order.length;e":function(a,b){var c,d=typeof b=="string",e=0,f=a.length;if(d&&!l.test(b)){b=b.toLowerCase();for(;e=0)?c||d.push(h):c&&(b[g]=!1));return!1},ID:function(a){return a[1].replace(j,"")},TAG:function(a,b){return a[1].replace(j,"").toLowerCase()},CHILD:function(a){if(a[1]==="nth"){a[2]||m.error(a[0]),a[2]=a[2].replace(/^\+|\s*/g,"");var b=/(-?)(\d*)(?:n([+\-]?\d*))?/.exec(a[2]==="even"&&"2n"||a[2]==="odd"&&"2n+1"||!/\D/.test(a[2])&&"0n+"+a[2]||a[2]);a[2]=b[1]+(b[2]||1)-0,a[3]=b[3]-0}else a[2]&&m.error(a[0]);a[0]=e++;return a},ATTR:function(a,b,c,d,e,f){var g=a[1]=a[1].replace(j,"");!f&&o.attrMap[g]&&(a[1]=o.attrMap[g]),a[4]=(a[4]||a[5]||"").replace(j,""),a[2]==="~="&&(a[4]=" "+a[4]+" ");return 
a},PSEUDO:function(b,c,d,e,f){if(b[1]==="not")if((a.exec(b[3])||"").length>1||/^\w/.test(b[3]))b[3]=m(b[3],null,null,c);else{var g=m.filter(b[3],c,d,!0^f);d||e.push.apply(e,g);return!1}else if(o.match.POS.test(b[0])||o.match.CHILD.test(b[0]))return!0;return b},POS:function(a){a.unshift(!0);return a}},filters:{enabled:function(a){return a.disabled===!1&&a.type!=="hidden"},disabled:function(a){return a.disabled===!0},checked:function(a){return a.checked===!0},selected:function(a){a.parentNode&&a.parentNode.selectedIndex;return a.selected===!0},parent:function(a){return!!a.firstChild},empty:function(a){return!a.firstChild},has:function(a,b,c){return!!m(c[3],a).length},header:function(a){return/h\d/i.test(a.nodeName)},text:function(a){var b=a.getAttribute("type"),c=a.type;return a.nodeName.toLowerCase()==="input"&&"text"===c&&(b===c||b===null)},radio:function(a){return a.nodeName.toLowerCase()==="input"&&"radio"===a.type},checkbox:function(a){return a.nodeName.toLowerCase()==="input"&&"checkbox"===a.type},file:function(a){return a.nodeName.toLowerCase()==="input"&&"file"===a.type},password:function(a){return a.nodeName.toLowerCase()==="input"&&"password"===a.type},submit:function(a){var b=a.nodeName.toLowerCase();return(b==="input"||b==="button")&&"submit"===a.type},image:function(a){return a.nodeName.toLowerCase()==="input"&&"image"===a.type},reset:function(a){var b=a.nodeName.toLowerCase();return(b==="input"||b==="button")&&"reset"===a.type},button:function(a){var b=a.nodeName.toLowerCase();return b==="input"&&"button"===a.type||b==="button"},input:function(a){return/input|select|textarea|button/i.test(a.nodeName)},focus:function(a){return a===a.ownerDocument.activeElement}},setFilters:{first:function(a,b){return b===0},last:function(a,b,c,d){return b===d.length-1},even:function(a,b){return b%2===0},odd:function(a,b){return b%2===1},lt:function(a,b,c){return bc[3]-0},nth:function(a,b,c){return c[3]-0===b},eq:function(a,b,c){return 
c[3]-0===b}},filter:{PSEUDO:function(a,b,c,d){var e=b[1],f=o.filters[e];if(f)return f(a,c,b,d);if(e==="contains")return(a.textContent||a.innerText||n([a])||"").indexOf(b[3])>=0;if(e==="not"){var g=b[3];for(var h=0,i=g.length;h=0}},ID:function(a,b){return a.nodeType===1&&a.getAttribute("id")===b},TAG:function(a,b){return b==="*"&&a.nodeType===1||!!a.nodeName&&a.nodeName.toLowerCase()===b},CLASS:function(a,b){return(" "+(a.className||a.getAttribute("class"))+" ").indexOf(b)>-1},ATTR:function(a,b){var c=b[1],d=m.attr?m.attr(a,c):o.attrHandle[c]?o.attrHandle[c](a):a[c]!=null?a[c]:a.getAttribute(c),e=d+"",f=b[2],g=b[4];return d==null?f==="!=":!f&&m.attr?d!=null:f==="="?e===g:f==="*="?e.indexOf(g)>=0:f==="~="?(" "+e+" ").indexOf(g)>=0:g?f==="!="?e!==g:f==="^="?e.indexOf(g)===0:f==="$="?e.substr(e.length-g.length)===g:f==="|="?e===g||e.substr(0,g.length+1)===g+"-":!1:e&&d!==!1},POS:function(a,b,c,d){var e=b[2],f=o.setFilters[e];if(f)return f(a,c,b,d)}}},p=o.match.POS,q=function(a,b){return"\\"+(b-0+1)};for(var r in o.match)o.match[r]=new RegExp(o.match[r].source+/(?![^\[]*\])(?![^\(]*\))/.source),o.leftMatch[r]=new RegExp(/(^(?:.|\r|\n)*?)/.source+o.match[r].source.replace(/\\(\d+)/g,q));var s=function(a,b){a=Array.prototype.slice.call(a,0);if(b){b.push.apply(b,a);return b}return a};try{Array.prototype.slice.call(c.documentElement.childNodes,0)[0].nodeType}catch(t){s=function(a,b){var c=0,d=b||[];if(g.call(a)==="[object Array]")Array.prototype.push.apply(d,a);else if(typeof a.length=="number")for(var e=a.length;c",e.insertBefore(a,e.firstChild),c.getElementById(d)&&(o.find.ID=function(a,c,d){if(typeof c.getElementById!="undefined"&&!d){var e=c.getElementById(a[1]);return e?e.id===a[1]||typeof e.getAttributeNode!="undefined"&&e.getAttributeNode("id").nodeValue===a[1]?[e]:b:[]}},o.filter.ID=function(a,b){var c=typeof a.getAttributeNode!="undefined"&&a.getAttributeNode("id");return a.nodeType===1&&c&&c.nodeValue===b}),e.removeChild(a),e=a=null}(),function(){var 
a=c.createElement("div");a.appendChild(c.createComment("")),a.getElementsByTagName("*").length>0&&(o.find.TAG=function(a,b){var c=b.getElementsByTagName(a[1]);if(a[1]==="*"){var d=[];for(var e=0;c[e];e++)c[e].nodeType===1&&d.push(c[e]);c=d}return c}),a.innerHTML="",a.firstChild&&typeof a.firstChild.getAttribute!="undefined"&&a.firstChild.getAttribute("href")!=="#"&&(o.attrHandle.href=function(a){return a.getAttribute("href",2)}),a=null}(),c.querySelectorAll&&function(){var a=m,b=c.createElement("div"),d="__sizzle__";b.innerHTML="

";if(!b.querySelectorAll||b.querySelectorAll(".TEST").length!==0){m=function(b,e,f,g){e=e||c;if(!g&&!m.isXML(e)){var h=/^(\w+$)|^\.([\w\-]+$)|^#([\w\-]+$)/.exec(b);if(h&&(e.nodeType===1||e.nodeType===9)){if(h[1])return s(e.getElementsByTagName(b),f);if(h[2]&&o.find.CLASS&&e.getElementsByClassName)return s(e.getElementsByClassName(h[2]),f)}if(e.nodeType===9){if(b==="body"&&e.body)return s([e.body],f);if(h&&h[3]){var i=e.getElementById(h[3]);if(!i||!i.parentNode)return s([],f);if(i.id===h[3])return s([i],f)}try{return s(e.querySelectorAll(b),f)}catch(j){}}else if(e.nodeType===1&&e.nodeName.toLowerCase()!=="object"){var k=e,l=e.getAttribute("id"),n=l||d,p=e.parentNode,q=/^\s*[+~]/.test(b);l?n=n.replace(/'/g,"\\$&"):e.setAttribute("id",n),q&&p&&(e=e.parentNode);try{if(!q||p)return s(e.querySelectorAll("[id='"+n+"'] "+b),f)}catch(r){}finally{l||k.removeAttribute("id")}}}return a(b,e,f,g)};for(var e in a)m[e]=a[e];b=null}}(),function(){var a=c.documentElement,b=a.matchesSelector||a.mozMatchesSelector||a.webkitMatchesSelector||a.msMatchesSelector;if(b){var d=!b.call(c.createElement("div"),"div"),e=!1;try{b.call(c.documentElement,"[test!='']:sizzle")}catch(f){e=!0}m.matchesSelector=function(a,c){c=c.replace(/\=\s*([^'"\]]*)\s*\]/g,"='$1']");if(!m.isXML(a))try{if(e||!o.match.PSEUDO.test(c)&&!/!=/.test(c)){var f=b.call(a,c);if(f||!d||a.document&&a.document.nodeType!==11)return f}}catch(g){}return m(c,null,null,[a]).length>0}}}(),function(){var a=c.createElement("div");a.innerHTML="
";if(!!a.getElementsByClassName&&a.getElementsByClassName("e").length!==0){a.lastChild.className="e";if(a.getElementsByClassName("e").length===1)return;o.order.splice(1,0,"CLASS"),o.find.CLASS=function(a,b,c){if(typeof b.getElementsByClassName!="undefined"&&!c)return b.getElementsByClassName(a[1])},a=null}}(),c.documentElement.contains?m.contains=function(a,b){return a!==b&&(a.contains?a.contains(b):!0)}:c.documentElement.compareDocumentPosition?m.contains=function(a,b){return!!(a.compareDocumentPosition(b)&16)}:m.contains=function(){return!1},m.isXML=function(a){var b=(a?a.ownerDocument||a:0).documentElement;return b?b.nodeName!=="HTML":!1};var y=function(a,b,c){var d,e=[],f="",g=b.nodeType?[b]:b;while(d=o.match.PSEUDO.exec(a))f+=d[0],a=a.replace(o.match.PSEUDO,"");a=o.relative[a]?a+"*":a;for(var h=0,i=g.length;h0)for(h=g;h=0:f.filter(a,this).length>0:this.filter(a).length>0)},closest:function(a,b){var c=[],d,e,g=this[0];if(f.isArray(a)){var h=1;while(g&&g.ownerDocument&&g!==b){for(d=0;d-1:f.find.matchesSelector(g,a)){c.push(g);break}g=g.parentNode;if(!g||!g.ownerDocument||g===b||g.nodeType===11)break}}c=c.length>1?f.unique(c):c;return this.pushStack(c,"closest",a)},index:function(a){if(!a)return this[0]&&this[0].parentNode?this.prevAll().length:-1;if(typeof a=="string")return f.inArray(this[0],f(a));return f.inArray(a.jquery?a[0]:a,this)},add:function(a,b){var c=typeof a=="string"?f(a,b):f.makeArray(a&&a.nodeType?[a]:a),d=f.merge(this.get(),c);return this.pushStack(S(c[0])||S(d[0])?d:f.unique(d))},andSelf:function(){return this.add(this.prevObject)}}),f.each({parent:function(a){var b=a.parentNode;return b&&b.nodeType!==11?b:null},parents:function(a){return f.dir(a,"parentNode")},parentsUntil:function(a,b,c){return f.dir(a,"parentNode",c)},next:function(a){return f.nth(a,2,"nextSibling")},prev:function(a){return f.nth(a,2,"previousSibling")},nextAll:function(a){return f.dir(a,"nextSibling")},prevAll:function(a){return 
f.dir(a,"previousSibling")},nextUntil:function(a,b,c){return f.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return f.dir(a,"previousSibling",c)},siblings:function(a){return f.sibling(a.parentNode.firstChild,a)},children:function(a){return f.sibling(a.firstChild)},contents:function(a){return f.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:f.makeArray(a.childNodes)}},function(a,b){f.fn[a]=function(c,d){var e=f.map(this,b,c);L.test(a)||(d=c),d&&typeof d=="string"&&(e=f.filter(d,e)),e=this.length>1&&!R[a]?f.unique(e):e,(this.length>1||N.test(d))&&M.test(a)&&(e=e.reverse());return this.pushStack(e,a,P.call(arguments).join(","))}}),f.extend({filter:function(a,b,c){c&&(a=":not("+a+")");return b.length===1?f.find.matchesSelector(b[0],a)?[b[0]]:[]:f.find.matches(a,b)},dir:function(a,c,d){var e=[],g=a[c];while(g&&g.nodeType!==9&&(d===b||g.nodeType!==1||!f(g).is(d)))g.nodeType===1&&e.push(g),g=g[c];return e},nth:function(a,b,c,d){b=b||1;var e=0;for(;a;a=a[c])if(a.nodeType===1&&++e===b)break;return a},sibling:function(a,b){var c=[];for(;a;a=a.nextSibling)a.nodeType===1&&a!==b&&c.push(a);return c}});var V="abbr|article|aside|audio|canvas|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",W=/ jQuery\d+="(?:\d+|null)"/g,X=/^\s+/,Y=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,Z=/<([\w:]+)/,$=/",""],legend:[1,"
","
"],thead:[1,"","
"],tr:[2,"","
"],td:[3,"","
"],col:[2,"","
"],area:[1,"",""],_default:[0,"",""]},bh=U(c);bg.optgroup=bg.option,bg.tbody=bg.tfoot=bg.colgroup=bg.caption=bg.thead,bg.th=bg.td,f.support.htmlSerialize||(bg._default=[1,"div
","
"]),f.fn.extend({text:function(a){if(f.isFunction(a))return this.each(function(b){var c=f(this);c.text(a.call(this,b,c.text()))});if(typeof a!="object"&&a!==b)return this.empty().append((this[0]&&this[0].ownerDocument||c).createTextNode(a));return f.text(this)},wrapAll:function(a){if(f.isFunction(a))return this.each(function(b){f(this).wrapAll(a.call(this,b))});if(this[0]){var b=f(a,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstChild&&a.firstChild.nodeType===1)a=a.firstChild;return a}).append(this)}return this},wrapInner:function(a){if(f.isFunction(a))return this.each(function(b){f(this).wrapInner(a.call(this,b))});return this.each(function(){var b=f(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=f.isFunction(a);return this.each(function(c){f(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){f.nodeName(this,"body")||f(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(a){this.nodeType===1&&this.appendChild(a)})},prepend:function(){return this.domManip(arguments,!0,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this)});if(arguments.length){var a=f.clean(arguments);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this.nextSibling)});if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,f.clean(arguments));return a}},remove:function(a,b){for(var c=0,d;(d=this[c])!=null;c++)if(!a||f.filter(a,[d]).length)!b&&d.nodeType===1&&(f.cleanData(d.getElementsByTagName("*")),f.cleanData([d])),d.parentNode&&d.parentNode.removeChild(d);return 
this},empty:function() +{for(var a=0,b;(b=this[a])!=null;a++){b.nodeType===1&&f.cleanData(b.getElementsByTagName("*"));while(b.firstChild)b.removeChild(b.firstChild)}return this},clone:function(a,b){a=a==null?!1:a,b=b==null?a:b;return this.map(function(){return f.clone(this,a,b)})},html:function(a){if(a===b)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(W,""):null;if(typeof a=="string"&&!ba.test(a)&&(f.support.leadingWhitespace||!X.test(a))&&!bg[(Z.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Y,"<$1>");try{for(var c=0,d=this.length;c1&&l0?this.clone(!0):this).get();f(e[h])[b](j),d=d.concat(j)}return this.pushStack(d,a,e.selector)}}),f.extend({clone:function(a,b,c){var d,e,g,h=f.support.html5Clone||!bc.test("<"+a.nodeName)?a.cloneNode(!0):bo(a);if((!f.support.noCloneEvent||!f.support.noCloneChecked)&&(a.nodeType===1||a.nodeType===11)&&!f.isXMLDoc(a)){bk(a,h),d=bl(a),e=bl(h);for(g=0;d[g];++g)e[g]&&bk(d[g],e[g])}if(b){bj(a,h);if(c){d=bl(a),e=bl(h);for(g=0;d[g];++g)bj(d[g],e[g])}}d=e=null;return h},clean:function(a,b,d,e){var g;b=b||c,typeof b.createElement=="undefined"&&(b=b.ownerDocument||b[0]&&b[0].ownerDocument||c);var h=[],i;for(var j=0,k;(k=a[j])!=null;j++){typeof k=="number"&&(k+="");if(!k)continue;if(typeof k=="string")if(!_.test(k))k=b.createTextNode(k);else{k=k.replace(Y,"<$1>");var l=(Z.exec(k)||["",""])[1].toLowerCase(),m=bg[l]||bg._default,n=m[0],o=b.createElement("div");b===c?bh.appendChild(o):U(b).appendChild(o),o.innerHTML=m[1]+k+m[2];while(n--)o=o.lastChild;if(!f.support.tbody){var p=$.test(k),q=l==="table"&&!p?o.firstChild&&o.firstChild.childNodes:m[1]===""&&!p?o.childNodes:[];for(i=q.length-1;i>=0;--i)f.nodeName(q[i],"tbody")&&!q[i].childNodes.length&&q[i].parentNode.removeChild(q[i])}!f.support.leadingWhitespace&&X.test(k)&&o.insertBefore(b.createTextNode(X.exec(k)[0]),o.firstChild),k=o.childNodes}var r;if(!f.support.appendChecked)if(k[0]&&typeof (r=k.length)=="number")for(i=0;i=0)return 
b+"px"}}}),f.support.opacity||(f.cssHooks.opacity={get:function(a,b){return br.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?parseFloat(RegExp.$1)/100+"":b?"1":""},set:function(a,b){var c=a.style,d=a.currentStyle,e=f.isNumeric(b)?"alpha(opacity="+b*100+")":"",g=d&&d.filter||c.filter||"";c.zoom=1;if(b>=1&&f.trim(g.replace(bq,""))===""){c.removeAttribute("filter");if(d&&!d.filter)return}c.filter=bq.test(g)?g.replace(bq,e):g+" "+e}}),f(function(){f.support.reliableMarginRight||(f.cssHooks.marginRight={get:function(a,b){var c;f.swap(a,{display:"inline-block"},function(){b?c=bz(a,"margin-right","marginRight"):c=a.style.marginRight});return c}})}),c.defaultView&&c.defaultView.getComputedStyle&&(bA=function(a,b){var c,d,e;b=b.replace(bs,"-$1").toLowerCase(),(d=a.ownerDocument.defaultView)&&(e=d.getComputedStyle(a,null))&&(c=e.getPropertyValue(b),c===""&&!f.contains(a.ownerDocument.documentElement,a)&&(c=f.style(a,b)));return c}),c.documentElement.currentStyle&&(bB=function(a,b){var c,d,e,f=a.currentStyle&&a.currentStyle[b],g=a.style;f===null&&g&&(e=g[b])&&(f=e),!bt.test(f)&&bu.test(f)&&(c=g.left,d=a.runtimeStyle&&a.runtimeStyle.left,d&&(a.runtimeStyle.left=a.currentStyle.left),g.left=b==="fontSize"?"1em":f||0,f=g.pixelLeft+"px",g.left=c,d&&(a.runtimeStyle.left=d));return f===""?"auto":f}),bz=bA||bB,f.expr&&f.expr.filters&&(f.expr.filters.hidden=function(a){var b=a.offsetWidth,c=a.offsetHeight;return b===0&&c===0||!f.support.reliableHiddenOffsets&&(a.style&&a.style.display||f.css(a,"display"))==="none"},f.expr.filters.visible=function(a){return!f.expr.filters.hidden(a)});var bD=/%20/g,bE=/\[\]$/,bF=/\r?\n/g,bG=/#.*$/,bH=/^(.*?):[ 
\t]*([^\r\n]*)\r?$/mg,bI=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,bJ=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,bK=/^(?:GET|HEAD)$/,bL=/^\/\//,bM=/\?/,bN=/)<[^<]*)*<\/script>/gi,bO=/^(?:select|textarea)/i,bP=/\s+/,bQ=/([?&])_=[^&]*/,bR=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+))?)?/,bS=f.fn.load,bT={},bU={},bV,bW,bX=["*/"]+["*"];try{bV=e.href}catch(bY){bV=c.createElement("a"),bV.href="",bV=bV.href}bW=bR.exec(bV.toLowerCase())||[],f.fn.extend({load:function(a,c,d){if(typeof a!="string"&&bS)return bS.apply(this,arguments);if(!this.length)return this;var e=a.indexOf(" ");if(e>=0){var g=a.slice(e,a.length);a=a.slice(0,e)}var h="GET";c&&(f.isFunction(c)?(d=c,c=b):typeof c=="object"&&(c=f.param(c,f.ajaxSettings.traditional),h="POST"));var i=this;f.ajax({url:a,type:h,dataType:"html",data:c,complete:function(a,b,c){c=a.responseText,a.isResolved()&&(a.done(function(a){c=a}),i.html(g?f("
").append(c.replace(bN,"")).find(g):c)),d&&i.each(d,[c,b,a])}});return this},serialize:function(){return f.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?f.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||bO.test(this.nodeName)||bI.test(this.type))}).map(function(a,b){var c=f(this).val();return c==null?null:f.isArray(c)?f.map(c,function(a,c){return{name:b.name,value:a.replace(bF,"\r\n")}}):{name:b.name,value:c.replace(bF,"\r\n")}}).get()}}),f.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){f.fn[b]=function(a){return this.on(b,a)}}),f.each(["get","post"],function(a,c){f[c]=function(a,d,e,g){f.isFunction(d)&&(g=g||e,e=d,d=b);return f.ajax({type:c,url:a,data:d,success:e,dataType:g})}}),f.extend({getScript:function(a,c){return f.get(a,b,c,"script")},getJSON:function(a,b,c){return f.get(a,b,c,"json")},ajaxSetup:function(a,b){b?b_(a,f.ajaxSettings):(b=a,a=f.ajaxSettings),b_(a,b);return a},ajaxSettings:{url:bV,isLocal:bJ.test(bW[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":bX},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":a.String,"text html":!0,"text json":f.parseJSON,"text xml":f.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:bZ(bT),ajaxTransport:bZ(bU),ajax:function(a,c){function w(a,c,l,m){if(s!==2){s=2,q&&clearTimeout(q),p=b,n=m||"",v.readyState=a>0?4:0;var o,r,u,w=c,x=l?cb(d,v,l):b,y,z;if(a>=200&&a<300||a===304){if(d.ifModified){if(y=v.getResponseHeader("Last-Modified"))f.lastModified[k]=y;if(z=v.getResponseHeader("Etag"))f.etag[k]=z}if(a===304)w="notmodified",o=!0;else 
try{r=cc(d,x),w="success",o=!0}catch(A){w="parsererror",u=A}}else{u=w;if(!w||a)w="error",a<0&&(a=0)}v.status=a,v.statusText=""+(c||w),o?h.resolveWith(e,[r,w,v]):h.rejectWith(e,[v,w,u]),v.statusCode(j),j=b,t&&g.trigger("ajax"+(o?"Success":"Error"),[v,d,o?r:u]),i.fireWith(e,[v,w]),t&&(g.trigger("ajaxComplete",[v,d]),--f.active||f.event.trigger("ajaxStop"))}}typeof a=="object"&&(c=a,a=b),c=c||{};var d=f.ajaxSetup({},c),e=d.context||d,g=e!==d&&(e.nodeType||e instanceof f)?f(e):f.event,h=f.Deferred(),i=f.Callbacks("once memory"),j=d.statusCode||{},k,l={},m={},n,o,p,q,r,s=0,t,u,v={readyState:0,setRequestHeader:function(a,b){if(!s){var c=a.toLowerCase();a=m[c]=m[c]||a,l[a]=b}return this},getAllResponseHeaders:function(){return s===2?n:null},getResponseHeader:function(a){var c;if(s===2){if(!o){o={};while(c=bH.exec(n))o[c[1].toLowerCase()]=c[2]}c=o[a.toLowerCase()]}return c===b?null:c},overrideMimeType:function(a){s||(d.mimeType=a);return this},abort:function(a){a=a||"abort",p&&p.abort(a),w(0,a);return this}};h.promise(v),v.success=v.done,v.error=v.fail,v.complete=i.add,v.statusCode=function(a){if(a){var b;if(s<2)for(b in a)j[b]=[j[b],a[b]];else b=a[v.status],v.then(b,b)}return this},d.url=((a||d.url)+"").replace(bG,"").replace(bL,bW[1]+"//"),d.dataTypes=f.trim(d.dataType||"*").toLowerCase().split(bP),d.crossDomain==null&&(r=bR.exec(d.url.toLowerCase()),d.crossDomain=!(!r||r[1]==bW[1]&&r[2]==bW[2]&&(r[3]||(r[1]==="http:"?80:443))==(bW[3]||(bW[1]==="http:"?80:443)))),d.data&&d.processData&&typeof d.data!="string"&&(d.data=f.param(d.data,d.traditional)),b$(bT,d,c,v);if(s===2)return!1;t=d.global,d.type=d.type.toUpperCase(),d.hasContent=!bK.test(d.type),t&&f.active++===0&&f.event.trigger("ajaxStart");if(!d.hasContent){d.data&&(d.url+=(bM.test(d.url)?"&":"?")+d.data,delete d.data),k=d.url;if(d.cache===!1){var 
x=f.now(),y=d.url.replace(bQ,"$1_="+x);d.url=y+(y===d.url?(bM.test(d.url)?"&":"?")+"_="+x:"")}}(d.data&&d.hasContent&&d.contentType!==!1||c.contentType)&&v.setRequestHeader("Content-Type",d.contentType),d.ifModified&&(k=k||d.url,f.lastModified[k]&&v.setRequestHeader("If-Modified-Since",f.lastModified[k]),f.etag[k]&&v.setRequestHeader("If-None-Match",f.etag[k])),v.setRequestHeader("Accept",d.dataTypes[0]&&d.accepts[d.dataTypes[0]]?d.accepts[d.dataTypes[0]]+(d.dataTypes[0]!=="*"?", "+bX+"; q=0.01":""):d.accepts["*"]);for(u in d.headers)v.setRequestHeader(u,d.headers[u]);if(d.beforeSend&&(d.beforeSend.call(e,v,d)===!1||s===2)){v.abort();return!1}for(u in{success:1,error:1,complete:1})v[u](d[u]);p=b$(bU,d,c,v);if(!p)w(-1,"No Transport");else{v.readyState=1,t&&g.trigger("ajaxSend",[v,d]),d.async&&d.timeout>0&&(q=setTimeout(function(){v.abort("timeout")},d.timeout));try{s=1,p.send(l,w)}catch(z){if(s<2)w(-1,z);else throw z}}return v},param:function(a,c){var d=[],e=function(a,b){b=f.isFunction(b)?b():b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};c===b&&(c=f.ajaxSettings.traditional);if(f.isArray(a)||a.jquery&&!f.isPlainObject(a))f.each(a,function(){e(this.name,this.value)});else for(var g in a)ca(g,a[g],c,e);return d.join("&").replace(bD,"+")}}),f.extend({active:0,lastModified:{},etag:{}});var cd=f.now(),ce=/(\=)\?(&|$)|\?\?/i;f.ajaxSetup({jsonp:"callback",jsonpCallback:function(){return f.expando+"_"+cd++}}),f.ajaxPrefilter("json jsonp",function(b,c,d){var e=b.contentType==="application/x-www-form-urlencoded"&&typeof b.data=="string";if(b.dataTypes[0]==="jsonp"||b.jsonp!==!1&&(ce.test(b.url)||e&&ce.test(b.data))){var 
g,h=b.jsonpCallback=f.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,i=a[h],j=b.url,k=b.data,l="$1"+h+"$2";b.jsonp!==!1&&(j=j.replace(ce,l),b.url===j&&(e&&(k=k.replace(ce,l)),b.data===k&&(j+=(/\?/.test(j)?"&":"?")+b.jsonp+"="+h))),b.url=j,b.data=k,a[h]=function(a){g=[a]},d.always(function(){a[h]=i,g&&f.isFunction(i)&&a[h](g[0])}),b.converters["script json"]=function(){g||f.error(h+" was not called");return g[0]},b.dataTypes[0]="json";return"script"}}),f.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(a){f.globalEval(a);return a}}}),f.ajaxPrefilter("script",function(a){a.cache===b&&(a.cache=!1),a.crossDomain&&(a.type="GET",a.global=!1)}),f.ajaxTransport("script",function(a){if(a.crossDomain){var d,e=c.head||c.getElementsByTagName("head")[0]||c.documentElement;return{send:function(f,g){d=c.createElement("script"),d.async="async",a.scriptCharset&&(d.charset=a.scriptCharset),d.src=a.url,d.onload=d.onreadystatechange=function(a,c){if(c||!d.readyState||/loaded|complete/.test(d.readyState))d.onload=d.onreadystatechange=null,e&&d.parentNode&&e.removeChild(d),d=b,c||g(200,"success")},e.insertBefore(d,e.firstChild)},abort:function(){d&&d.onload(0,1)}}}});var cf=a.ActiveXObject?function(){for(var a in ch)ch[a](0,1)}:!1,cg=0,ch;f.ajaxSettings.xhr=a.ActiveXObject?function(){return!this.isLocal&&ci()||cj()}:ci,function(a){f.extend(f.support,{ajax:!!a,cors:!!a&&"withCredentials"in a})}(f.ajaxSettings.xhr()),f.support.ajax&&f.ajaxTransport(function(c){if(!c.crossDomain||f.support.cors){var d;return{send:function(e,g){var h=c.xhr(),i,j;c.username?h.open(c.type,c.url,c.async,c.username,c.password):h.open(c.type,c.url,c.async);if(c.xhrFields)for(j in 
c.xhrFields)h[j]=c.xhrFields[j];c.mimeType&&h.overrideMimeType&&h.overrideMimeType(c.mimeType),!c.crossDomain&&!e["X-Requested-With"]&&(e["X-Requested-With"]="XMLHttpRequest");try{for(j in e)h.setRequestHeader(j,e[j])}catch(k){}h.send(c.hasContent&&c.data||null),d=function(a,e){var j,k,l,m,n;try{if(d&&(e||h.readyState===4)){d=b,i&&(h.onreadystatechange=f.noop,cf&&delete ch[i]);if(e)h.readyState!==4&&h.abort();else{j=h.status,l=h.getAllResponseHeaders(),m={},n=h.responseXML,n&&n.documentElement&&(m.xml=n),m.text=h.responseText;try{k=h.statusText}catch(o){k=""}!j&&c.isLocal&&!c.crossDomain?j=m.text?200:404:j===1223&&(j=204)}}}catch(p){e||g(-1,p)}m&&g(j,k,m,l)},!c.async||h.readyState===4?d():(i=++cg,cf&&(ch||(ch={},f(a).unload(cf)),ch[i]=d),h.onreadystatechange=d)},abort:function(){d&&d(0,1)}}}});var ck={},cl,cm,cn=/^(?:toggle|show|hide)$/,co=/^([+\-]=)?([\d+.\-]+)([a-z%]*)$/i,cp,cq=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]],cr;f.fn.extend({show:function(a,b,c){var d,e;if(a||a===0)return this.animate(cu("show",3),a,b,c);for(var g=0,h=this.length;g=i.duration+this.startTime){this.now=this.end,this.pos=this.state=1,this.update(),i.animatedProperties[this.prop]=!0;for(b in i.animatedProperties)i.animatedProperties[b]!==!0&&(g=!1);if(g){i.overflow!=null&&!f.support.shrinkWrapBlocks&&f.each(["","X","Y"],function(a,b){h.style["overflow"+b]=i.overflow[a]}),i.hide&&f(h).hide();if(i.hide||i.show)for(b in i.animatedProperties)f.style(h,b,i.orig[b]),f.removeData(h,"fxshow"+b,!0),f.removeData(h,"toggle"+b,!0);d=i.complete,d&&(i.complete=!1,d.call(h))}return!1}i.duration==Infinity?this.now=e:(c=e-this.startTime,this.state=c/i.duration,this.pos=f.easing[i.animatedProperties[this.prop]](this.state,c,0,1,i.duration),this.now=this.start+(this.end-this.start)*this.pos),this.update();return!0}},f.extend(f.fx,{tick:function(){var 
a,b=f.timers,c=0;for(;c-1,k={},l={},m,n;j?(l=e.position(),m=l.top,n=l.left):(m=parseFloat(h)||0,n=parseFloat(i)||0),f.isFunction(b)&&(b=b.call(a,c,g)),b.top!=null&&(k.top=b.top-g.top+m),b.left!=null&&(k.left=b.left-g.left+n),"using"in b?b.using.call(a,k):e.css(k)}},f.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),c=this.offset(),d=cx.test(b[0].nodeName)?{top:0,left:0}:b.offset();c.top-=parseFloat(f.css(a,"marginTop"))||0,c.left-=parseFloat(f.css(a,"marginLeft"))||0,d.top+=parseFloat(f.css(b[0],"borderTopWidth"))||0,d.left+=parseFloat(f.css(b[0],"borderLeftWidth"))||0;return{top:c.top-d.top,left:c.left-d.left}},offsetParent:function(){return this.map(function(){var a=this.offsetParent||c.body;while(a&&!cx.test(a.nodeName)&&f.css(a,"position")==="static")a=a.offsetParent;return a})}}),f.each(["Left","Top"],function(a,c){var d="scroll"+c;f.fn[d]=function(c){var e,g;if(c===b){e=this[0];if(!e)return null;g=cy(e);return g?"pageXOffset"in g?g[a?"pageYOffset":"pageXOffset"]:f.support.boxModel&&g.document.documentElement[d]||g.document.body[d]:e[d]}return this.each(function(){g=cy(this),g?g.scrollTo(a?f(g).scrollLeft():c,a?c:f(g).scrollTop()):this[d]=c})}}),f.each(["Height","Width"],function(a,c){var d=c.toLowerCase();f.fn["inner"+c]=function(){var a=this[0];return a?a.style?parseFloat(f.css(a,d,"padding")):this[d]():null},f.fn["outer"+c]=function(a){var b=this[0];return b?b.style?parseFloat(f.css(b,d,a?"margin":"border")):this[d]():null},f.fn[d]=function(a){var e=this[0];if(!e)return a==null?null:this;if(f.isFunction(a))return this.each(function(b){var c=f(this);c[d](a.call(this,b,c[d]()))});if(f.isWindow(e)){var g=e.document.documentElement["client"+c],h=e.document.body;return e.document.compatMode==="CSS1Compat"&&g||h&&h["client"+c]||g}if(e.nodeType===9)return Math.max(e.documentElement["client"+c],e.body["scroll"+c],e.documentElement["scroll"+c],e.body["offset"+c],e.documentElement["offset"+c]);if(a===b){var 
i=f.css(e,d),j=parseFloat(i);return f.isNumeric(j)?j:i}return this.css(d,typeof a=="string"?a:a+"px")}}),a.jQuery=a.$=f,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return f})})(window); \ No newline at end of file diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/logo.png Binary file DVN-web/installer/dvninstall/doc/guides/_static/logo.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/minus.png Binary file DVN-web/installer/dvninstall/doc/guides/_static/minus.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/plus.png Binary file DVN-web/installer/dvninstall/doc/guides/_static/plus.png has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/pygments.css --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/_static/pygments.css Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,62 @@ +.highlight .hll { background-color: #ffffcc } +.highlight { background: #eeffcc; } +.highlight .c { color: #408090; font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #007020; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #007020 } /* Comment.Preproc */ +.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .gr { color: #FF0000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #00A000 } /* Generic.Inserted */ +.highlight .go { color: 
#333333 } /* Generic.Output */ +.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #007020 } /* Keyword.Pseudo */ +.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #902000 } /* Keyword.Type */ +.highlight .m { color: #208050 } /* Literal.Number */ +.highlight .s { color: #4070a0 } /* Literal.String */ +.highlight .na { color: #4070a0 } /* Name.Attribute */ +.highlight .nb { color: #007020 } /* Name.Builtin */ +.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ +.highlight .no { color: #60add5 } /* Name.Constant */ +.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ +.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #007020 } /* Name.Exception */ +.highlight .nf { color: #06287e } /* Name.Function */ +.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ +.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #bb60d5 } /* Name.Variable */ +.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mf { color: #208050 } /* Literal.Number.Float */ +.highlight .mh { color: #208050 } /* Literal.Number.Hex */ +.highlight .mi { color: #208050 } /* Literal.Number.Integer */ +.highlight .mo { color: #208050 } /* Literal.Number.Oct */ +.highlight .sb { color: 
#4070a0 } /* Literal.String.Backtick */ +.highlight .sc { color: #4070a0 } /* Literal.String.Char */ +.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #4070a0 } /* Literal.String.Double */ +.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ +.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ +.highlight .sx { color: #c65d09 } /* Literal.String.Other */ +.highlight .sr { color: #235388 } /* Literal.String.Regex */ +.highlight .s1 { color: #4070a0 } /* Literal.String.Single */ +.highlight .ss { color: #517918 } /* Literal.String.Symbol */ +.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ +.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ +.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ +.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ +.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/_static/searchtools.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/_static/searchtools.js Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,622 @@ +/* + * searchtools.js_t + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilties for the full-text search. + * + * :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" + v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // 
Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + + + +/** + * Simple result scoring code. + */ +var Scorer = { + // Implement the following function to further tweak the score for each result + // The function takes a result array [filename, title, anchor, descr, score] + // and returns the new score. 
+ /* + score: function(result) { + return result[4]; + }, + */ + + // query matches the full name of an object + objNameMatch: 11, + // or matches in the last dotted part of the object name + objPartialMatch: 6, + // Additive scores depending on the priority of the object + objPrio: {0: 15, // used to be importantResults + 1: 5, // used to be objectResults + 2: -5}, // used to be unimportantResults + // Used when the priority is not in the mapping. + objPrioDefault: 0, + + // query found in title + title: 15, + // query found in terms + term: 5 +}; + + +/** + * Search Module + */ +var Search = { + + _index : null, + _queued_query : null, + _pulse_status : -1, + + init : function() { + var params = $.getQueryParameters(); + if (params.q) { + var query = params.q[0]; + $('input[name="q"]')[0].value = query; + this.performSearch(query); + } + }, + + loadIndex : function(url) { + $.ajax({type: "GET", url: url, data: null, + dataType: "script", cache: true, + complete: function(jqxhr, textstatus) { + if (textstatus != "success") { + document.getElementById("searchindexloader").src = url; + } + }}); + }, + + setIndex : function(index) { + var q; + this._index = index; + if ((q = this._queued_query) !== null) { + this._queued_query = null; + Search.query(q); + } + }, + + hasIndex : function() { + return this._index !== null; + }, + + deferQuery : function(query) { + this._queued_query = query; + }, + + stopPulse : function() { + this._pulse_status = 0; + }, + + startPulse : function() { + if (this._pulse_status >= 0) + return; + function pulse() { + var i; + Search._pulse_status = (Search._pulse_status + 1) % 4; + var dotString = ''; + for (i = 0; i < Search._pulse_status; i++) + dotString += '.'; + Search.dots.text(dotString); + if (Search._pulse_status > -1) + window.setTimeout(pulse, 500); + } + pulse(); + }, + + /** + * perform a search for something (or wait until index is loaded) + */ + performSearch : function(query) { + // create the required interface elements + 
this.out = $('#search-results'); + this.title = $('

' + _('Searching') + '

').appendTo(this.out); + this.dots = $('').appendTo(this.title); + this.status = $('

').appendTo(this.out); + this.output = $('
'); + } + // Prettify the comment rating. + comment.pretty_rating = comment.rating + ' point' + + (comment.rating == 1 ? '' : 's'); + // Make a class (for displaying not yet moderated comments differently) + comment.css_class = comment.displayed ? '' : ' moderate'; + // Create a div for this comment. + var context = $.extend({}, opts, comment); + var div = $(renderTemplate(commentTemplate, context)); + + // If the user has voted on this comment, highlight the correct arrow. + if (comment.vote) { + var direction = (comment.vote == 1) ? 'u' : 'd'; + div.find('#' + direction + 'v' + comment.id).hide(); + div.find('#' + direction + 'u' + comment.id).show(); + } + + if (opts.moderator || comment.text != '[deleted]') { + div.find('a.reply').show(); + if (comment.proposal_diff) + div.find('#sp' + comment.id).show(); + if (opts.moderator && !comment.displayed) + div.find('#cm' + comment.id).show(); + if (opts.moderator || (opts.username == comment.username)) + div.find('#dc' + comment.id).show(); + } + return div; + } + + /** + * A simple template renderer. Placeholders such as <%id%> are replaced + * by context['id'] with items being escaped. Placeholders such as <#id#> + * are not escaped. + */ + function renderTemplate(template, context) { + var esc = $(document.createElement('div')); + + function handle(ph, escape) { + var cur = context; + $.each(ph.split('.'), function() { + cur = cur[this]; + }); + return escape ? esc.text(cur || "").html() : cur; + } + + return template.replace(/<([%#])([\w\.]*)\1>/g, function() { + return handle(arguments[2], arguments[1] == '%' ? true : false); + }); + } + + /** Flash an error message briefly. */ + function showError(message) { + $(document.createElement('div')).attr({'class': 'popup-error'}) + .append($(document.createElement('div')) + .attr({'class': 'error-message'}).text(message)) + .appendTo('body') + .fadeIn("slow") + .delay(2000) + .fadeOut("slow"); + } + + /** Add a link the user uses to open the comments popup. 
*/ + $.fn.comment = function() { + return this.each(function() { + var id = $(this).attr('id').substring(1); + var count = COMMENT_METADATA[id]; + var title = count + ' comment' + (count == 1 ? '' : 's'); + var image = count > 0 ? opts.commentBrightImage : opts.commentImage; + var addcls = count == 0 ? ' nocomment' : ''; + $(this) + .append( + $(document.createElement('a')).attr({ + href: '#', + 'class': 'sphinx-comment-open' + addcls, + id: 'ao' + id + }) + .append($(document.createElement('img')).attr({ + src: image, + alt: 'comment', + title: title + })) + .click(function(event) { + event.preventDefault(); + show($(this).attr('id').substring(2)); + }) + ) + .append( + $(document.createElement('a')).attr({ + href: '#', + 'class': 'sphinx-comment-close hidden', + id: 'ah' + id + }) + .append($(document.createElement('img')).attr({ + src: opts.closeCommentImage, + alt: 'close', + title: 'close' + })) + .click(function(event) { + event.preventDefault(); + hide($(this).attr('id').substring(2)); + }) + ); + }); + }; + + var opts = { + processVoteURL: '/_process_vote', + addCommentURL: '/_add_comment', + getCommentsURL: '/_get_comments', + acceptCommentURL: '/_accept_comment', + deleteCommentURL: '/_delete_comment', + commentImage: '/static/_static/comment.png', + closeCommentImage: '/static/_static/comment-close.png', + loadingImage: '/static/_static/ajax-loader.gif', + commentBrightImage: '/static/_static/comment-bright.png', + upArrow: '/static/_static/up.png', + downArrow: '/static/_static/down.png', + upArrowPressed: '/static/_static/up-pressed.png', + downArrowPressed: '/static/_static/down-pressed.png', + voting: false, + moderator: false + }; + + if (typeof COMMENT_OPTIONS != "undefined") { + opts = jQuery.extend(opts, COMMENT_OPTIONS); + } + + var popupTemplate = '\ +
\ +

\ + Sort by:\ + best rated\ + newest\ + oldest\ +

\ +
Comments
\ +
\ + loading comments...
\ +
    \ +
    \ +

    Add a comment\ + (markup):

    \ +
    \ + reStructured text markup: *emph*, **strong**, \ + ``code``, \ + code blocks: :: and an indented block after blank line
    \ +
    \ + \ +

    \ + \ + Propose a change ▹\ + \ + \ + Propose a change ▿\ + \ +

    \ + \ + \ + \ + \ + \ +
    \ +
    '; + + var commentTemplate = '\ +
    \ +
    \ +
    \ + \ + \ + \ + \ + \ + \ +
    \ +
    \ + \ + \ + \ + \ + \ + \ +
    \ +
    \ +
    \ +

    \ + <%username%>\ + <%pretty_rating%>\ + <%time.delta%>\ +

    \ +
    <#text#>
    \ +

    \ + \ + reply ▿\ + proposal ▹\ + proposal ▿\ + \ + \ +

    \ +
    \
    +<#proposal_diff#>\
    +        
    \ +
      \ +
      \ +
      \ +
      \ + '; + + var replyTemplate = '\ +
    • \ +
      \ +
      \ + \ + \ + \ + \ + \ + \ +
      \ +
    • '; + + $(document).ready(function() { + init(); + }); +})(jQuery); + +$(document).ready(function() { + // add comment anchors for all paragraphs that are commentable + $('.sphinx-has-comment').comment(); + + // highlight search words in search results + $("div.context").each(function() { + var params = $.getQueryParameters(); + var terms = (params.q) ? params.q[0].split(/\s+/) : []; + var result = $(this); + $.each(terms, function() { + result.highlightText(this.toLowerCase(), 'highlighted'); + }); + }); + + // directly open comment window if requested + var anchor = document.location.hash; + if (anchor.substring(0, 9) == '#comment-') { + $('#ao' + anchor.substring(9)).click(); + document.location.hash = '#s' + anchor.substring(9); + } +}); diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/dataverse-R-ingest.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/dataverse-R-ingest.html Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,270 @@ + + + + + + + + + + Ingest of R (.RData) files — The Harvard Dataverse Network 3.6.1 documentation + + + + + + + + + + + + + + +
      +
      +
      + +
      +
      +
      + +
      +

      Ingest of R (.RData) files

      +
      +

      Overview.

      +

      Support for ingesting R data files has been added in version 3.5. R +has been increasingly popular in the research/academic community, +owing to the fact that it is free and open-source (unlike SPSS and +STATA). Consequently, more and more data is becoming available +exclusively as R data files. This long-awaited feature makes it +possible to ingest such data into DVN as “subsettable” files.

      +
      +
      +

      Requirements.

      +

      R ingest relies on R having been installed, configured and made +available to the DVN application via RServe (see the Installers +Guide). This is in contrast to the SPSS and Stata ingest - which can +be performed without R present. (though R is still needed to perform +most subsetting/analysis tasks on the resulting data files).

      +

      The data must be formatted as an R dataframe (data.frame()). If an +.RData file contains multiple dataframes, only the 1st one will be +ingested.

      +
      +
      +

      Data Types, compared to other supported formats (Stat, SPSS)

      +
      +

      Integers, Doubles, Character strings

      +

      The handling of these types is intuitive and straightforward. The +resulting tab file columns, summary statistics and UNF signatures +should be identical to those produced by ingesting the same vectors +from SPSS and Stata.

      +

      A couple of things that are unique to R/new in DVN:

      +

      R explicitly supports Missing Values for all of the types above; +Missing Values encoded in R vectors will be recognized and preserved +in TAB files (as ‘NA’), counted in the generated summary statistics +and data analysis.

      +

      In addition to Missing Values, R recognizes “Not a Value” (NaN) and +positive and negative infinity for floating point variables. These +are now properly supported by the DVN.

      +

      Also note, that unlike Stata, that does recognize “float” and “double” +as distinct data types, all floating point values in R are in fact +double precision.

      +
      +
      +

      R Factors

      +

      These are ingested as “Categorical Values” in the DVN.

      +

      One thing to keep in mind: in both Stata and SPSS, the actual value of +a categorical variable can be both character and numeric. In R, all +factor values are strings, even if they are string representations of +numbers. So the values of the resulting categoricals in the DVN will +always be of string type too.

      +
      +
      New: To properly handle ordered factors in R, the DVN now supports the concept of an “Ordered Categorical” - a categorical value where an explicit order is assigned to the list of value labels.
      +
      +
      +
      +

      (New!) Boolean values

      +

      R Boolean (logical) values are supported.

      +
      +
      +

      Limitations of R, as compared to SPSS and STATA.

      +

      Most noticeably, R lacks a standard mechanism for defining descriptive +labels for the data frame variables. In the DVN, similarly to +both Stata and SPSS, variables have distinct names and labels; with +the latter reserved for longer, descriptive text. +With variables ingested from R data frames the variable name will be +used for both the “name” and the “label”.

      +
      +
      Optional R packages exist for providing descriptive variable labels; +in one of the future versions support may be added for such a +mechanism. It would of course work only for R files that were +created with such optional packages.
      +
      +

      Similarly, R categorical values (factors) lack descriptive labels too. +Note: This is potentially confusing, since R factors do +actually have “labels”. This is a matter of terminology - an R +factor’s label is in fact the same thing as the “value” of a +categorical variable in SPSS or Stata and DVN; it contains the actual +meaningful data for the given observation. It is NOT a field reserved +for explanatory, human-readable text, such as the case with the +SPSS/Stata “label”.

      +

      Ingesting an R factor with the level labels “MALE” and “FEMALE” will +produce a categorical variable with “MALE” and “FEMALE” in the +values and labels both.

      +
      +
      +
      +

      Time values in R

      +

      This warrants a dedicated section of its own, because of some unique +ways in which time values are handled in R.

      +

      R makes an effort to treat a time value as a real time instance. This +is in contrast with either SPSS or Stata, where time value +representations such as “Sep-23-2013 14:57:21” are allowed; note that +in the absence of an explicitly defined time zone, this value cannot +be mapped to an exact point in real time. R handles times in the +“Unix-style” way: the value is converted to the +“seconds-since-the-Epoch” Greenwich time (GMT or UTC) and the +resulting numeric value is stored in the data file; time zone +adjustments are made in real time as needed.

      +

      Things still get ambiguous and confusing when R displays this time +value: unless the time zone was explicitly defined, R will adjust the +value to the current time zone. The resulting behavior is often +counter-intuitive: if you create a time value, for example:

      +
      +
      timevalue<-as.POSIXct(“03/19/2013 12:57:00”, format = “%m/%d/%Y %H:%M:%OS”);
      +

      on a computer configured for the San Francisco time zone, the value +will be differently displayed on computers in different time zones; +for example, as “12:57 PST” while still on the West Coast, but as +“15:57 EST” in Boston.

      +

      If it is important that the values are always displayed the same way, +regardless of the current time zones, it is recommended that the time +zone is explicitly defined. For example:

      +
      +
      attr(timevalue,”tzone”)<-“PST”
      +
      +
      or
      +
      timevalue<-as.POSIXct(“03/19/2013 12:57:00”, format = “%m/%d/%Y %H:%M:%OS”, tz=”PST”);
      +
      +

      Now the value will always be displayed as “15:57 PST”, regardless of +the time zone that is current for the OS ... BUT ONLY if the OS +where R is installed actually understands the time zone “PST”, which +is not by any means guaranteed! Otherwise, it will quietly adjust +the stored GMT value to the current time zone, yet it will still +display it with the “PST” tag attached!** One way to rephrase this is +that R does a fairly decent job storing time values in a +non-ambiguous, platform-independent manner - but gives you no guarantee that +the values will be displayed in any way that is predictable or intuitive.

      +

      In practical terms, it is recommended to use the long/descriptive +forms of time zones, as they are more likely to be properly recognized +on most computers. For example, “Japan” instead of “JST”. Another possible +solution is to explicitly use GMT or UTC (since it is very likely to be +properly recognized on any system), or the “UTC+&lt;OFFSET&gt;” notation. Still, none of the above +guarantees proper, non-ambiguous handling of time values in R data +sets. The fact that R quietly modifies time values when it doesn’t +recognize the supplied timezone attribute, yet still appends it to the +changed time value does make it quite difficult. (These issues are +discussed in depth on R-related forums, and no attempt is made to +summarize it all in any depth here; this is just to make you aware of +this being a potentially complex issue!)

      +

      An important thing to keep in mind, in connection with the DVN ingest +of R files, is that it will reject an R data file with any time +values that have time zones that we can’t recognize. This is done in +order to avoid (some) of the potential issues outlined above.

      +

      It is also recommended that any vectors containing time values +ingested into the DVN are reviewed, and the resulting entries in the +TAB files are compared against the original values in the R data +frame, to make sure they have been ingested as expected.

      +

      Another potential issue here is the UNF. The way the UNF +algorithm works, the same date/time values with and without the +timezone (e.g. “12:45” vs. “12:45 EST”) produce different +UNFs. Considering that time values in Stata/SPSS do not have time +zones, but ALL time values in R do (yes, they all do - if the timezone +wasn’t defined explicitly, it implicitly becomes a time value in the +“UTC” zone!), this means that it is impossible to have 2 time +value vectors, in Stata/SPSS and R, that produce the same UNF.

      +
      +
      A pro tip: if it is important to produce SPSS/Stata and R versions of
      +
      +

      the same data set that result in the same UNF when ingested, you may +define the time variables as strings in the R data frame, and use +the “YYYY-MM-DD HH:mm:ss” formatting notation. This is the formatting used by the UNF +algorithm to normalize time values, so doing the above will result in +the same UNF as the vector of the same time values in Stata.

      +

      Note: date values (dates only, without time) should be handled the +exact same way as those in SPSS and Stata, and should produce the same +UNFs.

      +
      +
      + + +
      +
      +
      +
      + +
      +
      +
      + + + + + \ No newline at end of file diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/dataverse-api-main.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/dataverse-api-main.html Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,684 @@ + + + + + + + + + + APIs Guide — The Harvard Dataverse Network 3.6.1 documentation + + + + + + + + + + + + + + + +
      +
      +
      + +
      +
      +
      + +
      +

      APIs Guide

      +

      Introduction

      +

      We strongly encourage anyone interested in building tools to +interoperate with the Dataverse Network to utilize our open source +APIs. Please visit our website for +examples of external apps that have been built to work with our APIs.

      +
      +

      Data Sharing API

      +

      As of version 3.0, a new API for programmatic access to the DVN data and +metadata has been added. The API allows a remote, non-DVN +archive/application to search the holdings and download files from a +Dataverse Network.

      +

      The Data Sharing API documentation is available below:

      +
      +

      API URLs

      +

      The URLs for the Data Sharing API resources are of the form:

      +

      /dvn/api/{/arg}{?{{arg}&...}}

      +

      Generally, mandatory arguments are embedded in the URL and optional +arguments are supplied as query parameters, in the ?param=... notation. +See the documentation for the individual resources below for details.

      +

      The API supports basic HTTP Authentication. So that the access +credentials are not transmitted in the clear, the API verbs (methods) +below are only accessible over HTTPS.

      +
      +
      +

      Metadata API

      +

      The API for accessing Dataverse Network metadata is implemented in 4 verbs +(resources):

      +
      +
      metadataSearchFields
      +
      metadataSearch
      +
      metadataFormatsAvailable
      +
      metadata
      +
      +
      +

      metadataSearchFields

      +

      Arguments:

      +

      none

      +

      URL example:

      +

      /dvn/api/metadataSearchFields/

      +

      Output:

      +

      XML record in the format below:

      +
      <MetadataSearchFields>
      +<SearchableField>
      +<fieldName>title</fieldName>
      +<fieldDescription>title</fieldDescription>
      +</SearchableField>
      +<SearchableField>
      +<fieldName>authorName</fieldName>
      +<fieldDescription>authorName</fieldDescription>
      +</SearchableField>
      +<SearchableField>
      +<fieldName>otherId</fieldName>
      +<fieldDescription>otherId</fieldDescription>
      +</SearchableField>
      +...
      +</MetadataSearchFields>
      +
      +
      +
      +
      +

      metadataSearch

      +

      Arguments:

      +
      +
      queryString: mandatory, embedded.
      +
      Standard Lucene-style search queries are supported; (same query format currently used to define OAI sets, etc.)
      +
      +

      URLs examples:

      +
      +
      /dvn/api/metadataSearch/title:test
      +
      /dvn/api/metadataSearch/title:test AND authorName:leonid
      +
      +

      Output:

      +

      XML record in the format below:

      +
      <MetadataSearchResults>
      +<searchQuery>title:test</searchQuery>
      +<searchHits>
      +<study ID="hdl:TEST/10007"/>
      +...
      +</searchHits>
      +</MetadataSearchResults>
      +
      +
      +

      Error Conditions:

      +

      Note that when the query does not produce any results, the resource returns an XML record +with an empty <searchHits> list, NOT a 404.

      +
      +
      +

      metadataFormatsAvailable

      +

      Arguments:

      +
      +
      objectId: mandatory, embedded.
      +
      Both global and local (database) IDs are supported.
      +
      +

      URLs examples:

      +
      +
      /dvn/api/metadataFormatsAvailable/hdl:1902.1/6635
      +
      /dvn/api/metadataFormatsAvailable/9956
      +
      +

      Output:

      +

      XML record in the format below:

      +
      <MetadataFormatsAvailable studyId="hdl:TEST/10007">
      +<formatAvailable selectSupported="true" excludeSupported="true">
      +<formatName>ddi</formatName>
      +<formatSchema>http://www.icpsr.umich.edu/DDI/Version2-0.xsd</formatSchema>
      +<formatMime>application/xml</formatMime>
      +</formatAvailable>
      +<formatAvailable>
      +<formatName>oai_dc</formatName>
      +<formatSchema>http://www.openarchives.org/OAI/2.0/oai_dc.xsd</formatSchema>
      +<formatMime>application/xml</formatMime>
      +</formatAvailable>
      +</MetadataFormatsAvailable>
      +
      +
      +

      (Note the selectSupported and excludeSupported attributes above!)

      +

      Error Conditions:

      +

      404 NOT FOUND if study does not exist

      +
      +
      +

      metadata

      +

      Arguments:

      +
      +
      objectId: mandatory, embedded.
      +
      Both global and local (database) IDs are supported.
      +
      +
      +
      formatType: optional, query.
      +
      Defaults to DDI if not supplied.
      +
      +

      URLs examples:

      +
      +
      /dvn/api/metadata/hdl:1902.1/6635 /dvn/api/metadata/9956
      +
      /dvn/api/metadata/hdl:1902.1/6635?formatType=ddi
      +
      +

      Output:

      +

      Metadata record in the format requested, if available. No extra +headers, etc.

      +

      Partial selection of metadata sections:

      +

      When requesting partial records is supported (see +metadataFormatsAvailable, above for more info), these additional parameters can be supplied:

      +
      +
      partialExclude: optional, query.
      +
      Xpath query representing metadata section to drop, where supported.
      +
      +
      +
      partialInclude: optional, query.
      +
      Xpath query representing metadata section to include, where supported.
      +
      +

      Examples:

      +
      +
      /dvn/api/metadata/hdl:1902.1/6635?formatType=ddi&partialExclude=codeBook/dataDscr
      +
      will produce a DDI without the dataDscr section.
      +
      [I’m expecting this to be the single most useful and common real-life application of this feature - L.A.]
      +
      +
      +
      /dvn/api/metadata/hdl:1902.1/6635?formatType=ddi&partialInclude=codeBook/stdyDscr
      +
      will produce a DDI with the stdyDscr section only.
      +
      +

      (Note: for now, only simple top-level Xpath queries like the above are supported).

      +

      One other limitation of the current implementation: it does not validate the supplied partialExclude and partialInclude arguments; no error messages/diagnostics will be given if the Xpath queries are not part of the metadata schema. For example, if you request partialInclude=foobar, it will quietly produce an empty DDI, and partialExclude=foobar will not exclude anything (and you will get a complete DDI).

      +

      Error Conditions:

      +
      +
      404 NOT FOUND
      +
      if study does not exist
      +
      +
      +
      503 SERVICE UNAVAILABLE
      +
      if study exists, but the format requested is not available;
      +
      also, when partial exclude or include is requested, if it’s not supported by the service (see the documentation for metadataFormatsAvailable above).
      +
      +

      Notes:

      +

      A real-life workflow scenario may go as follows:

      +
        +
      1. Find the searchable index fields on this DVN (metadataSearchFields)
      2. +
      3. Run a search (metadataSearch)
      4. +
      5. For [select] studies returned, find what metadata formats are available (metadataFormatsAvailable)
      6. +
      7. Retrieve the metadata in the desired format (metadata)
      8. +
      +
      +
      +
      +

      File Access API

      +

      The Dataverse Network API for downloading digital objects (files) is implemented in 2 +verbs (resources):

      +
      +
      downloadInfo
      +
      download
      +
      +
      +

      downloadInfo

      +

      Arguments:

      +
      +
      objectId: mandatory, embedded.
      +
      Database ID of the Dataverse Network Study File.
      +
      +

      URLs example:

      +

      /dvn/api/downloadInfo/9956

      +

      Output:

      +

      XML record in the format below:

      +

      (Note: the record below is only an example; we will provide full schema/documentation of the FileDownloadInfo record format below)

      +
      <FileDownloadInfo>
      +<studyFile fileId="9956">
      +
      +<fileName>prettypicture.jpg</fileName>
      +<fileMimeType>image/jpeg</fileMimeType>
      +<fileSize>52825</fileSize>
      +
      +<Authentication>
      +        <authUser>testUser</authUser>
      +        <authMethod>password</authMethod>
      +</Authentication>
      +
      +<Authorization directAccess="true"/>
      +
      +<accessPermissions accessGranted="true">Authorized Access only</accessPermissions>
      +
      +<accessRestrictions accessGranted="true">Terms of Use</accessRestrictions>
      +
      +<accessServicesSupported>
      +
      +        <accessService>
      +                <serviceName>thumbnail</serviceName>
      +                <serviceArgs>imageThumb=true</serviceArgs>
      +                <contentType>image/png</contentType>
      +                <serviceDesc>Image Thumbnail</serviceDesc>
      +        </accessService>
      +
      +</accessServicesSupported>
      +</studyFile>
      +</FileDownloadInfo>
      +
      +
      +

      Error Conditions:

      +
      +
      404 NOT FOUND
      +
      Study file does not exist.
      +
      +
      +
      +

      download

      +

      Arguments:

      +
      +
      objectId: mandatory, embedded.
      +
      Database ID of the DVN Study File.
      +
      +
      +
      Optional Query args:
      +
      As specified in the output of downloadInfo, above.
      +
      +

      URLs examples:

      +
      +
      /dvn/api/download/9956
      +
      /dvn/api/download/9956?imageThumb=true
      +
      /dvn/api/download/9957?fileFormat=stata
      +
      +

      Output:

      +

      Byte Stream (with proper HTTP headers specifying the content +type, file name and such)

      +

      Error Conditions:

      +
      +
      404 NOT FOUND
      +
      Study file does not exist.
      +
      +
      +
      401 AUTHORIZATION REQUIRED
      +
      Access to restricted object attempted without HTTP Authorization header supplied.
      +
      +
      +
      403 PERMISSION DENIED HTTP
      +
      Authorization header supplied, but the authenticated user is not
      +
      authorized to directly access the object protected by Access
      +
      Permissions and/or Access Restrictions (“Terms of Use”).
      +
      +
      +
      +
      +
      +

      Data Deposit API

      +

      As of version 3.6, a new API for programmatic deposit of data and metadata to the Dataverse Network has been added. The API allows a remote, non-Dataverse Network archive/application to deposit files and metadata to a Dataverse Network installation.

      +
      +

      Overview of Data Deposit API

      +

      “v1” of the DVN Data Deposit API is a partial implementation of the SWORDv2 protocol, the specification for which is available at http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html

      +

      Please reference the SWORDv2 specification for expected HTTP status codes (i.e. 201, 204, 404, etc.), headers (i.e. “Location”), etc.

      +
      +

      Data Deposit API v1 curl examples

      +

      The following curl commands demonstrate supported operations:

      +
      +
      Retrieve SWORD service document
      +

      The service document enumerates the dataverses (“collections” from a SWORD perspective) the user can deposit data into. The “collectionPolicy” element for each dataverse contains the deposit terms of use for the network and dataverse.

      +

      curl https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/service-document

      +
      +
      +
      Create a study with an Atom entry (XML file)
      +

      curl --data-binary "@atom-entry-study.xml" -H "Content-Type: application/atom+xml" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS

      +
      <?xml version="1.0"?>
      +<!--
      +modified from http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html#protocoloperations_editingcontent_metadata
      +-->
      +<entry xmlns="http://www.w3.org/2005/Atom"
      +       xmlns:dcterms="http://purl.org/dc/terms/">
      +   <!-- some embedded metadata -->
      +   <dcterms:title>Roasting at Home</dcterms:title>
      +   <dcterms:creator>Peets, John</dcterms:creator>
      +   <dcterms:creator>Stumptown, Jane</dcterms:creator>
      +   <!-- Producer with financial or admin responsibility of the data -->
      +   <dcterms:publisher>Coffee Bean State University</dcterms:publisher>
      +   <!-- related publications -->
      +   <dcterms:isReferencedBy holdingsURI="http://dx.doi.org/10.1038/dvn333" agency="DOI"
      +       IDNo="10.1038/dvn333">Peets, J., &amp; Stumptown, J. (2013). Roasting at Home. New England Journal of Coffee, 3(1), 22-34.</dcterms:isReferencedBy>
      +   <!-- production date -->
      +   <dcterms:date>2013-07-11</dcterms:date>
      +   <!-- Other Identifier for the data in this study (or potentially global id if unused) -->
      +   <!--
      +   <dcterms:identifier>hdl:1XXZY.1/XYXZ</dcterms:identifier>
      +   -->
      +   <dcterms:description>Considerations before you start roasting your own coffee at home.</dcterms:description>
      +   <!-- keywords -->
      +   <dcterms:subject>coffee</dcterms:subject>
      +   <dcterms:subject>beverage</dcterms:subject>
      +   <dcterms:subject>caffeine</dcterms:subject>
      +   <!-- geographic coverage -->
      +   <dcterms:coverage>United States</dcterms:coverage>
      +   <dcterms:coverage>Canada</dcterms:coverage>
      +   <!-- kind of data -->
      +   <dcterms:type>aggregate data</dcterms:type>
      +   <!-- List of sources of the data collection-->
      +   <dcterms:source>Stumptown, Jane. 2011. Home Roasting. Coffeemill Press.</dcterms:source>
      +   <!-- restrictions -->
      +   <dcterms:rights>Creative Commons CC-BY 3.0 (unported) http://creativecommons.org/licenses/by/3.0/</dcterms:rights>
      +   <!-- related materials -->
      +   <dcterms:relation>Peets, John. 2010. Roasting Coffee at the Coffee Shop. Coffeemill Press</dcterms:relation>
      +</entry>
      +
      +
      +
      +
      +
      Dublin Core (DC) Qualified Mapping - DDI - Dataverse Network DB Element Crosswalk
      +
      ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      DC (terms: namespace)DVN DB ElementDDI Element 2.xNote
      dcterms:titletitle2.1.1.1 title 
      dcterms:creatorauthor (LastName, FirstName)2.1.2.1 AuthEnty 
      dcterms:subjectkeyword2.2.1.1. keyword 
      dcterms:descriptionabstract2.2.2 abstractDescribing the purpose, scope or nature of the data collection...
      dcterms:publisherproducer2.1.3.1 producerperson or agency financially or administratively responsible for the dataset
      dcterms:contributorn/an/asee dcterms:creator above
      dcterms:dateproductionDate (YYYY-MM-DD or YYYY-MM or YYYY)2.1.3.3 prodDateproduction or published date of dataset
      dcterms:typekindOfData2.2.3.10 dataKindType of data included in the file: survey data, census/enumeration data, aggregate data, clinical
      dcterms:formatn/an/a 
      dcterms:identifierotherID2.1.1.5 IDNoDon’t use this field to map a journal article ID. Only ID’s that directly belong to dataset
      dcterms:sourcedataSources2.3.1.8.1 dataSrcList of books, articles, data files if any that served as the sources for the data collection
      dcterms:languagen/an/a 
      dcterms:relationrelatedMaterial2.5.1 relMatany related material (journal article is not included here - see: dcterms:isReferencedBy below)
      dcterms:coveragegeographicCoverage2.2.3.4 geogCoverInfo on the geographic coverage of the data
      dcterms:rightsrestrictions2.4.2.3 restrctnany restrictions on the access or use of the dataset
      dcterms:bibliographicCitationdataCitation? (2.1.7 biblCit)data citation for the study in the Dataverse Network
      dcterms:isReferencedBystudyRelPublications? (not set by DDI community yet)the publication (journal article, book, other work) that uses this dataset (include citation, permanent identifier (DOI), and permanent URL)
      + +
      +
      Add files to a study with a zip file
      +

      curl --data-binary @example.zip -H "Content-Disposition: filename=example.zip" -H "Content-Type: application/zip" -H "Packaging: http://purl.org/net/sword/package/SimpleZip" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/study/hdl:TEST/12345

      +
      +
      +
      Display a study atom entry
      +

      Contains data citation (bibliographicCitation), alternate URI [persistent URI of study], edit URI, edit media URI, statement URI.

      +

      curl https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345

      +
      +
      +
      Display a study statement
      +

      Contains feed of file entries, latestVersionState, locked boolean

      +

      curl https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/statement/study/hdl:TEST/12345

      +
      +
      +
      Delete a file by database id
      +

      curl -i -X DELETE https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/file/2325541

      +
      +
      +
      Replacing cataloging information (title, author, etc.) for a study
      +

      Please note that all cataloging information will be replaced, including fields that can not be expressed with “dcterms” fields.

      +

      curl --upload-file "atom-entry-study2.xml" -H "Content-Type: application/atom+xml" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345

      +
      <?xml version="1.0"?>
      +<!--
      +for modifying a study created with atom-entry-study.xml
      +-->
      +<entry xmlns="http://www.w3.org/2005/Atom"
      +       xmlns:dcterms="http://purl.org/dc/terms/">
      +   <!-- some embedded metadata -->
      +   <dcterms:title>The Levels of Caffeine in Cold Brew Coffee</dcterms:title>
      +   <dcterms:creator>Peets, John L.</dcterms:creator>
      +   <dcterms:creator>Stumptown Research Institute</dcterms:creator>
      +   <dcterms:isReferencedBy holdingsURI="http://dx.doi.org/10.1038/dvn333" agency="DOI"
      +       IDNo="10.1038/dvn333">Peets, J., &amp; Stumptown, J. (2013). Roasting at Home. New England Journal of Coffee, 3(1), 22-34.</dcterms:isReferencedBy>
      +   <dcterms:date>2013-08-11</dcterms:date>
      +   <dcterms:description>This study evaluates the caffeine levels of a cold brewed coffee.</dcterms:description>
      +   <dcterms:subject>coffee bean</dcterms:subject>
      +   <dcterms:subject>caffeine</dcterms:subject>
      +   <dcterms:subject>cold brew process</dcterms:subject>
      +   <dcterms:subject>Stumptown Coffee Company</dcterms:subject>
      +   <dcterms:rights>Creative Commons CC-BY 3.0 (unported) http://creativecommons.org/licenses/by/3.0/</dcterms:rights>
      +</entry>
      +
      +
      +
      +
      +
      List studies in a dataverse
      +

      curl https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS

      +
      +
      +
      Delete a study (non-released studies only)
      +

      curl -i -X DELETE https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345

      +
      +
      +
      Deaccession a study (released studies only)
      +

      curl -i -X DELETE https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345

      +
      +
      +
      Release a study
      +

      curl -X POST -H "In-Progress: false" --upload-file zero-length-file.txt https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/hdl:TEST/12345

      +
      +
      +
      Determine if a dataverse has been released
      +

      Look for a dataverseHasBeenReleased boolean.

      +

      curl https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS

      +
      + +
      +

      curl reference

      +

      Per http://curl.haxx.se/docs/manpage.html

      +
        +
      • –upload-file is an HTTP PUT
      • +
      • –data-binary is an HTTP POST
      • +
      +
      + +
      +

      DVN Data Deposit API v1 client sample code (Python)

      +

      https://github.com/dvn/swordpoc/tree/master/dvn_client contains sample Python code for writing a DVN Data Deposit API v1 client. It makes use of a Python client library which conforms to the SWORDv2 specification: https://github.com/swordapp/python-client-sword2

      +
      + + + + + + + + + + +
      + + + + + + + \ No newline at end of file diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/dataverse-developer-main.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/dataverse-developer-main.html Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,777 @@ + + + + + + + + + + DVN Developers Guide — The Harvard Dataverse Network 3.6.1 documentation + + + + + + + + + + + + + + + + +
      +
      +
      + +
      +
      +
      + +
      +

      DVN Developers Guide

      +

      Please note: This guide was updated in October 2013 to reflect the switch from Ant to Maven in DVN 3.6.1.

      +
      +

      Build Environment (Configuring NetBeans)

      +

      This chapter describes setting up the build environment that you will +need to build the DVN application from source code.

      +
      +

      Install NetBeans and GlassFish

      +

      As of DVN version 3.6.1 and the switch to Maven, a DVN development +environment should not have any dependency on a particular IDE, but use +of NetBeans 7.2.1 is encouraged because it’s the version used by most of +the current developers (on Mac OS X).

      +

      The NetBeans project is currently offering an installer bundle that +contains both NetBeans 7.2.1 and a supported version of GlassFish +(3.1.2.2). If they choose to discontinue the bundle, you will have to +download and install the two packages separately. Note that you can have +multiple versions of both NetBeans and GlassFish on your system.

      +

      Please note: While we intend to investigate NetBeans 7.4 and GlassFish +4, these are not yet known to provide a suitable development +environment.

      +

      We strongly recommend that you run both installs as a regular user. There’s no reason to run your development environment as root.

      +
      +

      Install NetBeans bundle

      +

      Download NetBeans 7.2.1 Java EE + GlassFish Open Source Edition 3.1.2.2 +bundle from https://netbeans.org/downloads/7.2.1

      +

      For Mac OS X, you will download a .dmg disk image that will open +automatically and start the installer for you. Choose the typical +installation but be sure to install GlassFish and JUnit when prompted.

      +

      Note that you don’t have to uninstall your existing NetBeans version. +You can have as many versions installed as you need in parallel.

      +

      When you start NetBeans 7.2.1 for the first time, you will be asked if +you want to import the settings from the previous installations. If you +have an existing, pre-DVN 3.* development environment on your system, +answer “no” – we want to create the new configuration from scratch.

      +
      +
      +

      [If you have to] Install GlassFish 3.1.2.2

      +

      We strongly recommend that you install GlassFish Server 3.1.2.2, +Open Source Edition, Full Platform. If you have to install it +separately from NetBeans, it can be obtained from +http://glassfish.java.net/downloads/3.1.2.2-final.html

      +

      The page above contains a link to the installation instructions, but the +process is very straightforward - just download and run the installer.

      +

      It is strongly recommended that you use Sun/Oracle Java JDK version 1.6. +Please make sure you have the newest (or at least, recent) build number +available for your platform. (On Mac OS X 10.8, since the JDK can be +installed as part of OS distribution, the version currently provided by +Apple should be sufficient). In other words, we do not recommend +building DVN under JDK 1.7 until the ticket regarding the move from Java +6 to 7 has been closed: https://redmine.hmdc.harvard.edu/issues/3306

      +

      Note that you don’t have to uninstall older versions of GlassFish you +may still have around. It’s ok to have multiple versions installed. But +make sure you have the 3.1.2.2 installation selected as the active +server in NetBeans.

      +

      Important: During the installation, leave the admin password fields +blank. This is not a security risk since out of the box, GlassFish +3.1.2.2 will only be accepting admin connections on the localhost +interface. Choosing a password at this stage, however, will complicate +the installation process unnecessarily. Since this is a development +system, you can probably keep this configuration unchanged (admin on +localhost only). If you need to be able to connect to the admin console +remotely, please see the note in the Appendix section of the main +Installers Guide.

      +
      +
      +

      Install JUnit (if you haven’t already)

      +

      Depending on how you installed NetBeans, you might already have JUnit +installed. JUnit can be installed from Tools -> Plugins.

      +
      +
      +
      +

      Check out a new copy of the DVN source tree

      +
      +

      Create a GitHub account [if you don’t have one already]

      +

      Sign up at https://github.com

      +

      Please note that primary audience of this guide (for now) is people who +have push access to https://github.com/IQSS/dvn . If you do not have +push access and want to contribute (and we hope you do!) please fork the +repo per https://help.github.com/articles/fork-a-repo and make +adjustments below when cloning the repo.

      +
      +
      +

      Set up an ssh keypair (if you haven’t already)

      +

      You can use git with passwords over HTTPS but it’s much nicer to set +up SSH keys.

      +

      https://github.com/settings/ssh is the place to manage the ssh keys +GitHub knows about for you. That page also links to a nice howto: +https://help.github.com/articles/generating-ssh-keys

      +

      From the terminal, ssh-keygen will create new ssh keys for you:

      +
        +
      • private key: ~/.ssh/id_rsa
          +
        • It is very important to protect your private key. If someone +else acquires it, they can access private repositories on GitHub +and make commits as you! Ideally, you’ll store your ssh keys on an +encrypted volume and protect your private key with a password when +prompted for one by ssh-keygen. See also “Why do passphrases +matter” at https://help.github.com/articles/generating-ssh-keys
        • +
        +
      • +
      • public key: ~/.ssh/id_rsa.pub
      • +
      +

      After you’ve created your ssh keys, add the public key to your GitHub +account.

      +
      +
      +

      Clone the repo

      +

      Please see branches for detail, but in short, the +“develop” branch is where new commits go. Below we will assume you want +to make commits to “develop”.

      +

      In NetBeans, click Team, then Git, then Clone.

      +
      +
      Remote Repository
      +
        +
      • Repository URL: github.com:IQSS/dvn.git
      • +
      • Username: git
      • +
      • Private/Public Key
          +
        • Private Key File: /Users/[YOUR_USERNAME]/.ssh/id_rsa
        • +
        +
      • +
      • Passphrase: (the passphrase you chose while running ssh-keygen)
      • +
      +

      Click Next.

      +

      If you are prompted about the authenticity of github.com’s RSA key fingerprint, answer “Yes” to continue connecting. GitHub’s RSA key fingerprint is listed at https://help.github.com/articles/generating-ssh-keys

      +
      +
      +
      Remote Branches
      +

      Under Select Remote Branches check the “develop” branch.

      +

      Please note: You may see other branches listed, such as “master”, but +there is no need to check them out at this time.

      +

      Click Next.

      +
      +
      +
      Destination Directory
      +

      The defaults should be fine:

      +
        +
      • Parent Directory: /Users/[YOUR_USERNAME]/NetBeansProjects
      • +
      • Clone Name: dvn
      • +
      • Checkout Branch: develop*
      • +
      • Remote Name: origin
      • +
      +

      Click Finish.

      +

      You should see a message that 3 projects were cloned. Click “Open +Project”.

      +
      +
      +
      +
      +

      Open Projects

      +

      In the “Open Projects” dialog you should see three projects, DVN-lockss, +DVN-root, and DVN-web (a child of DVN-root).

      +

      Highlight DVN-root and check “Open Required” (to include DVN-web) and click “Open”.

      +

      At this point, you should have two (and only two) projects open in +NetBeans: DVN-root and DVN-web. If you hover over the projects, it’s +normal at this point to see warnings such as “Some dependency artifacts +are not in the local repository” or “Cannot find application server: +GlassFish Server 3+”. We’ll correct these next.

      +
      +
      +

      Build for the first time

      +

      In NetBeans, right-click DVN-root and click “Build”. This will download +many dependencies via Maven and may take several minutes.

      +

      When this process has completed, right-click DVN-web and click “Build”. +You should expect to see “BUILD SUCCESS”. This means you have +successfully built the .war application package, but do not attempt to +deploy the application just yet! We need to configure the server +environment first, which consists of GlassFish and PostgreSQL

      +
      +
      +
      +

      Application Environment (Configuring GlassFish and PostgreSQL)

      +

      In this chapter, we describe the process of setting up your own local +application environment into which you will deploy the DVN application.

      +
      +

      Install PostgreSQL database server

      +

      For Mac OS X (our default development OS), you can get the installer +from http://www.postgresql.org/download/macosx

      +

      The installation is very straightforward; just make sure you answer +“yes” when asked if Postgres should be accepting network connections. +(The application will be accessing the database at the “localhost” +address).

      +

      Once installed, we recommend that you also allow connections +over local Unix sockets. This way the installer won’t have to ask you +for the Postgres password every time it needs to talk to the database. +To do so, modify the “local all all” line in the data/pg_hba.conf file +to look like this:

      +
      +
      local all all trust
      +
      +

      Note that this only opens Postgres to the local socket connections, +and should not be considered a security risk. But if you are extra +cautious, you may use instead:

      +
      +
      local all all ident sameuser
      +
      +

      Restart Postgres for the changes to take effect!

      +

      Please note: if you have any problems with the PostgreSQL setup, please +ensure the right psql is in your $PATH.

      +

      You can check the instructions in the main Installers Guide for more info: +PostgreSQL section; +but the above should be sufficient to get your environment set up.

      +
      +
      +

      Run the install-dev script

      +

      The installer is supplied with the DVN source in the tools directory. +You must run it as root (for direct access to Postgres).

      +
      +
      To run the script:
      +
      sudo su -
      +
      cd /Users/[YOUR_USERNAME]/NetBeansProjects/dvn/tools/installer/dvninstall
      +
      +
      +
      then execute
      +
      ./install-dev
      +
      +

      When prompted for various settings, you will likely be able to accept +all the default values (in a development environment, they are for the +most part the same for everybody).

      +
      +
      +

      Testing login

      +

      Once the install-dev script has completed successfully, you will +have a fully functional Dataverse Network server. After making sure +GlassFish has been started per the output of the script, you should be +able to log in to DVN with these credentials:

      + +

      Please note that when deploying from NetBeans for the first time, you +will be prompted to select a deployment server. From the drop down, +select “GlassFish Server 3.1.2”, click “Remember in Current IDE Session” +and click “OK”.

      +
      +
      +
      +

      Developing with Git

      +
      +

      Commit

      +

      Committing Changes

      +

      By following the instructions in the build step, you +should be in the “develop” branch, which is where we want to make +commits as we work toward the next release.

      +

      You can verify which branch you are on by clicking Team then “Repository +Browser”.

      +

      You should see dvn [develop] at the root of the tree and develop +in bold under Branches -> Local

      +

      Click Team, then “Show Changes”. Select the desired files and +right-click to commit.

      +

      To publish your changes on GitHub, you’ll need to follow the next step: +push.

      +
      +
      +

      Push

      +

      Pushing your commits to GitHub

      +

      After making your commit, push it to GitHub by clicking +Team -> Remote -> Push, then Next (to use your configured remote +repository), then checking develop and Finish.

      +

      Your commit should now appear on GitHub in the develop branch: +https://github.com/IQSS/dvn/commits/develop

      +

      Your commit should not appear in the master branch on GitHub: +https://github.com/IQSS/dvn/commits/master . Not yet anyway. We only +merge commits into master when we are ready to release. Please see the +branches section for more detail.

      +
      +
      +

      Release

      +
      +

      Merge develop into master

      +
      +
      Tag the release
      +

      Here is an example of how the 3.4 tag ( +https://github.com/IQSS/dvn/tree/3.4) was created and pushed to GitHub:

      +
      murphy:dvn pdurbin$ git branch
      +* develop
      +  master
      +murphy:dvn pdurbin$ git pull
      +Already up-to-date.
      +murphy:dvn pdurbin$ git checkout master
      +Switched to branch 'master'
      +murphy:dvn pdurbin$ git merge develop
      +Updating fdbfe57..6ceb24f
      +(snip)
      + create mode 100644 tools/installer/dvninstall/readme.md
      +murphy:dvn pdurbin$ git tag
      +3.3
      +murphy:dvn pdurbin$ git tag -a 3.4 -m 'merged develop, tagging master as 3.4'
      +murphy:dvn pdurbin$ git tag
      +3.3
      +3.4
      +murphy:dvn pdurbin$ git push origin 3.4
      +Counting objects: 1, done.
      +Writing objects: 100% (1/1), 182 bytes, done.
      +Total 1 (delta 0), reused 0 (delta 0)
      +To git@github.com:IQSS/dvn.git
      + * [new tag]         3.4 -> 3.4
      +murphy:dvn pdurbin$
      +murphy:dvn pdurbin$ git push origin master
      +Total 0 (delta 0), reused 0 (delta 0)
      +To git@github.com:IQSS/dvn.git
      +   fdbfe57..6ceb24f  master -> master
      +murphy:dvn pdurbin$
      +
      +
      +
      +
      +
      Make release available for download
      +

      On dvn-build:

      +
      cd tools/installer
      +make installer
      +
      +
      +

      Rename the resulting “dvninstall.zip” to include the release number +(i.e. “dvninstall_v3_4.zip”) and upload it, the separate war file, a +readme, and a buildupdate script (all these files should include the +release number) to SourceForge (i.e. +http://sourceforge.net/projects/dvn/files/dvn/3.4/).

      +
      +
      +
      Increment the version number
      +

      The file to edit is:

      + +
      +
      +
      +
      +

      Branches

      +
      +

      Current list of branches

      +

      https://github.com/IQSS/dvn/branches

      +
      +
      +

      New branching model: develop vs. master

      +

      Please note that with the move to git, we are adopting the branching +model described at +http://nvie.com/posts/a-successful-git-branching-model/

      +

      In this branching model there are two persistent branches:

      +
        +
      • develop: where all new commits go
      • +
      • master: where code gets merged and tagged as a release
      • +
      +

      That is to say, please make your commits on the develop branch, not +the master branch.

      +
      +
      +

      Feature branches

      +
      +
      “The essence of a feature branch is that it exists as long as the +feature is in development, but will eventually be merged back into +develop (to definitely add the new feature to the upcoming release) +or discarded (in case of a disappointing experiment).” – +http://nvie.com/posts/a-successful-git-branching-model/
      +
      +
      +

      Example feature branch: 2656-lucene

      +

      First, we create the branch and check it out:

      +
      murphy:dvn pdurbin$ git branch
      +  2656-solr
      +* develop
      +murphy:dvn pdurbin$ git branch 2656-lucene
      +murphy:dvn pdurbin$
      +murphy:dvn pdurbin$ git branch
      +  2656-lucene
      +  2656-solr
      +* develop
      +murphy:dvn pdurbin$ git checkout 2656-lucene
      +Switched to branch '2656-lucene'
      +murphy:dvn pdurbin$
      +murphy:dvn pdurbin$ git status
      +# On branch 2656-lucene
      +nothing to commit (working directory clean)
      +murphy:dvn pdurbin$
      +
      +
      +
      Then, we make a change and a commit, and push it to:
      +
      +
      +
      https://github.com/iqss/dvn/tree/2656-lucene (creating a new remote branch):
      +
      +
      murphy:dvn pdurbin$ vim src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java
      +murphy:dvn pdurbin$
      +murphy:dvn pdurbin$ git commit -m 'start lucene faceting branch' src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java
      +[2656-lucene 3b82f88] start lucene faceting branch
      + 1 file changed, 73 insertions(+), 2 deletions(-)
      +murphy:dvn pdurbin$
      +murphy:dvn pdurbin$ git push origin 2656-lucene
      +Counting objects: 25, done.
      +Delta compression using up to 8 threads.
      +Compressing objects: 100% (10/10), done.
      +Writing objects: 100% (13/13), 2.23 KiB, done.
      +Total 13 (delta 6), reused 0 (delta 0)
      +To git@github.com:IQSS/dvn.git
      + * [new branch]      2656-lucene -> 2656-lucene
      +murphy:dvn pdurbin$
      +
      +
      +

      +
      +

      As we work on the feature branch, we merge the latest changes from +“develop”. We want to resolve conflicts in the feature branch itself so +that the feature branch will merge cleanly into “develop” when we’re +ready. In the example below, we use git mergetool and opendiff +to resolve conflicts and save the merge. Then we push the newly-merged +2656-lucene feature branch to GitHub:

      +
      +

      +
      +
      murphy:dvn pdurbin$ git branch
      +* 2656-lucene
      +  2656-solr
      +  develop
      +murphy:dvn pdurbin$ git checkout develop
      +murphy:dvn pdurbin$ git branch
      +  2656-lucene
      +  2656-solr
      +* develop
      +murphy:dvn pdurbin$ git pull
      +remote: Counting objects: 206, done.
      +remote: Compressing objects: 100% (43/43), done.
      +remote: Total 120 (delta 70), reused 96 (delta 46)
      +Receiving objects: 100% (120/120), 17.65 KiB, done.
      +Resolving deltas: 100% (70/70), completed with 40 local objects.
      +From github.com:IQSS/dvn
      +   8fd223d..9967413  develop    -> origin/develop
      +Updating 8fd223d..9967413
      +Fast-forward
      + .../admin/EditNetworkPrivilegesServiceBean.java  |    5 +-
      +(snip)
      + src/DVN-web/web/study/StudyFilesFragment.xhtml   |    2 +-
      + 12 files changed, 203 insertions(+), 118 deletions(-)
      +murphy:dvn pdurbin$ murphy:dvn pdurbin$ git pull
      +remote: Counting objects: 206, done.
      +remote: Compressing objects: 100% (43/43), done.
      +remote: Total 120 (delta 70), reused 96 (delta 46)
      +Receiving objects: 100% (120/120), 17.65 KiB, done.
      +Resolving deltas: 100% (70/70), completed with 40 local objects.
      +From github.com:IQSS/dvn
      +   8fd223d..9967413  develop    -> origin/develop
      +Updating 8fd223d..9967413
      +Fast-forward
      + .../admin/EditNetworkPrivilegesServiceBean.java  |    5 +-
      +(snip)
      + .../harvard/iq/dvn/core/web/study/StudyUI.java   |    2 +-
      + src/DVN-web/web/HomePage.xhtml                   |    5 +-
      +murphy:dvn pdurbin$
      +murphy:dvn pdurbin$ git checkout 2656-lucene
      +Switched to branch '2656-lucene'
      +murphy:dvn pdurbin$
      +murphy:dvn pdurbin$
      +murphy:dvn pdurbin$ git merge develop
      +Auto-merging src/DVN-web/web/BasicSearchFragment.xhtml
      +CONFLICT (content): Merge conflict in src/DVN-web/web/BasicSearchFragment.xhtml
      +Auto-merging src/DVN-web/src/edu/harvard/iq/dvn/core/web/BasicSearchFragment.java
      +Auto-merging src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java
      +Automatic merge failed; fix conflicts and then commit the result.
      +murphy:dvn pdurbin$
      +murphy:dvn pdurbin$ git status
      +# On branch 2656-lucene
      +# Changes to be committed:
      +#
      +#       modified:   src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/admin/EditNetworkPrivilegesServiceBean.java
      +(snip)
      +#       new file:   src/DVN-web/web/admin/ChooseDataverseForCreateStudy.xhtml
      +#       modified:   src/DVN-web/web/study/StudyFilesFragment.xhtml
      +#
      +# Unmerged paths:
      +#   (use "git add/rm <file>..." as appropriate to mark resolution)
      +#
      +#       both modified:      src/DVN-web/web/BasicSearchFragment.xhtml
      +#
      +murphy:dvn pdurbin$ git mergetool
      +merge tool candidates: opendiff kdiff3 tkdiff xxdiff meld tortoisemerge gvimdiff diffuse ecmerge p4merge araxis bc3 emerge vimdiff
      +Merging:
      +src/DVN-web/web/BasicSearchFragment.xhtml
      +
      +Normal merge conflict for 'src/DVN-web/web/BasicSearchFragment.xhtml':
      +  {local}: modified file
      +  {remote}: modified file
      +Hit return to start merge resolution tool (opendiff):
      +murphy:dvn pdurbin$
      +murphy:dvn pdurbin$ git add .
      +murphy:dvn pdurbin$
      +murphy:dvn pdurbin$ git commit -m "Merge branch 'develop' into 2656-lucene"
      +[2656-lucene 519cd8c] Merge branch 'develop' into 2656-lucene
      +murphy:dvn pdurbin$
      +murphy:dvn pdurbin$ git push origin 2656-lucene
      +(snip)
      +murphy:dvn pdurbin$
      +
      +
      +
      When we are ready to merge the feature branch back into the develop branch, we can do so.
      +
      +
      +
      Here’s an example of merging the 2656-lucene branch back into develop:
      +
      +
      murphy:dvn pdurbin$ git checkout 2656-lucene
      +Switched to branch '2656-lucene'
      +murphy:dvn pdurbin$ git pull
      +Already up-to-date.
      +murphy:dvn pdurbin$ git checkout develop
      +Switched to branch 'develop'
      +murphy:dvn pdurbin$ git pull
      +Already up-to-date.
      +murphy:dvn pdurbin$ git merge 2656-lucene
      +Removing lib/dvn-lib-EJB/lucene-core-3.0.0.jar
      +Merge made by the 'recursive' strategy.
      + lib/dvn-lib-EJB/lucene-core-3.0.0.jar                                     |  Bin 1021623 -> 0 bytes
      + lib/dvn-lib-EJB/lucene-core-3.5.0.jar                                     |  Bin 0 -> 1466301 bytes
      + lib/dvn-lib-EJB/lucene-facet-3.5.0.jar                                    |  Bin 0 -> 293582 bytes
      + src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/DvnQuery.java          |  160 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++
      + src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/IndexServiceBean.java  |   56 ++++++++++++++++++++
      + src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/IndexServiceLocal.java |   16 +++++-
      + src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/Indexer.java           |  432 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++--
      + src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/ResultsWithFacets.java |   71 +++++++++++++++++++++++++
      + src/DVN-web/src/SearchFieldBundle.properties                              |    4 +-
      + src/DVN-web/src/edu/harvard/iq/dvn/core/web/AdvSearchPage.java            |   86 +++++++++++++++++++++++++++++++
      + src/DVN-web/src/edu/harvard/iq/dvn/core/web/BasicSearchFragment.java      |  102 +++++++++++++++++++++++++++++++++++-
      + src/DVN-web/src/edu/harvard/iq/dvn/core/web/StudyListing.java             |   11 ++++
      + src/DVN-web/src/edu/harvard/iq/dvn/core/web/StudyListingPage.java         |  428 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++-
      + src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetResultUI.java      |   42 +++++++++++++++
      + src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetUI.java            |   62 ++++++++++++++++++++++
      + src/DVN-web/web/AdvSearchPage.xhtml                                       |    3 +-
      + src/DVN-web/web/BasicSearchFragment.xhtml                                 |    9 ++--
      + src/DVN-web/web/StudyListingPage.xhtml                                    |   43 +++++++++++-----
      + 18 files changed, 1500 insertions(+), 25 deletions(-)
      + delete mode 100644 lib/dvn-lib-EJB/lucene-core-3.0.0.jar
      + create mode 100644 lib/dvn-lib-EJB/lucene-core-3.5.0.jar
      + create mode 100644 lib/dvn-lib-EJB/lucene-facet-3.5.0.jar
      + create mode 100644 src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/DvnQuery.java
      + create mode 100644 src/DVN-EJB/src/java/edu/harvard/iq/dvn/core/index/ResultsWithFacets.java
      + create mode 100644 src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetResultUI.java
      + create mode 100644 src/DVN-web/src/edu/harvard/iq/dvn/core/web/study/FacetUI.java
      +murphy:dvn pdurbin$
      +murphy:dvn pdurbin$ git status
      +# On branch develop
      +# Your branch is ahead of 'origin/develop' by 68 commits.
      +#
      +nothing to commit (working directory clean)
      +murphy:dvn pdurbin$
      +murphy:dvn pdurbin$ git push
      +Counting objects: 51, done.
      +Delta compression using up to 8 threads.
      +Compressing objects: 100% (12/12), done.
      +Writing objects: 100% (19/19), 1.41 KiB, done.
      +Total 19 (delta 7), reused 0 (delta 0)
      +To git@github.com:IQSS/dvn.git
      +   b7fae01..2b88b68  develop -> develop
      +murphy:dvn pdurbin$
      +
      +
      +
      +

      Switching to the master branch to merge commits from the develop branch

      +

      We should really only need to switch from the develop branch to the +master branch as we prepare for a release.

      +

      First, we check out the master branch by clicking Team -> Git -> Branch +-> Switch to Branch.

      +

      Change Branch to “origin/master” and check the box for “Checkout as New +Branch” and fill in “master” as the “Branch Name” to match the name of +the branch we’re switching to. Then click “Switch”.

      +

      Now, in the Git Repository Browser (from Team -> Repository Browser) the +root of the tree should say dvn [master] and you should see two +branches under Branches -> Local. master should be in bold and +develop should not.

      +
      +
      +
      +

      Tips

      +
      +

      Previewing changes before a pull

      +

      If the build fails overnight you may want to hold off on doing a pull +until the problem is resolved. To preview what has changed since your +last pull, you can do a git fetch (the first part of a pull) then +git log HEAD..origin/develop to see the commit messages. +git log -p or git diff will allow you to see the contents of the +changes:

      +
      git checkout develop
      +git fetch
      +git log HEAD..origin/develop
      +git log -p HEAD..origin/develop
      +git diff HEAD..origin/develop
      +
      +

      After the build is working again, you can simply do a pull as normal.

      +
      +
      +
      +

      Errors

      +
      +

      Duplicate class

      +

      The error “duplicate class” can result whenever you resolve a merge +conflict in git.

      +

      The fix is to close NetBeans and delete (or move aside) the cache like +this:

      +
      cd ~/Library/Caches/NetBeans
      +mv 7.2.1 7.2.1.moved
      +
      +

      According to https://netbeans.org/bugzilla/show_bug.cgi?id=197983 this might be fixed in NetBeans 7.3.

      +
      +
      +
      +
      + + +
      +
      +
      +
      + +
      +
      +
      + + + + + \ No newline at end of file diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/dataverse-installer-main.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/dataverse-installer-main.html Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,1209 @@ + + + + + + + + + + Installers Guide — The Harvard Dataverse Network 3.6.1 documentation + + + + + + + + + + + + + + + + +
      +
      +
      + +
      +
      +
      + +
      +

      Installers Guide

      +

      Introduction

      +

      This is our “new and improved” installation guide; it was first +released with the Dataverse Network application version 2.2.4, when we +introduced the new, automated and much simplified installation process. +As of February 2012, it has been updated to reflect the changes made in +the newly released version 3.0 of the software. (Our existing users will +notice however, that the changes in the installation process have been +fairly minimal).

      +

      The guide is intended for anyone who needs to install the DVN app, +developers and Dataverse Network administrators alike.

      +

      The top-down organization of the chapters and sections is that of +increasing complexity. First a very basic, simple installation scenario +is presented. The instructions are straightforward and only the required +components are discussed. This use case will in fact be sufficient for +most DVN developers and many Dataverse Network administrators. Chances +are you are one of such users, so if brave by nature, you may stop +reading this section and go straight to the “Quick Install” chapter.

      +

      The “basic” installation process described in the first chapter is +fully automated, everything is performed by a single interactive script. +This process has its limitations. It will likely work only on the +supported platforms. Optional components need to be configured outside +of the Installer (these are described in the “Optional Components” +section).

      +

      For an advanced user, we provide the detailed explanations of all the +steps performed by the Installer. This way he or she can experiment with +individual configuration options, having maximum flexibility and control +over the process. Yet we tried to organize the advanced information in +such a way that those who only need the most basic instructions would +not have to read through it unnecessarily. Instead we provide them with +an easy way to get a bare-bones configuration of the DVN up and running.

      +

      If you are interested in practicing a DVN installation in a Vagrant +environment you can later throw away, please follow the instructions at +https://github.com/dvn/dvn-install-demo to spin up a Linux virtual +machine on your laptop with vagrant up. When you are finished with +this temporary DVN installation, you can delete the virtual machine with +vagrant destroy.

      +

      If you encounter any problems during installation, please contact the +development team +at support@thedata.org +or our Dataverse Users +Community.

      +
      +

      Quick Install

      +

      For an experienced and/or rather bold user, this is a 1 +paragraph version of the installation instructions:

      +

      This should work on RedHat and its derivatives, and MacOS X. If this +does not describe your case, you will very likely have to install and +configure at least some of the components manually. Meaning, you may +consider reading through the chapters that follow! Still here? Great. +Prerequisites: Sun/Oracle Java JDK 1.6_31+ and a “virgin” installation +of Glassfish v3.1.2; PostgreSQL v8.3+, configured to listen to network +connections and support password authentication on the localhost +interface; you may need R as well. See the corresponding sections under +“2. Prerequisites”, if necessary. Download the installer package from +SourceForge:

      +

      http://sourceforge.net/projects/dvn/files/dvn

      +

      Choose the latest version and download the dvninstall zip file.

      +

      Unzip the package in a temp location of your choice (this will create +the directory dvninstall). Run the installer, as root:

      +
      +
      +
      cd dvninstall
      +
      ./install
      +
      +
      +

      Follow the installation prompts. If it all works as it should, you +will have a working DVN instance running in about a minute from now.

      +

      Has it worked? Awesome! Now you may read the rest of the guide +chapters at your own leisurely pace, to see if you need any of the +optional components described there. And/or if you want to understand +what exactly has just been done to your system.

      +
      +
      +

      SYSTEM REQUIREMENTS

      +

      Or rather, recommendations. The closer your configuration is to what’s +outlined below, the easier it will be for the DVN team to provide +support and answer your questions.

      +
        +
      • Operating system - The production version of the Dataverse Network at +IQSS (dvn.iq.harvard.edu) runs on RedHat Linux 5. Most of the DVN +development is currently done on MacOS X. Because of our experience +with RedHat and MacOS X these are the recommended platforms. You +should be able to deploy the application .ear file on any other +platform that supports Java. However, the automated installer we +provide will likely work on RedHat and MacOS only. Some information +provided in this guide is specific to these 2 operating systems. (Any +OS-specific instructions/examples will be clearly marked, for +example:[MacOS-specific:])
      • +
      • CPU - The production IQSS Dataverse Network runs on generic, +multi-core 64-bit processors.
      • +
      • Memory - The application servers currently in production at the IQSS +have 64 GB of memory each. Development and testing systems require a +minimum of 2 gigabytes of memory.
      • +
      • Disk space - How much disk space is required depends on the amount of +data that you expect to serve. The IQSS Dataverse Network file system +is a standalone NetApp with 2 TB volume dedicated to the DVN data.
      • +
      • Multiple servers – All the DVN components can run on the same server. +On a busy, hard-working production network the load can be split +across multiple servers. The 3 main components, the application +server (Glassfish), the database (Postgres) and R can each run on its +own host. Furthermore, multiple application servers sharing the same +database and R server(s) can be set up behind a load balancer. +Developers would normally run Glassfish and Postgres on their +workstations locally and use a shared R server.
      • +
      • If it actually becomes a practical necessity to bring up more servers +to handle your production load, there are no universal instructions +on how to best spread it across extra CPUs. It will depend on the +specifics of your site, the nature of the data you serve and the +needs of your users, whether you’ll benefit most from dedicating +another server to run the database, or to serve R requests. Please +see the discussion in the corresponding sections of the Prerequisites +chapter.
      • +
      +
      +
      +

      PREREQUISITES

      +

      In this chapter, an emphasis is made on clearly identifying those +components that are absolutely required for every installation and +marking any advanced, optional instructions as such.

      +
      +

      Glassfish

      +

      Version 3.1.2 is required.

      +

      Make sure you have Sun/Oracle Java JDK version 1.6, build 31 +or newer. It is available from +http://www.oracle.com/technetwork/java/javase/downloads/index.html.

      +

      [note for developers:]

      +

      If you are doing this installation as part of your DVN software +development setup: The version of NetBeans currently in use by the DVN +team is 7.0.1, and it is recommended that you use this same version if +you want to participate in the development. As of writing of this +manual, NetBeans 7.0.1 installer bundle comes with an older version of +Glassfish. So you will have to install Glassfish version 3.1.2 +separately, and then select it as the default server for your NetBeans +project.

      +

      [/note for developers]

      +

      We strongly recommend that you install GlassFish Server 3.1.2, +Open Source Edition, Full Platform. You are very likely to run into +installation issues if you attempt to run the installer and get the +application to work with a different version! Simply transitioning from +3.1.1 to 3.1.2 turned out to be a surprisingly complex undertaking, +hence this recommendation to all other installers and developers to stay +with the same version.

      +

      It can be obtained from

      +

      http://glassfish.java.net/downloads/3.1.2-final.html

      +

      The page contains a link to the installation instructions. However, +the process is completely straightforward. You are given 2 options for +the format of the installer package. We recommend that you choose to +download it as a shell archive; you will need to change its executable +permission, with chmod +x, and then run it, as root:

      +

      ./installer-filename.sh

      +

      [Important:]

      +

      Leave the admin password fields blank. This is not a security risk, +since out of the box, Glassfish will only be accepting admin connections +on the localhost interface. Choosing password at this stage however will +complicate the installation process unnecessarily. If this is a +developer's installation, you can probably keep this configuration +unchanged (admin on localhost only). If you need to be able to connect +to the admin console remotely, please see the note in the Appendix +section of the manual.

      +

      [/Important]

      +
      +
      [Advanced:]
      +
      [Unix-specific:]
      +
      +

      The installer shell script will normally attempt to run in a graphic +mode. If you are installing this on a remote Unix server, this will +require X Windows support on your local workstation. If for whatever +reason it’s not available, you have an option of running it in a silent +mode - check the download page, above, for more information.

      +
      +
      [/Unix-specific]
      +
      [/Advanced]
      +
      +
      +
      +

      PostgreSQL

      +
      +
      Version 8.3 or higher is required.
      +
      Installation instructions specific to RedHat Linux and MacOS X are
      +
      provided below.
      +
      Once the database server is installed, you’ll need to configure access
      +
      control to suit your installation.
      +
      Note that any modifications to the configuration files above require you to restart Postgres:
      +
      service postgresql restart (RedHat)
      +
      +
      +
      or
      +
      “Restart Server” under Applications -> PostgreSQL (MacOS X)
      +
      +

      By default, most Postgres distributions are configured to listen to network connections on the localhost interface only; and to only support ident for authentication. (The MacOS installer may ask you if network connections should be allowed - answer “yes”). At a minimum, if GlassFish is running on the same host, it will also need to allow password authentication on localhost. So you will need to modify the “host all all 127.0.0.1” line in your /var/lib/pgsql/data/pg_hba.conf so that it looks like this:

      +
      +
      host all all 127.0.0.1/32 password
      +
      +

      Also, the installer script needs to have direct access to the local PostgreSQL server via Unix domain sockets. So this needs to be set to either “trust” or “ident”. I.e., your pg_hba.conf must contain either of the 2 lines below:

      +
      +
      local all all ident sameuser
      +
      or
      +
      local all all trust
      +
      +

      (“ident” is the default setting; but if it has been changed to +“password” or “md5”, etc. on your system, Postgres will keep prompting +you for the master password throughout the installation)

      +

      [optional:]

      +

      If GlassFish will be accessing the database remotely, add or modify the following line in your <POSTGRES DIR>/data/postgresql.conf:

      +
      +
      listen_addresses='*'
      +
      +

      to enable network connections on all interfaces; and add the following +line to pg_hba.conf:

      +
      +
      host all all [ADDRESS]      255.255.255.255 password
      +
      +
      +
      where [ADDRESS] is the numeric IP address of the GlassFish server.
      +
      Using the subnet notation above you can enable authorization for multiple hosts on your network. For example,
      +
      +
      +
      host all all 140.247.115.0 255.255.255.0 password
      +
      +
      +
      will permit password-authenticated connections from all hosts on the 140.247.115.* subnet.
      +
      [/optional:]
      +
      +
      +

      +
      [RedHat-specific:]
      +
      [Advanced:]
      +
      +

      Please note that the instructions below are meant for users who have some experience with basic RedHat admin tasks. You should be safe to proceed if an instruction such as “uninstall the postgres rpms” makes sense to you immediately. I.e., if you already know how to install or uninstall an rpm package. Otherwise we recommend that you contact your systems administrator.

      +

      For RedHat (and relatives), version 8.4 is now part of the distribution. As of RedHat 5, the default postgresql rpm is still version 8.1. So you may have to un-install the postgresql rpms, then get the ones for version 8.4:

      +
      +
      yum install postgresql84 postgresql84-server
      +
      +

      Before you start the server for the first time with

      +
      +
      service postgresql start
      +
      +

      You will need to populate the initial database with

      +
      +
      service postgresql initdb
      +
      +
      +
      [/advanced]
      +
      [/RedHat-specific]
      +
      +

      [MacOS-specific:]

      +

      Postgres Project provides a one click installer for Mac OS X 10.4 and +above at +http://www.postgresql.org/download/macosx. +Fink and MacPorts packages are also available.

      +

      [/MacOS-specific]

      +
      +
      [advanced:]
      +
      [optional:]
      +
      +

      See the section PostgreSQL setup in the Appendix for the description of the steps that the automated installer takes to set up PostgreSQL for use with the DVN.

      +
      +
      [/optional]
      +
      [/advanced]
      +
      +
      +
      +

      R and RServe

      +

      Strictly speaking, R is an optional component. You can bring up a +running DVN instance without it. The automated installer will allow such +an installation, with a warning. Users of this Dataverse Network will be +able to upload and share some data. Only the advanced modes of serving +quantitative data to the users require R [style?]. Please consult +the “Do you need R?” section in the Appendix for an extended discussion of this.

      +
      +
      Installation instructions:
      +
      +

      Install the latest version of R from your favorite CRAN mirror (refer to http://cran.r-project.org/ for more information). Depending on your OS distribution, this may be as simple as typing

      +
      +
      [RedHat/Linux-specific:]
      +
      +

      yum install R R-devel

      +

      (for example, the above line will work in CentOS out of the box; in RedHat, you will have to add support for EPEL repository – see +http://fedoraproject.org/wiki/EPEL +– then run the yum install command)

      +
      +
      [/RedHat/Linux-specific]
      +
      +

      Please make sure to install the “devel” package too! you will need it +to build the extra R modules.

      +

      Once you have R installed, download the package dvnextra.tar from this location:

      +

      http://dvn.iq.harvard.edu/dist/R/dvnextra.tar

      +

      Unpack the archive:

      +

      tar xvf dvnextra.tar

      +

      then run the supplied installation shell script as root:

      +
      +
      +
      cd dvnextra
      +
      +
      ./installModules.sh
      +
      +

      This will install a number of R modules needed by the DVN to run statistics and analysis, some from CRAN and some supplied in the bundle; it will also configure Rserve to run locally on your system and install some startup files that the DVN will need.

      +

      Please note that the DVN application requires specific versions of the 3rd-party R packages. For example, if you obtain and install the version of Zelig package currently available from CRAN, it will not work with the application. This is why we distribute the sources of the correct versions in this tar package.

      +
      +
      [advanced:]
      +
      We haven’t had much experience with R on any platforms other than RedHat-and-the-like. Our developers use MacOS X, but point their DVN instances to a shared server running Rserve under RedHat.
      +
      +

      The R project ports their distribution to a wide range of platforms. However, the installer shell script above will only run on Unix; and is not really guaranteed to work on anything other than RedHat. If you have some experience with either R or system administration, you should be able to use the script as a guide to re-create the configuration steps on any other platform quite easily. You will, however, be entirely on your own while embarking on that adventure. +[/advanced]

      +
      +
      +

      System Configuration

      +

      [Advanced/optional:]

      +

      Many modern OS distributions come pre-configured so that all the +network ports are firewalled off by default.

      +

      Depending on the configuration of your server, you may need to open some +of the following ports.

      +

      On a developers personal workstation, the user would normally access his +or her DVN instance on the localhost interface. So no open ports are +required unless you want to give access to your DVN to another +user/developer.

      +

      When running a DVN that is meant to be accessible by network users: At a +minimum, if all the components are running on the same server, the HTTP +port 80 needs to be open. You may also want to open TCP 443, to be able +to access Glassfish admin console remotely.

      +

      If the DVN is running its own HANDLE.NET server (see Chapter 4. +“Optional Components”), the TCP port 8000 and TCP/UDP ports 2641 are +also needed.

      +

      If the DVN application needs to talk to PostgreSQL and/or Rserve running +on remote hosts, the TCP ports 5432 and 6311, respectively, need to be +open there.

      +

      [/Advanced/optional]

      +
      +
      +
      +

      RUNNING THE INSTALLER

      +

      Once the Prerequisites have been taken care of, the DVN application can be installed.

      +

      The installer package can be downloaded from our repository on SourceForge at

      +

      http://sourceforge.net/projects/dvn/files/dvn/3.0/dvninstall_v3_0.zip

      +
      +
      Unzip the package in a temp location of your choice (this will create the directory dvninstall). Run the installer, as root:
      +
      +
      cd dvninstall
      +
      ./install
      +
      +
      +

      Follow the installation prompts. The installer will first verify the contents of the package and check if the required components +(in Prerequisites) are present on the system. Then it will lead you through the application setup.

      +
      +
      [Advanced:]
      +
      +

      The limitations of the installer package:

      +

      Some extra configuration steps will be required if the PostgreSQL database is being set up on a remote server.

      +

      It will most likely only work on the supported platforms, RedHat and Mac OS X.

      +

      It is only guaranteed to work on a fresh Glassfish installation. If you already have more than one Glassfish domain created and/or have applications other than the DVN running under Glassfish, please consult the “What does the Installer do?” section.

      +

      It does not install any of the optional components (see Chapter 4.)

      +

      For the detailed explanation of the tasks performed by the Installer, see the “What does the Installer do?” section.

      +
      +
      [/Advanced]
      +
      +
      +
      +

      Optional Components

      +

      [The sections on ImageMagick, Google Analytics and Captcha have been rewritten and, hopefully, made less confusing. The Handles instructions have also been modified, but I would like to work on it some more. Namely I'd like to read their own technical manual, and see if we should provide our own version of installation instructions, similarly to what we do with some other packages; we've heard complaints from users about their manual not being very easy to follow]

      +
      +

      reCAPTCHA bot blocker

      +

      We found that our “email us” feature can be abused to send spam +messages. You can choose to use the reCAPTCHA filter to help prevent +this. Configure the filter as follows:

      +
        +
      1. +
        Go to reCAPTCHA web site at
        + +
        and sign up for an account.
        +
        Register your website domain to acquire a public/private CAPTCHA key pair.
        +
        Record this information in a secure location.
        +
        +
      2. +
      3. Insert the public/private key pair and domain for your reCAPTCHA +account into the captcha table of the DVN PostgreSQL database. +Use psql, pgadmin or any other database utility; the SQL +query will look like this: +INSERT INTO captcha (publickey, domainname, privatekey) VALUES ('sample', 'sample.edu', 'sample')

        +
      4. +
      5. Verify that the Report Issue page is now showing the reCAPTCHA +challenge.

        +
      6. +
      +
      +
      +

      Google Analytics

      +

      Network Admins can use the Google Analytics tools to view Dataverse Network website usage statistics.

      +

      Note: It takes about 24 hours for Google Analytics to start monitoring +your website after the registration.

      +
      +

      +
      To enable the use of Google Analytics:
      +
      +
        +
      1. Go to the Google Analytics homepage at +http://www.google.com/analytics/index.html.

        +
      2. +
      3. Set up a Google Analytics account and obtain a tracking code for your Dataverse Network installation.

        +
      4. +
      5. Use the Google Analytics Help Center to find how to add the tracking code to the content you serve.

        +
      6. +
      7. +
        Configure the DVN to use the tracking key (obtained in Step 2,
        +

        above), by setting the dvn.googleanalytics.key JVM option in +Glassfish.

        +

        This can be done by adding the following directly to the +domain.xml config file (for example: /usr/local/glassfish/domains/domain1/config/domain.xml): +<jvm-options>-Ddvn.googleanalytics.key=XX-YYY</jvm-options> (this will require Glassfish restart)

        +

        Or by using the Glassfish Admin Console configuration GUI. Consult the “Glassfish Configuration” section in the Appendix.

        +
        +
        +
      8. +
      +

      Once installed and activated, the usage statistics can be accessed from +the Network Options of the DVN.

      +
      +
      +

      ImageMagick

      +

      When image files are ingested into a DVN, the application +automatically creates small “thumbnail” versions to display on the +Files View page. These thumbnails are generated once, then cached for +future use.

      +

      Normally, the standard Java image manipulation libraries are used to +do the scaling. If you have studies with large numbers of large +images, generating the thumbnails may become a time-consuming task. If +you notice that the Files view takes a long time to load for the first +time because of the images, it is possible to improve the +performance by installing the ImageMagick package. If it is +installed, the application will automatically use its +/usr/bin/convert utility to do the resizing, which appears to be +significantly faster than the Java code.

      +

      ImageMagick is available for, or even comes with most of the popular OS distributions.

      +
      +
      <RedHat-Specific:>
      +
      +

      It is part of the full RedHat Linux distribution, although it is not +included in the default “server” configuration. It can be installed on a +RedHat server with the yum install ImageMagick command.

      +

      </RedHat-Specific>

      +
      +
      +

      Handle System

      +

      DVN administrators may choose to set up a HANDLE.NET server to issue and register persistent, global identifiers for their studies. The DVN app can be modified to support other naming services, but as of now it comes +pre-configured to use Handles.

      +

      To install and set up a local HANDLE.NET server:

      +
        +
      1. Download HANDLE.NET. +Refer to the HANDLE.NET software download page at +http://handle.net/download.html.
      2. +
      3. Install the server on the same host as GlassFish. +Complete the installation and setup process as described in the +HANDLE.NET Technical Manual: +http://www.handle.net/tech_manual/Handle_Technical_Manual.pdf.
      4. +
      5. Accept the default settings during installation, with one +exception: do not encrypt private keys (this will make it easier to +manage the service). Note that this means answer ‘n’ when +prompted “Would you like to encrypt your private key?(y/n). [y]:” If +you accept the default ‘y’ and then hit return when prompted for +passphrase, this will encrypt the key, with a blank pass phrase!
      6. +
      7. During the installation you will be issued an “authority prefix”. +This is an equivalent of a domain name. For example, the prefix +registered to the IQSS DVN is “1902.1”. The IDs issued to IQSS +studies are of a form “1902.1/XXXX”, where “XXXX” is some unique +identifier.
      8. +
      9. Use psql or pgAdmin to execute the following SQL command: +insert into handleprefix (prefix) values( '<your HANDLE.NET prefix>');
      10. +
      11. (Optional/advanced) If you are going to be assigning HANDLE.NET +ids in more than 1 authority prefix (to register studies harvested +from remote sources): Once you obtain the additional HANDLE.NET +prefixes, add each to the handleprefix table, using the SQL +command from step 3.
      12. +
      13. Use psql or pgAdmin to execute the following SQL +command: update vdcnetwork set handleregistration=true, authority='<your HANDLE.NET prefix>';
      14. +
      +

      Note: The DVN app comes bundled with the HANDLE.NET client libraries. +You do not need to install these separately.

      +
      +
      +

      Twitter setup

      +

      To set up the ability for users to enable Automatic Tweets in your +Dataverse Network:

      +
        +
      1. You will first need to tell Twitter about your Dataverse Network Application. Go to https://dev.twitter.com/apps and login (or create a new Twitter account).

        +
      2. +
      3. Click “Create a new application”.

        +
      4. +
      5. Fill out all the fields. For callback URL, use your Dataverse Network Home Page URL.

        +
      6. +
      7. Once created, go to settings tab and set Application Type to “Read and Write”. You can optionally also upload an Application +Icon and fill out Organization details (the end user will see these).

        +
      8. +
      9. +
        Click details again. You will need both the Consumer key and secret as JVM Options. Add via Glassfish console:
        +

        -Dtwitter4j.oauth.consumerKey=***

        +

        -Dtwitter4j.oauth.consumerSecret=***

        +
        +
        +
      10. +
      11. Restart Glassfish.

        +
      12. +
      13. To verify that Automatic Tweets are now properly set up, you can go to the Dataverse Network Options page or any Dataverse Options page and see that there is a new option, “Enable Twitter”.

        +
      14. +
      +
      +
      +

      Digital Object Identifiers

      +

      Beginning with version 3.6, DVN will support the use of Digital Object Identifiers. Similar to the currently enabled Handle System, these DOIs will enable a permanent link to studies in a DVN network.

      +

      DVN uses the EZID API (www.n2t.net/ezid) to facilitate the creation and maintenance of DOIs. Network administrators will have to arrange to get their own account with EZID in order to implement creation of DOIs. Once an account has been set up the following settings must be made in your DVN set-up:

      +

      Update your database with the following query:

      +

      Use psql or pgAdmin to execute the following SQL command: +update vdcnetwork set handleregistration=true,  protocol = 'doi', authority='<the namespace associated with your EZID account>' where id = 0;

      +

      Add the following JVM options:

      +

      -Ddoi.username=<username of your EZID account>

      +

      -Ddoi.password=<password of your EZID account>

      +

      -Ddoi.baseurlstring=https://ezid.cdlib.org

      +

      Note: The DVN app comes bundled with the EZID API client libraries. You do not need to install these separately.

      +
      +
      +
      +

      Appendix

      +
      +

      Do you need R?

      +

      This is a more detailed explanation of the statement made earlier in the “Prerequisites” section: “Only the advanced modes of serving quantitative data to the users require R.” [style?]

      +

      In this context, by “quantitative data” we mean data sets for which +machine-readable, variable-level metadata has been defined in the DVN +database. “Subsettable data” is another frequently used term, in the +DVN parlance. The currently supported sources of subsettable data are +SPSS and STATA files, as well as row tabulated or CSV files, with +extra control cards defining the data structure and variable +metadata. (See full documentation in User Guide for Finding and Using Data.)

      +

      Once a “subsettable” data set is created, users can run online statistics and analysis on it. That’s where R is used. In our experience, most of the institutions who have installed the DVN did so primarily in order to share and process quantitative data. When this is the case, R must be considered a required component. But for a DVN network built to serve a collection of strictly human-readable (text, image, etc.) data, R will not be necessary at all.

      +
      +
      +

      What does the Installer do?

      +

      The Installer script (chapters Quick Install, Running the Installer.) automates the following tasks:

      +
        +
      1. Checks the system for required components;
      2. +
      3. Prompts the user for the following information:
          +
        1. Location of the Glassfish directory;
        2. +
        3. Access information (host, port, database name, username, password) for PostgreSQL;
        4. +
        5. Access information (host, port, username, password) for Rserve;
        6. +
        +
      4. +
      5. Attempts to create the PostgreSQL user (role) and database, from the prerequisite PostgreSQL setup step above; see the “PostgreSQL configuration” Appendix section for details.
      6. +
      7. Using the Glassfish configuration template (see the corresponding section in the Appendix) and the information collected in step 2.b. above, creates the config file domain.xml and installs it in the Glassfish domain directory.
      8. +
      9. Copies additional configuration files (supplied in the dvninstall/config directory of the Installer package) into the config directory of the Glassfish domain.
      10. +
      11. Installs Glassfish Postgres driver (supplied in the dvninstall/pgdriver directory of the Installer package) into the lib directory in the Glassfish installation tree.
      12. +
      13. Attempts to start Glassfish. The config file at this point contains the configuration settings that the DVN will need to run (see section Glassfish Configuration, individual settings section of the Appendix), but otherwise it is a “virgin”, fresh config. Glassfish will perform some initialization tasks on this first startup and deploy some internal apps.
      14. +
      15. If step 5. succeeds, the Installer attempts to deploy the DVN application (the Java archive DVN-EAR.ear supplied with the installer).
      16. +
      17. Stops Glassfish, populates the DVN database with the initial content (section “PostgreSQL configuration” of the Appendix), starts Glassfish.
      18. +
      19. Attempts to establish connection to Rserve, using the access information obtained during step 2.c. If this fails, prints a warning message and points the user to the Prerequisites section of this guide where R installation is discussed.
      20. +
      21. Finally, prints a message informing the user that their new DVN should be up and running, provides them with the server URL and suggests that they visit it, to change the default passwords and perhaps start setting up their Dataverse Network.
      22. +
      +

      Throughout the steps above, the Installer attempts to diagnose any +potential issues and give the user clear error messages when things go +wrong (“version of Postgres too old”, “you must run this as root”, +etc.).

      +

      Enough information is supplied in this manual to enable a user (a +skilled and rather patient user, we may add) to perform all the steps +above without the use of the script.

      +
      +
      +

      Glassfish configuration template

      +

      The configuration template (domain.xml.TEMPLATE) is part of the +installer zip package. The installer replaces the placeholder +configuration tokens (for example, %POSTGRES_DATABASE%) with the +real values provided by the user to create the Glassfish configuration +file domain.xml.

      +

      [I was thinking of copy-and-pasting the entire template file here; +but it is 30K of XML, so I decided not to. The above explains where it +can be found, if anyone wants to look at it, for reference or +whatever]

      +
      +
      +

      Glassfish Configuration, individual settings

      +

      As explained earlier in the Appendix, the Installer configures Glassfish +by cooking a complete domain configuration file (domain.xml) and +installing it in the domain directory.

      +

      All of the settings and options however can be configured individually +by an operator, using the Glassfish Admin Console.

      +

      The Console can be accessed at the network port 4848 when Glassfish is +running, by pointing a browser at

      +
      +
      http://[your host name]:4848/
      +

      and logging in as admin. The initial password is adminadmin. It +is of course strongly recommended to log in and change it first thing +after you run the Installer.

      +

      The sections below describe all the configuration settings that would +need to be done through the GUI in order to replicate the configuration +file produced by the Installer. This information is provided for the +benefit of an advanced user who may want to experiment with individual +options. Or to attempt to install DVN on a platform not supported by our +installer; although we wish sincerely that nobody is driven to such +desperate measures ever.

      +
      +

      JVM options

      +

      Under Application Server->JVM Settings->JVM Options:

      +

      If you are installing Glassfish in a production environment, follow +these steps:

      +
        +
      1. +
        Delete the following options: -Dsun.rmi.dgc.server.gcInterval=3600000
        +
        -Dsun.rmi.dgc.client.gcInterval=3600000
        +
        +
      2. +
      3. +
        Add the following options:
        +
        -XX:MaxPermSize=192m
        +
        -XX:+AggressiveHeap
        +
        -Xss128k
        +
        -XX:+DisableExplicitGC
        +
        -Dcom.sun.enterprise.ss.ASQuickStartup=false
        +
        +
      4. +
      5. +
        To install on a multi-processor machine, add the following:
        +
        -XX:+UseParallelOldGC
        +
        +
      6. +
      7. +
        To enable the optional HANDLE.NET installation and provide access to
        +
        study ID registration, add the following (see the “Handles System”
        +
        section in the “Optional Components” for
        +
        details):
        +
        -Ddvn.handle.baseUrl=<-Dataverse Network host URL>/dvn/study?globalId=hdl:
        +
        -Ddvn.handle.auth=<authority>
        +
        -Ddvn.handle.admcredfile=/hs/svr_1/admpriv.bin
        +
        +
      8. +
      9. +
        To enable the optional Google Analytics option on the Network Options
        +
        page and provide access to site usage reports, add the following (see
        +
        the “Google Analytics” section in the “Optional Components” for
        +
        details):
        +
        +
        -Ddvn.googleanalytics.key=<googleAnalyticsTrackingCode>
        +
        +
        +
      10. +
      11. +
        Configure the following option only if you run multiple instances
        +
        of the GlassFish server for load balancing. This option controls
        +
        which GlassFish instance runs scheduled jobs, such as harvest or
        +
        export.
        +
        For the server instance that will run scheduled jobs, include the
        +
        following JVM option:
        +
        -Ddvn.timerServer=true
        +
        For all other server instances, include this JVM option:
        +
        -Ddvn.timerServer=false
        +
        If you are installing Glassfish in either a production or development
        +
        environment, follow these steps:
        +
        +
          +
        • +
          Change the following options’ settings:
          +
          Change -client to -server.
          +
          Change -Xmx512m to whatever size you can allot for the maximum
          +
          Java heap space.
          +
          Set -Xms512m to the same value to which you set -Xmx512m.
          +
          +
        • +
        • +
          To configure permanent file storage (data and documentation files
          +
          uploaded to studies) set the following:
          +
          -Dvdc.study.file.dir=${com.sun.aas.instanceRoot}/config/files/studies
          +
          +
        • +
        • +
          To configure the temporary location used in file uploads add the
          +
          following:
          +
          -Dvdc.temp.file.dir=${com.sun.aas.instanceRoot}/config/files/temp
          +
          +
        • +
        • +
          To configure export and import logs (harvesting and importing),
          +
          add the following:
          +
          -Dvdc.export.log.dir=${com.sun.aas.instanceRoot}/logs/export
          +
          -Dvdc.import.log.dir=${com.sun.aas.instanceRoot}/logs/import
          +
          +
        • +
        • +
          Add the following:
          +
          -Djhove.conf.dir=${com.sun.aas.instanceRoot}/config
          +
          -Ddvn.inetAddress=<host or fully qualified domain name of server
          +
          on which Dataverse Network runs>
          +
          -Ddvn.networkData.libPath=${com.sun.aas.instanceRoot}/applications/j2ee-
          +
          +
          apps/DVN-EAR
          +
          +
          +
        • +
        • +
          To manage calls to RServe and the R host (analysis and file upload), add
          +
          the following:
          +
          -Dvdc.dsb.host=<RServe server hostname>
          +
          -Dvdc.dsb.rserve.user=<account>
          +
          -Dvdc.dsb.rserve.pwrd=<password>
          +
          -Dvdc.dsb.rserve.port=<port number>
          +
          +
          +
          For Installing R, see:
          + +
          for information about configuring these values in the Rserv.conf
          +
          file.
          +
          These settings must be configured for subsetting and analysis to
          +
          work.
          +
          +
        • +
        • +
          To configure search index files set the following:
          +
          -Ddvn.index.location=${com.sun.aas.instanceRoot}/config
          +
          +
        • +
        • +
          To use the optional customized error logging and add more information
          +
          to your log files, set the following:
          +
          -Djava.util.logging.config.file= ${com.sun.aas.instanceRoot} /config/logging.properties
          +
          Note: To customize the logging, edit the logging.properties file
          +
          +
        • +
        • +
          The default size limit for file downloads is 100MB. To override this
          +
          default add the following JVM option:
          +
          -Ddvn.batchdownload.limit=<max download bytes>
          +
          +
        • +
        +
      12. +
      +
      +
      +

      EJB Container

      +

      Under Configuration->EJB Container->EJB Timer Service:

      +
        +
      1. +
        Set the Timer Datasource to the following:
        +
        jdbc/VDCNetDS
        +
        +
      2. +
      3. +
        Save the configuration.
        +
        +
      4. +
      +
      +
      +

      HTTP Service

      +

      The HTTP Service configuration settings described in this section are suggested defaults. These settings are very important. There are no right values to define; the values depend on the specifics of your web traffic, how many requests you get, how long they take to process on average, and your hardware. For detailed information, refer to the +Sun Microsystems Documentation web site at the following URL:

      +

      http://docs.sun.com/

      +
      +
      Note: If your server becomes so busy that it drops connections,
      +
      adjust the Thread Counts to improve performance.
      +
      +
        +
      1. Under Configuration->HTTP Service->HTTP +Listeners->http-listener-1:

        +
          +
        • Listener Port: 80
        • +
        • Acceptor Threads: The number of CPUs (cores) on your server
        • +
        +
      2. +
      3. Under Configuration->HTTP Service, in the RequestProcessing tab:

        +
          +
        • Thread Count: Four times the number of CPUs (cores) on your server
        • +
        • Initial Thread Count: The number of CPUs (cores)
        • +
        +
      4. +
      5. Under Configuration->HTTP Service->Virtual Servers->server: add new property allowLinking with the value true.

        +
        +
          +
        1. +
          Under Configuration->HTTP Service, configure Access Logging:
          +
          +
        2. +
        +
        +
        format=%client.name% %auth-user-name% %datetime% %request% %status%
        +
        %response.length%
        +
        rotation-enabled=true
        +
        rotation-interval-in-minutes=15
        +
        rotation-policy=time
        +
        rotation-suffix=yyyy-MM-dd
        +
        +
        +
      6. +
      +
      +
      +

      JavaMail Session

      +

      Under Resources->JavaMail Sessions->mail/notifyMailSession:

      +
        +
      • +
        Mail Host: <your mail server>
        +
        Note: The Project recommends that you install a mail server on the same machine as GlassFish and use localhost for this entry. Since email notification is used for workflow events such as creating a dataverse or study, these functions may not work properly if a valid mail server is not configured.
        +
        +
      • +
      • +
        Default User: dataversenotify
        +

        This does not need to be a real mail account.

        +
        +
        +
      • +
      • Default Return Address: do-not-reply@<your mail server>

        +
      • +
      +
      +
      +

      JDBC Resources

      +

      Under Resources->JDBC->Connection Pools:

      +
      +
      Add a new Connection Pool entry:
      +
      +
        +
      • entryName: dvnDbPool
      • +
      • Resource Type: javax.sql.DataSource
      • +
      • Database Vendor: PostgreSQL
      • +
      • DataSource ClassName: org.postgresql.ds.PGPoolingDataSource
      • +
      • Additional Properties:
          +
        • ConnectionAttributes: ;create=true
        • +
        • User: dvnApp
        • +
        • PortNumber: 5432 (Port 5432 is the PostgreSQL default port.)
        • +
        • Password: <Dataverse Network application database password>
        • +
        • DatabaseName: <your database name>
        • +
        • ServerName: <your database host>
        • +
        • JDBC30DataSource: true
        • +
        +
      • +
      +
      +

      +
      +

      Under Resources->JDBC->JDBC Resources:

      +
      +
      Add a new JDBC Resources entry:
      +
      +
        +
      • JNDI Name: jdbc/VDCNetDS
      • +
      • Pool Name: dvnDbPool
      • +
      +
      +
      +

      JMS Resources

      +

      Under Resources->JMS Resources:

      +
        +
      1. Add a new Connection Factory for the DSB Queue:
          +
        • JNDI Name: jms/DSBQueueConnectionFactory
        • +
        • Resource Type: javax.jms.QueueConnectionFactory
        • +
        +
      2. +
      3. Add a new Connection Factory for the Index Message:
          +
        • JNDI Name: jms/IndexMessageFactory
        • +
        • Resource Type: javax.jms.QueueConnectionFactory
        • +
        +
      4. +
      5. Add a new Destination Resource for the DSB Queue:
          +
        • JNDI Name: jms/DSBIngest
        • +
        • Physical Destination Name: DSBIngest
        • +
        • Resource Type: javax.jms.Queue
        • +
        +
      6. +
      7. Add a new Destination Resource for the Index Message:
          +
        • JNDI Name: jms/IndexMessage
        • +
        • Physical Destination Name: IndexMessage
        • +
        • Resource Type: javax.jms.Queue
        • +
        +
      8. +
      +
      +
      +
      +

      PostgreSQL setup

      +

      The following actions are normally performed by the automated installer +script. These steps are explained here for reference, and/or in case +you need to perform them manually:

      +
        +
      1. Start as root, then change to user postgres:

        +

        su postgres

        +
      2. +
      +
      +

      Create DVN database user (role):

      +

      createuser -SrdPE [DB_USERNAME]

      +

      (you will be prompted to choose a user password).

      +

      Create DVN database:

      +

      createdb [DB_NAME] --owner=[DB_USERNAME]

      +

      [DB_NAME] and [DB_USERNAME] are the names you choose for your DVN database and database user. These, together with the password you have assigned, will be used in the Glassfish configuration so that the application can talk to the database.

      +
      +
        +
      1. Before Glassfish can be configured for the DVN app, the Postgres driver needs to be installed in the <GLASSFISH ROOT>/lib directory. We supply a version of the driver known to work with the DVN in the dvninstall/pgdriver directory of the Installer bundle. (This is the “What does the Installer do?” section of this appendix) An example of the installed location of the driver:
      2. +
      +
      +
      /usr/local/glassfish/lib/postgresql-8.3-603.jdbc4.jar
      +
        +
      1. Finally, after the DVN application is deployed under Glassfish for the first time, the database needs to be populated with the initial content:
      2. +
      +
      +

      su postgres +psql -d [DB_NAME] -f referenceData.sql

      +

      The file referenceData.sql is provided as part of the installer zip package.

      +
      +
      +
      +

      RedHat startup file for glassfish, example

      +

      Below is an example of a glassfish startup file that you may want to +install on your RedHat (or similar) system to have glassfish start +automatically on boot.

      +
      +
      Install the file as /etc/init.d/glassfish, then run chkconfig glassfish on
      +
      +

      Note that the extra configuration steps before the domain start line, +for increasing the file limit and allowing “memory overcommit”. These +are useful settings to have on a production server.

      +
      +
      You may of course add extra custom configuration specific to your +setup.
      +
      +
      #! /bin/sh
      +# chkconfig: 2345 99 01
      +# description: GlassFish App Server
      +set -e
      +ASADMIN=/usr/local/glassfish/bin/asadmin
      +case "$1" in
      +  start)
      +        echo -n "Starting GlassFish server: glassfish"
      +        # Increase file descriptor limit:
      +        ulimit -n 32768
      +        # Allow "memory overcommit":
      +        # (basically, this allows to run exec() calls from inside the
      +        # app, without the Unix fork() call physically hogging 2X
      +        # the amount of memory glassfish is already using)
      +        echo 1 > /proc/sys/vm/overcommit_memory
      +        $ASADMIN start-domain domain1
      +        echo "."
      +        ;;
      +  stop)
      +        echo -n "Stopping GlassFish server: glassfish"
      +        $ASADMIN stop-domain domain1
      +        echo "."
      +         ;;
      +  *)
      +        echo "Usage: /etc/init.d/glassfish {start|stop}"
      +
      +        exit 1
      +esac
      +exit 0
      +
      +
      +
      +
      +

      Enabling secure remote access to Asadmin

      +

      As was mentioned in the Glassfish section of the manual, in version +3.1.2 admin interface (asadmin) is configured to be accessible on the +localhost interface only. If you need to be able to access the admin +console remotely, you will have to enable secure access to it. (It will +be accessible over https only, at https://<YOUR HOST>:4848; connections +to http://<YOUR HOST>:4848 will be automatically redirected to the https +interface)

      +

      The following must be done as root:

      +
        +
      1. First you need to configure the admin password:

        +

        <GF LOCATION>/glassfish3/bin/asadmin change-admin-password

        +

        (since you didn’t create one when you were installing Glassfish, leave the “current password” blank, i.e., hit ENTER)

        +
      2. +
      3. Enable the secure access:

        +
      4. +
      +
      +

      <GF LOCATION>/glassfish3/bin/asadmin enable-secure-admin

      +

      (Note that you will need to restart Glassfish after step 2. above)

      +
      +
      +
      +

      Using LOCKSS with DVN

      +

      DVN holdings can be crawled by LOCKSS servers (www.lockss.org). It is made possible by the special plugin developed and maintained by the DVN project, which a LOCKSS daemon utilizes to crawl and access materials served by a Dataverse network.

      +

      The current stable version of the plugin is available at the following location:

      +

      http://lockss.hmdc.harvard.edu/lockss/plugin/DVNOAIPlugin.jar

      +

      As of January 2013 and DVN version 3.3, the plugin is compatible with the LOCKSS daemon version 1.55. The plugin sources can be found in the main DVN source tree in https://dvn.svn.sourceforge.net/svnroot/dvn/dvn-app/trunk/src/DVN-lockss (please note that the DVN project is currently in the process of moving to gitHub! The preserved copy of the 3.3 source will be left at the URL above, together with the information on the current location of the source repository).

      +

      In order to crawl a DVN, the following steps need to be performed:

      +
        +
      1. Point your LOCKSS daemon to the plugin repository above. (Refer to the LOCKSS documentation for details);

        +
      2. +
      3. Create a LOCKSS Archival Unit for your target DVN:

        +

        In the LOCKSS Admin Console, go to Journal Configuration -> Manual Add/Edit and click on Add Archival Unit.

        +

        On the next form, select DVNOAI in the pull down menu under Choose a publisher plugin and click Continue.

        +

        Next configure the parameters that define your DVN Archival Unit. LOCKSS daemon can be configured to crawl either the entire holdings of a DVN (no OAI set specified), or a select Dataverse.

        +
      4. +
      +

      Note that LOCKSS crawling must be authorized on the DVN side. Refer to +the “Edit LOCKSS Settings” +section of the DVN Network Administrator Guide for the instructions on +enabling LOCKSS crawling on the network level, and/or to the +Enabling LOCKSS access to the Dataverse +of the Dataverse Administration Guide. Once you allow LOCKSS crawling of +your Dataverse(s), you will need to enter the URL of the “LOCKSS +Manifest” page provided by the DVN in the configuration above. For the +network-wide archival unit this URL will be +http://<YOUR SERVER>/dvn/faces/ManifestPage.xhtml; for an +individual dataverse it is +http://<YOUR SERVER>/dvn/dv/<DV ALIAS>/faces/ManifestPage.xhtml.

      +
      +
      The URL of the DVN OAI server is http://<YOUR DVN HOST>/dvn/OAIHandler.
      +
      +
      +
      +

      Read Only Mode

      +

      A Read Only Mode has been established in DVN to allow the application to remain available while deploying new versions or patches. Users will be able to view data and metadata, but will not be able to add or edit anything. Currently there is no way to switch to Read Only Mode through the application. +In order to change the application mode you must apply the following queries through psql or pgAdmin:

      +

      To set to Read Only Mode:

      +
      +
      +
      BEGIN;
      +
      SET TRANSACTION READ WRITE;
      +
      -- Note database and user strings may have to be modified for your particular installation;
      +
      -- You may also customize the status notice which will appear on all pages of the application;
      +
      update vdcnetwork set statusnotice = 'This network is currently in Read Only state. No saving of data will be allowed.';
      +
      ALTER DATABASE "dvnDb" set default_transaction_read_only=on;
      +
      Alter user "dvnApp" set default_transaction_read_only=on;
      +
      update vdcnetwork set statusnotice = '';
      +
      END;
      +
      +
      +

      To return to regular service:

      +
      +
      +
      BEGIN;
      +
      SET TRANSACTION READ WRITE;
      +
      -- Note database and user strings may have to be modified for your particular installation;
      +
      ALTER DATABASE "dvnDb" set default_transaction_read_only=off;
      +
      Alter user "dvnApp" set default_transaction_read_only=off;
      +
      update vdcnetwork set statusnotice = '';
      +
      END;
      +
      +
      +
      +
      +

      Backup and Restore

      +

      Backup

      +
      +
      The PostgreSQL database and study files (contained within the Glassfish directory by default but this is configurable via JVM options) are the most critical components to back up. The use of standard PostgreSQL tools (i.e. pg_dump) is recommended.
      +
      +

      Glassfish configuration files (i.e. domain.xml, robots.txt) and local +customizations (i.e. images in the docroot) should be backed up as well. +In practice, it is best to simply back up the entire Glassfish directory +as other files such as logs may be of interest.

      +
      +
      Restore
      +
      +

      Restoring DVN consists of restoring the PostgreSQL database and the +Glassfish directory.

      +
      +
      +
      + + +
      +
      +
      +
      + +
      +
      +
      + + + + + \ No newline at end of file diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/dataverse-user-main.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/dataverse-user-main.html Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,4392 @@ + + + + + + + + + + User Guide — The Harvard Dataverse Network 3.6.1 documentation + + + + + + + + + + + + + + + + +
      +
      +
      + +
      +
      +
      + +
      +

      User Guide

      +
      +

      Common Tasks

      +

      Here is a list of the most common ways people use the Dataverse Network. +Activities can be grouped into finding and using data or publishing +data. A brief description of each activity follows with more detailed +information available in the Users Guide.

      +
      +

      Finding Data

      +

      Visitors to the site can browse dataverses looking for data of +interest or they can search by keywords. There are Basic and Advanced +Searches.

      +

      Browsing the Site

      +

      The Network Homepage presents a list of recently released dataverses on the left side of the page. +A dataverse is a container for studies that can be managed as a group by the dataverse administrator. +Most often a dataverse represents a single organization or scholar and so their studies are often related. +On the right side of the page there are lists of both recently released studies and studies that have been +downloaded most often. At the bottom of these lists, the View More link brings the user to a complete list +of released dataverses or studies as applicable. The home page also includes a scrolling list of dataverse +collections called subnetworks, if applicable.

      +

      Clicking on the name of a dataverse, study or subnetwork displays its home page.

      +

      Browsing Dataverses

      +

      If you click the View More link under the recently released dataverse list on the Network Homepage you’ll be brought to +the Browse Dataverses page. Here you can sort the dataverses by Name, Affiliation, Release Date and Download Count. You +may also filter the dataverses by typing a filter term in the “filter” text box. The filter will only display those +dataverses whose name or affiliation matches the filter term. Clicking on the name of a dataverse displays its home page.

      +

      Search

      +

      For many purposes, Basic Search is sufficient. On the center top of the network homepage enter keywords or +complete sentences and click Search. A resulting list of studies is +displayed. Further refinement can be made by clicking facets such as +“Original Dataverse” or “Author” under “Refine Results” on the left side +of the page. After a facet has been clicked, it will appear at the top +of the page under “Search Results for” and clicking the selected facet +will remove it, restoring the previous results. In addition to the +network homepage, Basic Search can be found on the upper right of the +dataverse home pages as well as on the search results and Advanced +Search pages. Be aware that searching from a dataverse limits the scope +of search to studies within that dataverse while searching from the +network home page searches all released studies.

      +

      When a more specific search is needed, use Advanced Search. Advanced +Search allows searching on keywords found in specific cataloging +information fields, in particular collections in a dataverse where +available, or by variable name. The link to Advanced Search is next to +the Basic Search feature on the network and dataverse home pages and the +search results page.

      +
      +
      +

      Using Data

      +

      Data in the Dataverse Network is stored in files. Files of any +type are allowed but some types of tabular and network data files are +supported by additional functionality, including downloading in +different formats, downloading subsets of variables, and analytical +tools.

      +

      Download Files

      +

      To download files, click on a study of interest, then select the +data tab. Individual files can be downloaded or groups of files by +checking files of interest or entire file categories and clicking +Download All Selected Files. Groups of files are packaged into a single +.zip file. Group downloads have a download size limit and any selected +files not downloaded will be indicated in the .zip file.

      +

      Downloading individual files in an alternate format where available is +straightforward. Choose the format from the Download As select box next +to the file and the file will download.

      +

      Subset or Analyze Files

      +

      Tabular and Network data files of recognized formats (Stata, SPSS, RData, +Graphml) can be further manipulated through downloading subsets of +variables and by performing various statistical analyses. Where +available these options appear as an additional link, Access +Subset/Analysis, below the Download As format select box next to each +file. The functionality is quite different for tabular versus network +data files so refer to the Users Guide for additional information.

      +
      +
      +

      Publishing Data

      +

      Publishing data through the Dataverse Network is straightforward: +create an account and a place to store your data, organize your data, +upload files, and release your data for public access.

      +

      Create a Dataverse and Account

      +

      The first step to publishing your data is to create a place to +store it that can be managed by you. To do this you need an account. +Create a dataverse and account by clicking on the Create a Dataverse +link on the upper right side of the network homepage. This leads you +through a series of steps at the end of which you will have a dataverse +and user account to manage it.

      +

      Newly created dataverses are unreleased and not available for +browsing. Make note of the link to your dataverse at the end of the +process so you can return to it until it becomes released. Another way +to access your unreleased dataverse is to log in, click on your user +name in the upper right of the page, dataverses tab, then the name of +your dataverse.

      +

      Create Studies

      +

      Once you have a user account and a place to store your data, you +need to take the first step toward organizing your data into studies. +Many data have been or will be used to publish a study so this step may +be clear. If not, a study should represent a particular thesis or +inquiry with accompanying data. First, log in with your new user account +and navigate to your dataverse home page. Next, click Options in the +upper right of the page. From there click Create a Study and complete +the form. Most of the fields on the study form are optional — only the +title is required. If you are unsure of what these values should be, +enter a title and these fields can be completed later before releasing +the study.

      +

      Be aware that a newly created study is unreleased and not available +for browsing. To access an unreleased study for further editing, click +on Options->Manage Studies and click on your study’s name. You can also +click on your username, studies tab, then the study name.

      +

      Upload Files

      +

      Now that you have a place to store and manage your data and a +study to associate it with, you can upload your data and documentation +files. Files are uploaded to a study. Navigate to the study you want to +upload particular files to and click on Add Files on the upper right +side of the page. The add files page requires you to first select a file +type, then browse for the file on your local system. Some file types +undergo additional processing to support extended functionality but if +you are unsure which type to choose, select Other. At this time you can +enter a descriptive Category which can be used to group related files +and a file description. If you are unsure of these values they can be +added later.

      +

      Though files are selected individually, several files can be added +to this page at one time. It is recommended to upload only a few files +at a time since this can take some time to complete, depending on file +type.

      +

      An alternative to selecting files individually is to first create an +archive of files in .zip or .tar format and then select the +appropriate “multiple files” Data Type when uploading your archive. The +zip file or tarball will be unpacked so that the individual files will +be added to the page.

      +

      If you upload an SPSS (.por, .sav), Stata (.dta) or R +(.RData) file, your study will be temporarily unavailable for +editing until the additional processing on the file is completed. This +can be brief or take some time depending on the size and complexity of +the file. A message at the top of the file indicates it is unavailable +for editing and an email will be sent when finished to the address you +indicate on the add files page.

      +

      Release Studies

      +

      Once your study is in a state where it’s ready to be published or +shared with others, it should be released. This is done either by +clicking Release on the upper right of the study page or by navigating +to your dataverse, clicking Options, Manage Studies, then clicking +release next to the study you want released. Note that releasing a study +fixes the version number. Additional changes to the study will create a +new draft version. The draft can be repeatedly edited without changing +the version number until it is released. At this point your study is +visible within your dataverse. If your dataverse is also released it +will be searchable and viewable by others. If your dataverse is not yet +released, it will only be visible to people with access to your +dataverse.

      +

      Release Dataverse

      +

      Releasing a dataverse makes it appear in the list of dataverses on +the network home page and makes it viewable by others. This may require +adding a study or other details to your dataverse depending on site +policy. By default, releasing a dataverse requires nothing but changing +the Dataverse Release Settings to Released on the Manage Permissions +page. To release your dataverse, navigate to the dataverse home page, +choose Options from the upper right of the page, click on Dataverse +Settings, then Manage Permissions. At the top of the page, change +Dataverse Release Settings to Released and click Save Changes.

      +

      Any studies that are released are now visible to others. Those +that are unreleased do not appear in the list of studies on the +dataverse home page.

      +

      At this point you have published one or more studies and their data and +made them available for browsing or searching.

      +
      +
      +

      Things to Consider, Next Steps

      +

      The above tasks are fundamental activities and may be all that is +needed for most users. Some situations are more complex and require +additional consideration. These include publishing and organizing data +for large organizations, shared research between scholars, and enabling +contributions by a geographically diverse team while keeping data +private until ready for publication.

      +

      For large organizations, a single dataverse may suffice. Collections +within a dataverse can further organize studies by sub unit or topic. +The dataverse itself can be customized with the organizations own +website header and footer. In some cases, sub units or organizations +want to maintain their own distinct branding. In such cases each can +create and maintain their own dataverse and the parent dataverse can +link to their studies through a link collection.

      +

      For shared research, the model is similar: a single dataverse based +on the research project can be created to which both researchers have +administration rights. Additionally, researchers can maintain their own +dataverses for other work and link back to the studies in the shared +project dataverse.

      +

      Allowing a diverse team to contribute to an unreleased dataverse is +simply a matter of granting the appropriate level of permissions to +each team member. At minimum, each team member would need to be added as +a contributor to the dataverse. By default, they can only contribute to +studies they themselves have created. However, this can be expanded from +the dataverse Manage Permissions page to allow contributors to edit all +studies in the dataverse. Changes made by contributors need to be +approved by a curator or admin before a study can be released.

      +
      +
      +

      How the Guides Are Organized

      +

      The guides are reference documents that explain how to use +the Dataverse Network functionality: Installers Guide, Developers Guide, APIs Guide, and Users +Guide. The Users Guide is further divided into primary activities: using +data, creating studies, administering dataverses or the network. Details +on all of the above tasks can be found in the Users Guide. The +Installers Guide is for people or organizations who want to host their +own Dataverse Network. The Developers Guide contains instructions for +people who want to contribute to the Open Source Dataverse Network +project or who want to modify the code to suit their own needs. Finally, the +APIs Guide is for people who would like to use our APIs in order to build apps that +can work with the Dataverse Network web application. This page lists some current apps +which have been developed with our APIs.

      +
      +
      +

      Other Resources

      +

      Dataverse Network Project Site

      +

      Additional information about the Dataverse Network project itself +including presentations, information about upcoming releases, data +management and citation, and announcements can be found at +http://thedata.org

      +

      User Group

      +

      As the user community grows we encourage people to share ideas, ask +questions, or offer suggestions for improvement. Go to +https://groups.google.com/group/dataverse-community to register to our dataverse community group.

      +

      Follow Us on Twitter

      +

      For up to date news, information and developments, follow our twitter account: https://twitter.com/thedataorg

      +

      Support

      +

      We maintain an email based support service that’s free of charge. We +attempt to respond within one business day to all questions and if it +cannot be resolved immediately, we’ll let you know what to expect.

      +
      +
      +

      Contact Us

      +

      The support email address is +support@thedata.org.

      +

      This is the same address as the Report Issue link. We try to respond +within one business day.

      +
      +
      +
      +

      Finding and Using Data

      +

      End users, without needing to log in to the Dataverse Network, can browse +dataverses, search studies, view study description and data files for +public studies, and subset, analyze and visualize data for public data +files. If entire studies or individual data files are restricted, end +users need to be given permission from the dataverse administrator to +access the data.

      + +
      +

      View Studies / Download Data

      +

      Cataloging Information

      +

      When a study is created, a set of metadata is associated with that +study. This metadata is called the Cataloging Information for the +study. When you select a study to view it, you first see the Cataloging +Information tab listing the metadata associated with that study. This is +the default view of a study.

      +

      Cataloging Information contains numerous fields that help to describe +the study. The amount of information you find for each study varies, +based on what was entered by the author (Contributor) or Curator of that +study. For example, one study might display the distributor, related +material, and geographic coverage. Another study might display only the +authors and the abstract. Every study includes the Citation Information fields in the Cataloging Information.

      +

      Note: A comprehensive list of all Cataloging Information fields is +provided in the List of Metadata References

      +

      Cataloging Information is divided into four sections. These sections and +their details are displayed only when the author (Contributor) or +Curator provides the information when creating the study. Sections +consist of the following:

      +
        +
      • Citation Information - These fields comprise +the citation for the study, +consisting of a global identifier for all studies and a UNF, or +Universal Numerical Fingerprint, for studies that contain subsettable +data files. It also can include information about authors, producers +and distributors, and references to related studies or papers.
      • +
      • Abstract and Scope - This section describes the research study, lists +the study’s data sets, and defines the study’s geographical scope.
      • +
      • Data Collection/Methodology - This section includes the technical +details of how the author obtained the data.
      • +
      • Terms of Use - This information explains that the study requires +users to accept a set of conditions or agreements before downloading +or analyzing the data. If any Terms of Use text is displayed in +the Cataloging Information section, you are prompted to accept the +conditions when you click the download or analyze icons in the Files +page. +Note: A study might not contain Terms of Use, but in some cases the +original parent dataverse might have set conditions for all studies +owned by that dataverse. In that case, the conditions are inherited +by the study and you must accept these conditions before downloading +files or analyzing the data.
      • +
      +

      Study metadata can be downloaded in XML format using a link at the bottom +of the study Cataloging Information tab: DDI (without variables) +/ DDI (full). +These links appear for released studies whose metadata has been exported. +Studies are typically exported on a daily basis.

      +

      List of Study Files

      +

      When you view a study, click the Documentation, Data and Analysis tab to +view a list of all electronic files associated with the study that were +provided by the author or Curator.

      +

      A study might contain documentation, data, or other files. When the +study contributor uploads data files of the type .dta, .sav, or .por to the Network, those files are converted +to .tab tab-delimited files. These .tab files +are subsettable, and can be subsetted and analyzed online by using the Dataverse Network +application.

      +

      Data files of the type .xml also are considered to be subsettable, +and can be subsetted and analyzed to a minimal degree online. +An .xml type file indicates social network data that complies with +the GraphML file format.

      +

      You can identify a subsettable data file by the Subsetting label and +the number of cases and variables listed next to the file name. Other +files that also contain data might be associated with a study, but the +Dataverse Network application does not recognize them as data (or +subsettable) files.

      +

      Download Study Files

      +

      You can download any of the following within a study:

      + +

      The default format for subsettable tabular data file downloads +is tab-delimited. When you download one or more subsettable files in +tab-delimited format, the file contains a header row. When you download +one subsettable file, you can select from the following formats in +addition to tab-delimited:

      +
        +
      • Original file
      • +
      • Splus
      • +
      • Stata
      • +
      • R
      • +
      +

      The default format for subsettable network data file downloads +is Original file. In addition, you can choose to download network +data files in GraphML format.

      +

      If you select any other format for a tabular data file, the file is +downloaded in a zipped archive. You must unzip the archive to view or +use the individual data file.

      +

      If you download all or a selection of data files within a study, the +files are downloaded in a zipped archive, and the individual files are +in tab-delimited or network format. You must unzip the archive to view +or use the individual data files.

      +

      Note: Studies and data files often have user restrictions applied. If +prompted to accept Terms of Use for a study or file, check the I Accept box and then click the Continue button to view or download the +file.

      +

      User Comments

      +

      If the User Comment feature is enabled within a dataverse, users are +able to add comments about a study within that dataverse.

      +

      When you view a study, click the User Comments tab to view all comments +associated with the study. Comments can be monitored and abuse reported +to the Network admin, who has permission to remove any comments deemed +inappropriate. Note that the dataverse admin does not have permission to +remove comments, to prevent bias.

      +

      If you choose, you also can add your own comments to a study from the +User Comments tab. See Comment on Studies or Data for +detailed information.

      +

      Note: To add a comment to a study, you must register and create an +account in the dataverse that owns the study about which you choose to +comment. This helps to prevent abuse and SPAM issues.

      +

      Versions

      +

      Upon creating a study, a version is created. This is a way to archive +the metadata and data files associated with the study citation +or UNF.

      +

      View Citations

      +

      You can view a formatted citation for any of the following entities +within the Dataverse Network application:

      +
        +
      • Studies - For every study, you can view a citation for that study. +Go to the Cataloging Information tab for a study and view the How +to Cite field.
      • +
      • Data sets - For any data set, you can view a citation for that set. +Go to the Documentation, Data and Analysis tab for a study to see the +list of study files. To view the citation for any data set click +the View Data Citation link associated with that subsettable +file.
      • +
      • Data subsets - If you subset and analyze a data set, you can view a +citation for each subset. +See Apply Descriptive Statistics or Perform Advanced Analysis for +detailed information. +Also, when you download a workspace file, a copy +of the citation information for that subset is provided in the +download.
      • +
      +

      Note: For individual variables within a subsettable data subset, you can +view the UNF for that variable. +This is not a full citation for the variable, but it is one component of +that citation. Note also that this does not apply to .xml data.

      +
      +
      +

      Subset and Analysis

      +

      Subsetting and analysis can be performed on tabular and network data +files. Refer to the appropriate section for more details.

      +
      +

      Tabular Data

      +

      Tabular data files (subsettable files) can be subsetted and analyzed +online by using the Dataverse Network application. For analysis, the +Dataverse Network offers a user interface to Zelig, a powerful, R-based +statistical computing tool. A comprehensive set of Statistical Analysis +Models are provided.

      +

      After you find the tabular data set that you want, access the Subset +and Analysis options to use the online tools. Then, you can subset +data by variables or observations, translate it into a convenient +format, download subsets, and apply statistics and analysis.

      +

      Network data files (also subsettable) can be subsetted online, and then +downloaded as a subset. Note that network data files cannot be analyzed +online.

      +

      Review the Tabular Data Subset and Recode Tips before you start.

      +

      Access Subset and Analysis Options

      +

      You can subset and analyze tabular data files before you download the +file or your subsets.

      +

      To access the Subset and Analysis options for a data set:

      +
        +
      1. Click the title of the study from which you choose to analyze or +download a file or subset.
      2. +
      3. Click the Documentation, Data and Analysis tab for the study.
      4. +
      5. In the list of study files, locate the data file that you choose to +download, subset, or analyze. +You can download data sets for a file only if the file entry includes +the subset icon.
      6. +
      7. Click the Access Subset/Analysis link associated with the +selected file. +If prompted, check the I accept box and click Continue to accept +the Terms of Use. +You see the Data File page listing data for the file that you choose +to subset or analyze.
      8. +
      +

      View Variable Quick Summary

      +

      When a subsettable data file is uploaded for a study, the Dataverse +Network code calculates summary statistics for each variable within that +data file. On any tab of the Data File page, you can view the summary +statistics for each variable in the data file. Information listed +comprises the following:

      +
        +
      • For continuous variables, the application calculates summary +statistics that are listed in the DDI schema.
      • +
      • For discrete variables, the application tabulates values and their +labels as a frequency table. +Note, however, that if the number of categories is more than 50, the +values are not tabulated.
      • +
      • The UNF value for each variable is included.
      • +
      +

      To view summary statistics for a variable:

      +
        +
      1. In the Data File page, click any tab.
      2. +
      3. In the variable list on the bottom of the page, the right column is +labeled Quick Summary. +Locate a variable for which you choose to view summary statistics. +Then, click the Quick Summary icon for that variable to toggle the +statistic’s information on and off. +You see a small chart that lists information about that variable. The +information provided depends upon the variable selected.
      4. +
      +

      Download Tabular Subsets

      +

      You can download a subset of variables within a tabular-data study file. +You also can recode a subset of those variables and download the recoded +subset, if you choose.

      +

      To download a subset of variables in tabular data:

      +
        +
      1. In the Data File page, click the Download Subset tab.
      2. +
      3. Click the radio button for the appropriate File Format in which to +download the variables: Text, R Data, S plus, or Stata.
      4. +
      5. On the right side of the tab, use the Show drop-down list to select +the quantities of variables to list at one time: 10, 20, 50, or All.
      6. +
      7. Scroll down the screen and click the check boxes to select variables +from the table of available values. When you select a variable, it is +added to the Selected Variables box at the top of the tab. +To remove a variable from this box, deselect it from the Variable +Type list at the bottom of the screen. +To select all variables, click the check box beside the column name, +Variable Type.
      8. +
      9. Click the Create Zip File button. +The Create Zip File button label changes the following +format: zipFile_<number>.zip.
      10. +
      11. Click the zipFile_<number>.zip button and follow your browser’s +prompts to open or save the data file to your computer’s disk drive
      12. +
      +

      Apply Descriptive Statistics

      +

      When you run descriptive statistics for data, you can do any of the +following with the analysis results:

      +
        +
      • Open the results in a new window to save or print the results.
      • +
      • Download the R workspace in which the statistics were analyzed, for +replication of the analysis. See Replicate Analysis for more +information.
      • +
      • View citation information for the data analyzed, and for the full +data set from which you selected variables to analyze. See View +Citations for more information.
      • +
      +

      To apply descriptive statistics to a data set or subset:

      +
        +
      1. In the Data File page, click the Descriptive Statistics tab.
      2. +
      3. Click one or both of the Descriptive Statistics options: Univariate +Numeric Summaries and Univariate Graphic Summaries.
      4. +
      5. On the right side of the tab, use the Show drop-down list to select +one of the following options to show variables in predefined +quantities: 10, 20, 50, or All.
      6. +
      7. Scroll down the screen and click the check boxes to select variables +from the table of available values. When you select a variable, it is +added to the Selected Variables box at the top of the tab. +To remove a variable from this box, deselect it from the Variable +Type list at the bottom of the screen. +To select all variables, click the check box beside the column name, +Variable Type.
      8. +
      9. Click the Run Statistics button. +You see the Dataverse Analysis page.
      10. +
      11. To save or print the results, scroll to the Descriptive Statistics +section and click the link Open results in a new window. You then +can print or save the window contents. +To save the analysis, scroll to the Replication section and click the +button zipFile_<number>.zip. +Review the Citation Information for the data set and for the subset +that you analyzed.
      12. +
      13. Click the link Back to Analysis and Subsetting to return to the +previous page and continue analysis of the data.
      14. +
      +

      Recode and Case-Subset Tabular Data

      +

      Review the Tabular Data Recode and Subset Tips before you start work +with a study’s files.

      +

      To recode and subset variables within a tabular data set:

      +
        +
      1. In the Data File page, click the Recode and Case-Subsetting tab.
      2. +
      3. On the right side of the variable list, use the Show drop-down list +and select one of the following options to show variables in +predefined quantities: 10, 20, 50, or All.
      4. +
      5. Scroll down the screen and click the check boxes to select variables +from the table of available values. When you select a variable, it is +added to the Selected Variables box at the top of the tab. +To remove a variable from this box, deselect it from the Variable +Type list at the bottom of the screen. +To select all variables, click the check box beside the column name, +Variable Type.
      6. +
      7. Select one variable in the Selected Variables box, and then +click Start. +The existing name and label of the variable appear in the New +Variable Name and New Variable Label boxes.
      8. +
      9. In the New Variable Name field, change the variable name to a unique +value that is not used in the data file. +The new variable label is optional.
      10. +
      11. In the table below the Variable Name fields, you can check one or +more values to drop them from the subset, or enter new values, +labels, or ranges (as a condition) as needed. Click the Add +Value/Range button to create more entries in the value table. +Note: Click the ? Info buttons to view tips on how to use the +Recode and Subset table. Also, See Tabular Data Recode and Subset +Tips for more information about adding values and ranges.
      12. +
      13. Click the Apply Recodes button. +Your renamed variables appear at the bottom of the page in the List +of Recode Variables.
      14. +
      15. Select another variable in the Selected Variables box, click the +Start button, and repeat the recode action. +Repeat this process for each variable that you choose to recode.
      16. +
      17. To remove a recoded variable, scroll to the List of Recode Variables +at the bottom of the page and click the Remove link for the recoded +variable that you choose to delete from your subset.
      18. +
      +

      Perform Advanced Analysis

      +

      When you run advanced statistical analysis for data, you can do any of +the following with the analysis results:

      +
        +
      • Open the results in a new window to save or print the results.
      • +
      • Download the R workspace in which the statistics were analyzed, for +replication of the analysis. See Replicate Analysis for more +information.
      • +
      • View citation information for the data analyzed, and for the full +data set from which you selected variables to analyze. See View +Citations for more information.
      • +
      +

      To run statistical models for selected variables:

      +
        +
      1. In the Data File page, click the Advanced Statistical Analysis tab.
      2. +
      3. Scroll down the screen and click the check boxes to select variables +from the table of available values. When you select a variable, it is +added to the Selected Variables box at the top of the tab. +To remove a variable from this box, deselect it from the Variable +Type list at the bottom of the screen. +To select all variables, click the check box beside the column name, +Variable Type.
      4. +
      5. Select a model from the Choose a Statistical Model drop-down list.
      6. +
      7. Select one variable in the Selected Variables box, and then click the +applicable arrow button to assign a function to that variable from +within the analysis model. +You see the name of the variables in the appropriate function box. +Note: Some functions allow a specific type of variable only, while +other functions allow multiple variable types. Types include +Character, Continuous, and Discrete. If you assign an incorrect +variable type to a function, you see an Incompatible type error +message.
      8. +
      9. Repeat the variable and function assignments until your model is +complete.
      10. +
      11. Select your Output options.
      12. +
      13. Click the Run Model button. +If the statistical model that you defined is incomplete, you first +are prompted to correct the definition. Correct your model, and then +click Run Model again. +You see the Dataverse Analysis page.
      14. +
      15. To save or print the results, scroll to the Advanced Statistical +Analysis section and click the link Open results in a new window. +You then can print or save the window contents. +To save the analysis, scroll to the Replication section and click the +button zipFile_<number>.zip. +Review the Citation Information for the data set and for the subset +that you analyzed.
      16. +
      17. Click the link Back to Analysis and Subsetting to return the +previous page and continue analysis of the data.
      18. +
      +

      Replicate Analysis

      +

      You can save the R workspace in which the Dataverse Network performed an +analysis. You can download the workspace as a zipped archive that +contains four files. Together, these files enable you to recreate the +subset analysis in another R environment:

      +
        +
      • citationFile.<identifier>.txt - The citation for the subset that you analyzed.
      • +
      • rhistoryFile.<identifier>.R - The R code used to perform the analysis.
      • +
      • tempsubsetfile.<identifier>.tab - The R object file used to perform the analysis.
      • +
      • tmpRWSfile.<identifier>.RData - The subset data that you analyzed.
      • +
      +

      To download this workspace for your analysis:

      +
        +
      1. For any subset, Apply Descriptive Statistics or Perform Advanced +Analysis.
      2. +
      3. On the Dataverse Analysis or Advanced Statistical Analysis page, +scroll to the Replication section and click the +button zipFile_<number>.zip.
      4. +
      5. Follow your browser’s prompts to save the zipped archive. +When the archive file is saved to your local storage, extract the +contents to use the four files that compose the R workspace.
      6. +
      +

      Statistical Analysis Models

      +

      You can apply any of the following advanced statistical models to all or +some variables in a tabular data set:

      +

      Categorical data analysis: Cross tabulation

      +

      Ecological inference model: Hierarchical multinomial-direct ecological +inference for R x C tables

      +

      Event count models, for event count dependent variables:

      +
        +
      • Negative binomial regression
      • +
      • Poisson regression
      • +
      +

      Models for continuous bounded dependent variables:

      +
        +
      • Exponential regression for duration
      • +
      • Gamma regression for continuous positives
      • +
      • Log-normal regression for duration
      • +
      • Weibull regression for duration
      • +
      +

      Models for continuous dependent variables:

      +
        +
      • Least squares regression
      • +
      • Linear regression for left-censoreds
      • +
      +

      Models for dichotomous dependent variables:

      +
        +
      • Logistic regression for binaries
      • +
      • Probit regression for binaries
      • +
      • Rare events logistic regression for binaries
      • +
      +

      Models for ordinal dependent variables:

      +
        +
      • Ordinal logistic regression for ordered categoricals
      • +
      • Ordinal probit regression for ordered categoricals
      • +
      +

      Tabular Data Recode and Subset Tips

      +

      Use the following guidelines when working with tabular data files:

      +
        +
      • Recoding:
          +
        • You must fill at least the first (new value) and last (condition) +columns of the table; the second column is optional and for a new +value label.
        • +
        • If the old variable you chose for recoding has information about +its value labels, you can prefill the table with these data for +convenience, and then modify these prefilled data.
        • +
        • To exclude a value from your recoding scheme, click the Drop check +box in the row for that value.
        • +
        +
      • +
      • Subsetting:
          +
        • If the variable you chose for subsetting has information about its +value labels, you can prefill the table with these data for +convenience.
        • +
        • To exclude a value in the last column of the table, click the Drop +check box in the row for that value.
        • +
        • To include a particular value or range, enter it in the last +column whose header shows the name of the variable for subsetting.
        • +
        +
      • +
      • Entering a value or range as a condition for subsetting or recoding:
          +
        • Suppose the variable you chose for recoding is x. +If your condition is x==3, enter 3. +If your condition is x < -3, enter (--3. +If your condition is x > -3, enter -3-). +If your condition is -3 < x < 3, enter (-3, 3).
        • +
        • Use square brackets ([]) for closed ranges.
        • +
        • You can enter non-overlapping values and ranges separated by a +comma, such as 0,[7-9].
        • +
        +
      • +
      +
      +
      +

      Network Data

      +

      Network data files (subsettable files) can be subsetted and analyzed +online by using the Dataverse Network application. For analysis, the +Dataverse Network offers generic network data analysis. A list of +Network Analysis Models are provided.

      +

      Note: All subsetting and analysis options for network data assume a +network with undirected edges.

      +

      After you find the network data set that you want, access the Subset and +Analysis options to use the online tools. Then, you can subset data +by vertices or edges, download subsets, and apply network +measures.

      +

      Access Network Subset and Analyze Options

      +

      You can subset and analyze network data files before you download the +file or your subsets. To access the Subset and Analysis options for a +network data set:

      +
        +
      1. Click the title of the study from which you choose to analyze or +download a file or subset.
      2. +
      3. Click the Documentation, Data and Analysis tab for the study.
      4. +
      5. In the list of study files, locate the network data file that you +choose to download, subset, or analyze. You can download data sets +for a file only if the file entry includes the subset icon.
      6. +
      7. Click the Access Subset/Analysis link associated with the +selected file. If prompted, check the I accept box and click +Continue to accept the Terms of Use. +You see the Data File page listing data for the file that you choose +to subset or analyze.
      8. +
      +

      Subset Network Data

      +

      There are two ways in which you can subset network data. First, you can +run a manual query, and build a query of specific values for edge or +vertex data with which to subset the data. Or, you can select from among +three automatically generated queries with which to subset the data:

      +
        +
      • Largest graph - Subset the <nth> largest connected component of the +network. That is, the largest group of nodes that can reach one +another by walking across edges.
      • +
      • Neighborhood - Subset the <nth> neighborhood of the selected +vertices. That is, generate a subgraph of the original network +composed of all vertices that are positioned at most <n> steps away +from the currently selected vertices in the original network, plus +all of the edges that connect them.
      • +
      +

      You also can successively subset data to isolate specific values +progressively.

      +

      Continue to the next topics for detailed information about subsetting a +network data set.

      +

      Subset Manually

      +

      Perform a manual query to slice a graph based on the attributes of its +vertices or edges. You choose whether to subset the graph based on +vertices or edges, then use the Manual Query Builder or free-text Query +Workspace fields to construct a query based on that element’s +attributes. A single query can pertain only to vertices or only to +edges, never both. You can perform separate, sequential vertex or edge +queries.

      +

      When you perform a vertex query, all vertices whose attributes do not +satisfy the query are dropped from the graph, in addition to all edges +that touch them. When you perform an edge query, all edges whose +attributes do not satisfy the criteria are dropped, but all vertices +remain unless you enable the Eliminate disconnected vertices check box. Note that enabling this option drops all +disconnected vertices whether or not they were disconnected before the +edge query.

      +

      Review the Network Data Tips before you start work with a study’s files.

      +

      To subset variables within a network data set by using a manually +defined query:

      +
        +
      1. In the Data File page, click the Manual Query radio button near the +top of the page.

        +
      2. +
      3. Use the Attribute Set drop-down list and select Vertex to subset by +node or vertex values. +Select Edge to subset by edge values.

        +
      4. +
      5. Build the first attribute selection value in the Manual Query Builder +panel:

        +
          +
        1. Select a value in the Attributes list to assign values on which to +subset.
        2. +
        3. Use the Operators drop-down list to choose the function by which +to define attributes for selection in this query.
        4. +
        5. In the Values field, type the specific values to use for selection +of the attribute.
        6. +
        7. Click Add to Query to complete the attribute definition for +selection. +You see the query string for this attribute in the Query Workspace +field.
        8. +
        +

        Alternatively, you can enter your query directly by typing it into +the Query Workspace field.

        +
      6. +
      7. Continue to add selection values to your query by using the Manual +Query Builder tools.

        +
      8. +
      9. To remove any vertices that do not connect with other data in the +set, check the Eliminate disconnected vertices check box.

        +
      10. +
      11. When you complete construction of your query string, click Run to +perform the query.

        +
      12. +
      13. Scroll to the bottom of the window, and when the query is processed +you see a new entry in the Subset History panel that defines your +query.

        +
      14. +
      +

      Continue to build a successive subset or download a subset.

      +

      Subset Automatically

      +

      Perform an Automatic Query to select a subgraph of the network based on +structural properties of the network. Remember to review the Network +Data Tips before you start work with a study’s files.

      +

      To subset variables within a network data set by using an automatically +generated query:

      +
        +
      1. In the Data File page, click the Automatic Query radio button near +the middle of the page.
      2. +
      3. Use the Function drop-down list and select the type of function with +which to select your subset:
          +
        • Largest graph - Subset the <nth> largest group of nodes that can +reach one another by walking across edges.
        • +
        • Neighborhood - Generate a subgraph of the original network +composed of all vertices that are positioned at most <n> steps +away from the currently selected vertices in the original network, +plus all of the edges that connect them. This is the only query +that can (and generally does) increase the number of vertices and +edges selected.
        • +
        +
      4. +
      5. In the Nth field, enter the <nth> degree with which to select data +using that function.
      6. +
      7. Click Run to perform the query.
      8. +
      9. Scroll to the bottom of the window, and when the query is processed +you see a new entry in the Subset History panel that defines your +query.
      10. +
      +

      Continue to build a successive subset or download a subset.

      +

      Build or Restart Subsets

      +

      Build a Subset

      +

      To build successive subsets and narrow your data selection +progressively:

      +
        +
      1. Perform a manual or automatic subset query on a selected data set.
      2. +
      3. Perform a second query to further narrow the results of your previous +subset activity.
      4. +
      5. When you arrive at the subset with which you choose to work, continue +to analyze or download that subset.
      6. +
      +

      Undo Previous Subset

      +

      You can reset, or undo, the most recent subsetting action for a data +set. Note that you can do this only one time, and only to the most +recent subset.

      +

      Scroll to the Subset History panel at the bottom of the page and +click Undo in the last row of the list of successive subsets. +The last subset is removed, and the previous subset is available for +downloading, further subsetting, or analysis.

      +

      Restart Subsetting

      +

      You can remove all subsetting activity and restore data to the original +set.

      +

      Scroll to the Subset History panel at the bottom of the page and +click Restart in the row labeled Initial State. +The data set is restored to the original condition, and is available +for downloading, subsetting, or analysis.

      +

      Run Network Measures

      +

      When you finish selecting the specific data that you choose to analyze, +run a Network Measure analysis on that data. Review the Network Data +Tips before you start your analysis.

      +
        +
      1. In the Data File page, click the Network Measure radio button near +the bottom of the page.
      2. +
      3. Use the Attributes drop-down list and select the type of analysis to +perform:
          +
        • Page Rank - Determine how much influence comes from a specific +actor or node.
        • +
        • Degree - Determine the number of relationships or collaborations +that exist within a network data set.
        • +
        • Unique Degree - Determine the number of collaborators that exist.
        • +
        • In Largest Component - Determine the largest component of a +network.
        • +
        • Bonacich Centrality - Determine the importance of a main actor or +node.
        • +
        +
      4. +
      5. In the Parameters field, enter the specific value with which to +subset data using that function:
          +
        • Page Rank - Enter a value for the parameter <d>, a proportion, +between 0 and 1.
        • +
        • Degree - Enter the number of relationships to extract from a +network data set.
        • +
        • Unique Degree - Enter the number of unique relationships to +extract.
        • +
        • In Largest Component - Enter the number of components to extract +from a network data set, starting with the largest.
        • +
        +
      6. +
      7. Click Run to perform the analysis.
      8. +
      9. Scroll to the bottom of the window, and when the analysis is +processed you see a new entry in the Subset History panel that +contains your analyzed data.
      10. +
      +

      Continue to download the analyzed subset.

      +

      Download Network Subsets or Measures

      +

      When you complete subsetting and analysis of a network data set, you can +download the final set of data. Network data subsets are downloaded in a +zip archive, which has the name subset_<original file name>.zip. +This archive contains three files:

      +
        +
      • subset.xml - A GraphML formatted file that contains the final +subsetted or analyzed data.
      • +
      • verticies.tab - A tabular file that contains all node data for +the final set.
      • +
      • edges.tab - A tabular file that contains all relationship data +for the final set.
      • +
      +

      Note: Each time you download a subset of a specific network data set, a +zip archive is downloaded that has the same name. All three zipped files +within that archive also have the same names. Be careful not to +overwrite a downloaded data set that you choose to keep when you perform +successive downloads.

      +

      To download a final set of data:

      +
        +
      1. Scroll to the Subset History panel on the Data File page.
      2. +
      3. Click Download Latest Results at the bottom of the history list.
      4. +
      5. Follow your browser’s prompts to open or save the data file to your +computer’s disk drive. Be sure to save the file in a unique location +to prevent overwriting an existing downloaded data file.
      6. +
      +

      Network Data Tips

      +

      Use these guidelines when subsetting or analyzing network data:

      +
        +
      • For a Page rank network measure, the value for the parameter <d> is a +proportion and must be between 0 and 1. Higher values of <d> increase +dispersion, while values of <d> closer to zero produce a more uniform +distribution. PageRank is normalized so that all of the PageRanks sum +to 1.
      • +
      • For a Bonacich Centrality network measure, the alpha parameter is a +proportion that must be between -1 and +1. It is normalized so that +all alpha centralities sum to 1.
      • +
      • For a Bonacich Centrality network measure, the exo parameter must be +greater than 0. A higher value of exo produces a more uniform +distribution of centrality, while a lower value allows more +variation.
      • +
      • For a Bonacich Centrality network measure, the original alpha +parameter of alpha centrality takes values only from -1/lambda to +1/lambda, where lambda is the largest eigenvalue of the adjacency +matrix. In this Dataverse Network implementation, the alpha parameter +is rescaled to be between -1 and 1 and represents the proportion of +1/lambda to be used in the calculation. Thus, entering alpha=1 sets +alpha to be 1/lambda. Entering alpha=0.5 sets alpha to be +1/(2*lambda).
      • +
      +
      +
      +
      +

      Data Visualization

      +

      Data Visualization allows contributors to make time series +visualizations available to end users. These visualizations may be +viewable and downloadable as graphs or data tables. Please see the +appropriate guide for more information on setting up a visualization or +viewing one.

      +
      +

      Explore Data

      +

      The study owner may make a data visualization interface available to +those who can view a study.  This will allow you to select various data +variables and see a time series graph or data table.  You will also be +able to download your custom graph for use in your own reports or +articles.

      +

      The study owner will at least provide a list of data measures from which +to choose.   These measures may be divided into types.  If they are you +will be able to narrow the list of measures by first selecting a measure +type.  Once you have selected a measure, if there are multiple variables +associated with the measure you will be able to select one or more +filters to uniquely identify a variable. By default any filter assigned +to a variable will become the label associated with the variable in the +graph or table.   By pressing the Add Line button you will add the +selected variable to your custom graph.

      +

        image0

      +

      Once you have added data to your graph you will be able to customize it +further.  You will be given a choice of display options made available +by the study owner.  These may include an interactive flash graph, a +static image graph and a numerical data table.   You will also be +allowed to edit the graph title, which by default is the name of the +measure or measures selected. You may also edit the Source Label. +Other customizable features are the height and the legend location of +the image graph.  You may also select a subset of the data by selecting +the start and end points of the time series.  Finally, on the display +tab you may opt to display the series as indices in which case a single +data point known as the reference period will be designated as 100 and +all other points of the series will be calculated relative to the +reference period.  If you select data points that do not have units in +common (i.e. one is in percent while the other is in dollars) then the +display will automatically be set to indices with the earliest common +data point as the default reference period.

      +

      image1

      +

      On the Line Details tab you will see additional information on the data +you have selected.  This may include links to outside web pages that +further explain the data.  On this tab you will also be able to edit the +label or delete the line from your custom graph.

      +

      On the Export tab you will be given the opportunity to export your +custom graph and/or data table.   If you select multiple files for +download they will be bound together in a single zip file.

      +

      The Refresh button clears any data that you have added to your custom +graph and resets all of the display options to their default values.

      +
      +
      +

      Set Up

      +

      This feature allows you to make time series visualizations available to +your end users.   These visualizations may be viewable and downloadable +as graphs or data tables.  In the current beta version of the feature +your data file must be subsettable and must contain at least one date +field and one or more measures.  You will be able to associate data +fields from your file to a time variable and multiple measures and +filters.

      +

      When you select Set Up Exploration from within a study, you must first +select the file for which you would like to set up the exploration.  The +list of files will include all subsettable data files within the study.

      +

      Once you have selected a file you will go to a screen that has 5 tabs to +guide you through the data visualization set-up. (In general, changes +made to a visualization on the individual tabs are not saved to the +database until the form’s Save button is pressed.  When you are in add +or edit mode on a tab, the tab will have an update or cancel button to +update the “working copy” of a visualization or cancel the current +update.)

      +

      If you have previously set up an exploration for a data file you may copy that exploration to a new file. +When you select a file for set up you will be asked if you want to copy an exploration from another data file +and will be presented a list of files from which to choose. Please note that the data variable names must +be identical in both files for this migration to work properly.

      +

      Time Variable

      +

      On the first tab you select the time variable of your data file.  The +variable list will only include those variables that are date or time +variables.  These variables must contain a date in each row.  You may +also enter a label in the box labeled Units.  This label will be +displayed under the x-axis of the graph created by the end user.

      +

      image2

      +

      Measures

      +

      On the Measures tab you may assign measures to the variables in your +data file.  First you may customize the label that the end user will see +for measures.  Next you may add measures by clicking the “Add Measure” +link.  Once you click that link you must give your measure a unique +name.  Then you may assign Units to it.  Units will be displayed as the +y-axis label of any graph produced containing that measure.  In order to +assist in the organizing of the measures you may create measure types +and assign your measures to one or more measure types.  Finally, the +list of variables for measures will include all those variables that are +entered as numeric in your data file.  If you assign multiple variables +to the same measure you will have to distinguish between them by +assigning appropriate filters.   For the end user, the measure will be +the default graph name.

      +

      image3

      +

      Filters

      +

      On the filters tab you may assign filters to the variables in your data +file.  Generally filters contain demographic, geographic or other +identifying information about the variables.  For a given group of +filters only one filter may be assigned to a single variable.  The +filters assigned to a variable must be sufficient to distinguish among +the variables assigned to a single measure.   Similar to measures, +filters may be assigned to one or more types.   For the end user the +filter name will be the default label of the line of data added to a +graph.

      +

      image4

      +
      +

      +
      +

      Sources

      +

      On the Sources tab you can indicate the source of each of the variables +in your data file.  By default, the source will be displayed as a note +below the x-axis labels.  You may assign a single source to any or all +of your data variables.  You may also assign multiple sources to any of +your data variables.

      +

      image5

      +
      +

      +
      +

      Display

      +

      On the Display tab you may customize what the end user sees in the Data +Visualization interface.  Options include the data visualization formats +made available to the end user and default view, the Measure Type label, +and the Variable Info Label.

      +
      +

      +
      +
      image6
      +
      +
      +

      Validate Button

      +

      When you press the “Validate” button the current state of your +visualization data will be validated.  In order to pass validation your +data must have one time variable defined.  There must also be at least +one measure variable assigned.  If more than one variable is assigned to +a given measure then filters must be assigned such that each single +variable is defined by the measure and one or more filters.  If the data +visualization does not pass validation a detailed error message +enumerating the errors will be displayed.

      +

      Release Button

      +

      Once the data visualization has been validated you may release it to end +users by pressing the “Release” button.  The release button will also +perform a validation.  Invalid visualizations will not be released, but +a detailed error message will not be produced.

      +

      Save Button

      +

      The “Save” button will save any changes made to a visualization on the +tabs to the database.   If a visualization has been released and changes +are saved that would make it invalid the visualization will be set to +“Unreleased”.

      +

      Exit Button

      +

      To exit the form press the “Exit” button.  You will be warned if you +have made any unsaved changes.

      +

      Examples

      +

      Simplest case – a single measure associated with a single variable.

      +

      Data variable contains information on average family income for all +Americans.  The end user of the visualization will see an interface as +below:

      +

      image7

      +

      Complex case - multiple measures and types along with multiple filters +and filter types.  If you have measures related to both income and +poverty rates you can set them up as measure types and associate the +appropriate measures with each type.  Then, if you have variables +associated with multiple demographic groups you can set them up as +filters.  You can set up filter types such as age, gender, race and +state of residence.  Some of your filters may belong to multiple types +such as males age 18-34.

      +

      image8

      +
      +
      +
      +
      +

      Dataverse Administration

      +

      Once a user creates a dataverse, that user becomes its owner and therefore is the +administrator of that dataverse. The dataverse administrator has access +to manage the settings described in this guide.

      +
      +

      Create a Dataverse

      +

      A dataverse is a container for studies and is the home for an individual +scholar’s or organization’s data.

      +

      Creating a dataverse is easy but first you must be a registered user. +Depending on site policy, there may be a “Create a Dataverse” link on +the Network home page. This first walks you through creating an account, +then a dataverse.

      +
        +
      1. Fill in the required information:
      2. +
      +
      +
        +
      • Type of Dataverse: Choose Scholar if it represents an individual’s work otherwise choose Basic.
      • +
      • Dataverse Name: This will be displayed on the network and dataverse home pages. If this is a Scholar dataverse it will automatically be filled in with the scholar’s first and last name.
      • +
      • Dataverse Alias: This is an abbreviation, usually lower-case, that becomes part of the URL for the new dataverse.
      • +
      +
      +
      The required fields to create a dataverse are configurable in the Network Options, so fields that are required may also include +Affiliation, Network Home Page Description, and Classification.
      +
      +
        +
      1. Click “Save” and you’re done! An email will be sent to you with more information, including the URL to access your new dataverse.
      2. +
      +

      *Required information can vary depending on site policy. Required fields are noted with a red asterisk.

      +
      +
      +

      Edit General Settings

      +

      Use the General Settings tab on the Options page to release your +dataverse, change the name, alias, and classification of your +dataverse. The classifications are used to browse to your dataverse from +the Network home page.

      +

      Navigate to the General Settings from the Options page:

      +

      Dataverse home page > Options page > Settings tab > General subtab

      +

      To edit the release status of your dataverse:

      +

      Select Released from the drop-down list when your dataverse is ready +to go public. Select Not Released if you wish to block public access +to your dataverse.

      +

      Your dataverse cannot be released if it does not contain any released +studies. Create a study or define a collection with studies from other +dataverses before you attempt to make your dataverse public.

      +

      To edit the affiliation, name, or alias settings of your dataverse:

      +

      If you edit a Scholar dataverse type, you can edit the following fields:

      +
        +
      • First Name - Edit your first name, which appears with your last name +on the Network home page in the Scholar Dataverse group.
      • +
      • Last Name - Edit your last name, which appears with your first name +on the Network home page in the Scholar Dataverse group.
      • +
      +

      If you edit either Scholar or basic types, you can edit any of the +following fields:

      +
        +
      • Affiliation - Edit your institutional identity.
      • +
      • Dataverse Name - Edit the title for your dataverse, which appears on +your dataverse home page. There are no naming restrictions.
      • +
      • Dataverse Alias - Edit your dataverse’s URL. Special characters +(~,`, !, @, #, $, %, ^, &, and *) and spaces are not allowed. +Note: if you change the Dataverse Alias field, the URL for your +Dataverse changes (http://.../dv/’alias’), which affects links to this +page.
      • +
      • Network Home Page Description - Edit the text that appears beside the +name of your dataverse on the Network home page.
      • +
      • Classification - Check the classifications, or groups, in which you +choose to include your dataverse. Remove the check for any +classifications that you choose not to join.
      • +
      +
      +
      +

      Edit Layout Branding

      +

      Customize Layout Branding (header/footer) to match your website

      +

      The Layout Branding allows you to customize your dataverse, by +adding HTML to the default banner and footer, such as that used on +your personal website. If your website has such layout elements as a +navigation menu or images, you can add them here. Each dataverse is +created with a default customization added, which you can leave as is, +edit to change the background color, or add your own customization.

      +

      Navigate to the Layout Branding from the Options page:

      +

      Dataverse home page > Options page > Settings tab > Customization subtab

      +

      To edit the banner and footer of your dataverse:

      +
        +
      1. In the Custom Banner field, enter your plain text, and HTML to define +your custom banner.
      2. +
      3. In the Custom Footer field, enter your plain text, and HTML to define +your custom footer.
      4. +
      +

      Embed your Dataverse into your website (iframes)

      +

      Want to embed your Dataverse on an OpenScholar site? Follow these special instructions.

      +

      For dataverse admins that are more advanced HTML developers, or that +have HTML developers available to assist them, you can create a page on +your site and add the dataverse with an iframe.

      +
        +
      1. Create a new page, that you will host on your site.
      2. +
      3. Add the following HTML code to the content area of that new +page.
      4. +
      +
      +
      +
      <script type="text/javascript">
      +
      var dvn_url = "[SAMPLE_ONLY_http://dvn.iq.harvard.edu/dvn/dv/sampleURL]";
      +
      var regexS = "[\\?&]dvn_subpage=([^&#]*)";
      +
      var regex = new RegExp( regexS );
      +
      var results = regex.exec( window.location.href );
      +
      if( results != null ) dvn_url = dvn_url + results[1];document.write('<iframe src="' + dvn_url + '"
      +
      onLoad="set_dvn_url(this)" width="100%" height="600px" frameborder="0"
      +
      style="background-color:#FFFFFF;"></iframe>');
      +
      </script>
      +
      +
      +
        +
      1. Edit that code by adding the URL of your dataverse (replace the +SAMPLE_ONLY URL in the example, including the brackets “[ ]”), and +adjusting the height.  We suggest you keep the height at or under +600px in order to fit the iframe into browser windows on computer +monitor of all sizes, with various screen resolutions.
      2. +
      3. The dataverse is set to have a min-width of 724px, so try to give the +page a width closer to 800px.
      4. +
      5. Once you have the page created on your site, with the iframe code, go +to the Settings tab, then the Customization subtab on your dataverse +Options page, and click the checkbox that disables customization for +your dataverse.
      6. +
      7. Then enter the URL of the new page on your site. That will redirect +all users to the new page on your site.
      8. +
      +

      Layout Branding Tips

      +
        +
      • HTML markup, including script tags for JavaScript, and style +tags for an internal style sheet, are permitted. The html, +head and body element tags are not allowed.
      • +
      • When you use an internal style sheet to insert CSS into your +customization, it is important to avoid using universal (“*”) +and type (“h1”) selectors, because these can overwrite the +external style sheets that the dataverse is using, which can break +the layout, navigation or functionality in the app.
      • +
      • When you link to files, such as images or pages on a web server +outside the network, be sure to use the full URL (e.g. +http://www.mypage.com/images/image.jpg).
      • +
      • If you recreate content from a website that uses frames to combine +content on the sides, top, or bottom, then you must substitute the +frames with table or div element types. You can open such an +element in the banner field and close it in the footer field.
      • +
      • Each time you click “Save”, your banner and footer automatically are +validated for HTML and other code errors. If an error message is +displayed, correct the error and then click “Save” again.
      • +
      • You can use the banner or footer to house a link from your homepage +to your personal website. Be sure to wait until you release your +dataverse to the public before you add any links to another website. +And, be sure to link back from your website to your homepage.
      • +
      • If you are using an OpenScholar or iframe site and the redirect is +not working, you can edit your branding settings by adding a flag to +your dataverse URL: disableCustomization=true. For example: +dvn.iq.harvard.edu/dvn/dv/mydv?disableCustomization=true. To +reenable: dvn.iq.harvard.edu/dvn/dv/mydv?disableCustomization=false. +Disabling the customization lasts for the length of the user session.
      • +
      +
      +
      +

      Edit Description

      +

      The Description is displayed on your dataverse Home page. Utilize this +field to display announcements or messaging.

      +

      Navigate to the Description from the Options page:

      +

      Dataverse home page > Options page > Settings tab > General subtab >Home Page Description

      +

      To change the content of this description:

      +
        +
      • Enter your description or announcement text in the field provided. +Note: A light blue background in any form field indicates HTML, JavaScript, and style tags are permitted. The html, head and body element types are not allowed.
      • +
      +

      Previous to the Version 3.0 release of the Dataverse Network, the +Description had a character limit set at 1000, which would truncate +longer description with a more >> link. This functionality has been +removed, so that you can add as much text or code to that field as you +wish. If you would like to add the character limit and truncate +functionality back to your dataverse, just add this snippet of +Javascript to the end of your description.

      +
      +
      +
      <script type="text/javascript">
      +
          jQuery(document).ready(function(){
      +
              jQuery(".dvn\_hmpgMainMessage span").truncate({max\_length:1000});
      +
          });
      +
      </script>
      +
      +
      +
      +
      +

      Edit Study Comments Settings

      +

      You can enable or disable the Study User Comments feature in your +dataverse. If you enable Study User Comments, any user has the option to +add a comment to a study in this dataverse. By default, this feature is +enabled in all new dataverses. Note that you should ensure there are +terms of use at the network or dataverse level that define acceptable +use of this feature if it is enabled.

      +

      Navigate to the Study User Comments from the Options page:

      +

      Dataverse home page > Options page > Settings tab > General subtab >Allow Study Comments

      +

      A user must create an account in your dataverse to use the comment +feature. When you enable this feature, be aware that new accounts will +be created in your dataverse when users add comments to studies. In +addition, the Report Abuse function in the comment feature is managed by +the network admin. If a user reads a comment that might be +inappropriate, that user can log in or register an account and access +the Report Abuse option. Comments are reported as abuse to the network +admin.

      +

      To manage the Study User Comments feature in your dataverse:

      +
        +
      • Click the “Allow Study Comments” check box to enable comments.
      • +
      • Click the checked box to remove the check and disable comments.
      • +
      +
      +
      +

      Manage E-Mail Notifications

      +

      You can edit the e-mail address used on your dataverse’s Contact Us page +and by the network when sending notifications on processes and errors. +By default, the e-mail address used is from the user account of the +dataverse creator.

      +

      Navigate to the E-Mail Notifications from the Options page:

      +

      Dataverse home page > Options page > Settings tab > General subtab >E-Mail Address(es)

      +

      To edit the contact and notification e-mail address for your dataverse:

      +
        +
      • Enter one or more e-mail addresses in the E-Mail Address field. +Provide the addresses of users who you choose to receive notification +when contacted from this dataverse. Any time a user submits a request +through your dataverse, including the Request to Contribute link and +the Contact Us page, e-mail is sent to all addresses that you enter +in this field. Separate each address from others with a comma. Do not +add any spaces between addresses.
      • +
      +
      +
      +

      Add Fields to Search Results

      +

      Your dataverse includes the network’s search and browse features to +assist your visitors in locating the data that they need. By default, +the Cataloging Information fields that appear in the search results or +in studies’ listings include the following: study title, authors, ID, +production date, and abstract. You can customize other Cataloging +Information fields to appear in search result listings after the default +fields. Additional fields appear only if they are populated for the +study.

      +

      Navigate to the Search Results Fields from the Options page:

      +

      Dataverse home page > Options page > Settings tab > Customization subtab > Search Results Fields

      +

      To add more Cataloging Information fields listed in the Search or Browse +panels:

      +
        +
      • Click the check box beside any of the following Cataloging +Information fields to include them in your results pages: Production +Date, Producer, Distribution Date, Distributor, Replication For, +Related Publications, Related Material, and Related Studies.
      • +
      +

      Note: These settings apply to your dataverse only.

      +
      +
      +

      Set Default Study Listing Sort Order

      +

      Use the drop-down menu to set the default sort order of studies on the +Study Listing page. By default, they are sorted by Global ID, but you +can also sort by Title, Last Released, Production Date, or Download +Count.

      +

      Navigate to the Default Study Listing Sort Order from the Options page:

      +

      Dataverse home page > Options page > Settings tab > Customization subtab > Default Sort Order

      +
      +
      +

      Enable Twitter

      +

      If your Dataverse Network has been configured for Automatic Tweeting, +you will see an option listed as “Enable Twitter.” When you click this, +you will be redirected to Twitter to authorize the Dataverse Network +application to send tweets for you.

      +

      Once authorized, tweets will be sent for each new study or study version +that is released.

      +

      To disable Automatic Tweeting, go to the Options page, and click +“Disable Twitter.”

      +

      Navigate to Enable Twitter from the Options page:

      +

      Dataverse home page > Options page > Settings tab > Promote Your Dataverse subtab > Sync Dataverse With Twitter

      +
      + +
      +

      Edit Terms for Study Creation

      +

      You can set up Terms of Use for the dataverse that require users to +acknowledge your terms and click “Accept” before they can contribute to +the dataverse.

      +

      Navigate to the Terms for Study Creation from the Options page:

      +

      Dataverse home page > Options page > Permissions tab > Terms subtab > Deposit Terms of Use

      +

      To set Terms of Use for creating or uploading to the dataverse:

      +
        +
      1. Click the Enable Terms of Use check box.
      2. +
      3. Enter a description of your terms to which visitors must agree before +they can create a study or upload a file to an existing study. +Note: A light blue background in any form field indicates HTML, +JavaScript, and style tags are permitted. The html and body +element types are not allowed.
      4. +
      +
      +
      +

      Edit Terms for File Download

      +

      You can set up Terms of Use for the network that require users to +acknowledge your terms and click “Accept” before they can download or +subset contents from the network.

      +

      Navigate to the Terms for File Download from the Options page:

      +

      Dataverse home page > Options page > Permissions tab > Terms subtab > Download Terms of Use

      +

      To set Terms of Use for downloading or subsetting contents from any +dataverse in the network:

      +
        +
      1. Click the Enable Terms of Use check box.
      2. +
      3. Enter a description of your terms to which visitors must agree before +they can download or analyze any file. +Note: A light blue background in any form field indicates HTML, +JavaScript, and style tags are permitted. The html and body +element types are not allowed.
      4. +
      +
      +
      +

      Manage Permissions

      +

      Enable contribution invitation, grant permissions to users and groups, +and manage dataverse file permissions.

      +

      Navigate to Manage Permissions from the Options page:

      +

      Dataverse home page > Options page > Permissions tab > Permissions subtab

      +

      Contribution Settings

      +

      Choose the access level contributors have to your dataverse. Whether +they are allowed to edit only their own studies, all studies, or whether +all registered users can edit their own studies (Open dataverse) or all +studies (Wiki dataverse). In an Open dataverse, users can add studies by +simply creating an account, and can edit their own studies any time, +even after the study is released. In a Wiki dataverse, users can not only +add studies by creating an account, but also edit any study in that +dataverse. Contributors cannot, however, release a study directly. After +their edits, they submit it for review and a dataverse administrator or +curator will release it.

      +

      User Permission Settings

      +

      There are several roles defined for users of a Dataverse Network +installation:

      +
        +
      • Data Users - Download and analyze all types of data
      • +
      • Contributors - Distribute data and receive recognition and citations +to it
      • +
      • Curators - Summarize related data, organize data, or manage multiple +sets of data
      • +
      • Administrators - Set up and manage contributions to your dataverse, +manage the appearance of your dataverse, organize your dataverse +collections
      • +
      +

      Privileged Groups

      +

      Enter group name to allow a group access to the dataverse. Groups are +created by network administrators.

      +

      Dataverse File Permission Settings

      +

      Choose ‘Yes’ to restrict ALL files in this dataverse. To restrict files +individually, go to the Study Permissions page of the study containing +the file.

      +
      +
      +

      Create User Account

      +

      As a registered user, you can:

      +
        +
      • Add studies to open and wiki dataverses, if available
      • +
      • Contribute to existing studies in wiki dataverses, if available
      • +
      • Add user comments to studies that have this option
      • +
      • Create your own dataverse
      • +
      +

      Network Admin Level

      +

      Navigate to Create User Account from the Options page:

      +

      Network home page > Options page > Permissions tab > Users subtab > Create User link

      +

      To create an account for a new user in your Network:

      +
        +
      1. +
        Complete the account information page.
        +

        Enter values in all required fields. Note: an email address can also be used as a username

        +
        +
        +
      2. +
      3. Click Create Account to save your entries.

        +
      4. +
      5. Go to the Permissions tab on the Options page to give the user +Contributor, Curator or Admin access to your dataverse.

        +
      6. +
      +

      Dataverse Admin Level

      +

      Navigate to Create User Account from the Options page:

      +

      Dataverse home page > Options page > Permissions tab > Permissions subtab > Create User link

      +

      To create an account for a new user in your Dataverse:

      +
        +
      1. +
        Complete the account information page.
        +

        Enter values in all required fields. Note: an email address can also be used as a username

        +
        +
        +
      2. +
      3. Click Create Account to save your entries.

        +
      4. +
      5. Go to the Permissions tab on the Options page to give the user +Contributor, Curator or Admin access to your dataverse.

        +
      6. +
      +

      New User: Network Homepage

      +

      As a new user, to create an account at the Dataverse Network homepage, select “Create Account” +at the top-right hand side of the page.

      +

      Complete the required information denoted by the red asterisk and save.

      +

      New User: Dataverse Level

      +

      As a new user, to create an account at the Dataverse level, select “Create Account” +at the top-right hand side of the page. Note: For Open Dataverses select “Create Account” in the orange box +on the top right hand side of the page labelled: “OPEN DATAVERSE”.

      +

      Complete the required information denoted by the red asterisk and save.

      +
      +
      +

      Download Tracking Data

      +

      You can view any guestbook responses that have been made in your +dataverse. Beginning with version 3.2 of Dataverse Network, if the +guestbook is not enabled, data will be collected silently based on the +logged-in user or anonymously. The data displayed includes user account +data or the session ID of an anonymous user, the global ID, study title +and file name of the file downloaded, the time of the download, the type +of download and any custom questions that have been answered. The +username/session ID and download type were not collected in the 3.1 +version of Dataverse Network. A comma separated values file of all +download tracking data may be downloaded by clicking the Export Results +button.

      +

      Navigate to the Download Tracking Data from the Options page:

      +

      Dataverse home page > Options page > Permissions tab > Download Tracking Data subtab

      +
      +
      +

      Edit File Download Guestbook

      +

      You can set up a guestbook for your dataverse to collect information on +all users before they can download or subset contents from the +dataverse. The guestbook is independent of Terms of Use. Once it has +been enabled it will be shown to any user for the first file a user +downloads from a given study within a single session. If the user +downloads additional files from the study in the same session a record +will be created in the guestbook response table using data previously +entered. Beginning with version 3.2 of Dataverse Network, if the +dataverse guestbook is not enabled in your dataverse, download +information will be collected silently based on logged-in user +information or session ID.

      +

      Navigate to the File Download Guestbook from the Options page:

      +

      Dataverse home page > Options page > Permissions tab > Guestbook subtab

      +

      To set up a Guestbook for downloading or subsetting contents from any study in the dataverse:

      +
        +
      1. Click the Enable File Download Guestbook check box.
      2. +
      3. Select or unselect required for any of the user account identifying +data points (First and last name, E-Mail address, etc.)
      4. +
      5. Add any custom questions to collect additional data. These questions +may be marked as required and set up as free text responses or +multiple choice. For multiple choice responses select Radio Buttons +as the Custom Field Type and enter the possible answers.
      6. +
      7. Any custom question may be removed at any time, so that it won’t show +for the end user. If there are any responses associated with question +that has been removed they will continue to appear in the Guestbook +Response data table.
      8. +
      +
      +
      +

      OpenScholar

      +

      Embed your Dataverse easily on an OpenScholar site

      +

      Dataverse integrates seamlessly with +OpenScholar, a self-service site builder for higher education.

      +

      To embed your dataverse on an OpenScholar site:

      +
        +
      1. On your Dataverse Options page, go to the Settings tab
      2. +
      3. Go to the Customization subtab
      4. +
      5. Click the checkbox that disables customization for your dataverse
      6. +
      7. Make note of your Dataverse alias URL (i.e. +http://thedata.harvard.edu/dvn/dv/myvalue)
      8. +
      9. Follow the OpenScholar Support Center +instructions to +enable the Dataverse App
      10. +
      +
      +
      +

      Enabling LOCKSS access to the Dataverse

      +

      Summary:

      +

      LOCKSS Project or Lots +of Copies Keeps Stuff Safe is an international initiative based at +Stanford University Libraries that provides a way to inexpensively +collect and preserve copies of authorized e-content. It does so using an +open source, peer-to-peer, decentralized server infrastructure. In order +to make a LOCKSS server crawl, collect and preserve content from a DVN, +both the server (the LOCKSS daemon) and the client (the DVN) sides must +be properly configured. In simple terms, the LOCKSS server needs to be +pointed at the DVN, given its location and instructions on what to +crawl, the entire network, or a particular Dataverse; on the DVN side, +access to the data must be authorized for the LOCKSS daemon. The section +below describes the configuration tasks that the administrator of a +Dataverse will need to do on the client side. It does not describe how +LOCKSS works and what it does in general; it’s a fairly complex system, +so please refer to the documentation on the LOCKSS +Project site for more +information. Some information intended to a LOCKSS server administrator +is available in the “Using LOCKSS with DVN” of the DVN Installers Guide +(our primary sysadmin-level manual).

      +

      Configuration Tasks:

      +

      In order for a LOCKSS server to access, crawl and preserve any data on a +given Dataverse Network, it needs to be granted an authorization by the +network administrator. (In other words, an owner of a dataverse cannot +authorize LOCKSS access to its files, unless LOCKSS access is configured +on the Dataverse Network level). By default, LOCKSS crawling of the +Dataverse Network is not allowed; check with the administrator of +your Dataverse Network for details.

      +

      But if enabled on the Dataverse Network level, the dataverse owner can +further restrict LOCKSS access. For example, if on the network level all +LOCKSS servers are allowed to crawl all publicly available data, the +owner can limit access to the materials published in his or her +dataverse to select servers only; specified by network address or +domain.

      +

      In order to configure LOCKSS access, navigate to the Advanced tab on the +Options page:

      +

      Dataverse home page > Options page > Settings tab > Advanced subtab

      +

      It’s important to understand that when a LOCKSS daemon is authorized to +“crawl restricted files”, this does not by itself grant the actual +access to the materials! This setting only specifies that the daemon +should not be skipping such restricted materials outright. If it is +indeed desired to have non-public materials collected and preserved by +LOCKSS, in addition to selecting this option, it will be the +responsibility of the DV Administrator to give the LOCKSS daemon +permission to actually access the files. As of DVN version 3.3, this can +only be done based on the IP address of the LOCKSS server (by creating +an IP-based user group with the appropriate permissions).

      +

      Once LOCKSS crawling of the Dataverse is enabled, the Manifest page +URL will be

      +

      http://<YOUR SERVER>/dvn/dv/<DV ALIAS>/faces/ManifestPage.xhtml.

      +
      +
      +
      +

      Study and Data Administration

      +

      Study Options are available for Contributors, Curators, and +Administrators of a Dataverse.

      +
      +

      Create New Study

      +

      Brief instructions for creating a study:

      +

      Navigate to the dataverse in which you want to create a study, then +click Options->Create New Study.

      +

      Enter at minimum a study title and click Save. Your draft study is now +created. Add additional cataloging information and upload files as +needed. Release the study when ready to make it viewable by others.

      +

      Data Citation widget

      +

      At the top of the edit study form, there is a data citation widget that +allows a user to quickly enter fields that appear in the data citation, +i.e. title, author, date, distributor. Otherwise, the information can be +entered as the fields appear in the data entry form.

      +

      See the information below for more details and recommendations for +creating a study.

      +

      Steps to Create a Study

      +
        +
      1. Enter Cataloging Information, including an abstract of the study. +Set Terms of Use for the study in the Cataloging fields, if you choose.
      2. +
      3. Upload files associated with the study.
      4. +
      5. Set permissions to access the study, all of the study files, or some +of the study files.
      6. +
      7. Delete your study if you choose, before you submit it for review.
      8. +
      9. Submit your study for review, to make it available to the public.
      10. +
      +

      There are several guidelines to creating a study:

      +
        +
      • You must create a study by performing steps in the specified order.
      • +
      • If multiple users edit a study at one time, the first user to click +Save assumes control of the file. Only that user’s changes are +effective.
      • +
      • When you save the study, any changes that you make after that do not +affect the study’s citation.
      • +
      +

      Enter Cataloging Information

      +

      To enter the Cataloging Information for a new study:

      +
        +
      1. Prepopulate Cataloging Information fields based on a study template +(if a template is available), use the Select Study Template pull-down +list to select the appropriate template.

        +

        A template provides default values for basic fields in the +Cataloging Information fields. The default template prepopulates the +Deposit Date field only.

        +
      2. +
      3. Enter a title in the Title field.

        +
      4. +
      5. Enter data in the remaining Cataloging Information fields. +To list all fields, including the Terms of Use fields, click the Show +All Fields button after you enter a title. Use the following +guidelines to complete these fields:

        +
          +
        • A light blue background in any form field indicates that HTML, +JavaScript, and style tags are permitted. You cannot use the +html and body element types.
        • +
        • To use the inline help and view information about a field, roll +your cursor over the field title.
        • +
        • Be sure to complete the Abstract field.
        • +
        • To set Terms of Use for your study, scroll to the bottom of the Cataloging Information tab. +Eight fields appear under the Terms of Use label. You must +complete at least one of these fields to enable Terms for this +study.
        • +
        +
      6. +
      7. Click the Save button and then add comments or a brief description +in the Study Version Notes popup. Then click the Continue button +and your study draft version is saved.

        +
      8. +
      +

      Upload Study Files

      +

      To upload files associated with a new study:

      +
        +
      1. For each file that you choose to upload to your study, first select +the Data Type from the drop-down list. Then click the Browse button +to select the file, and then click Upload to add each file at a time.

        +

        When selecting a CSV (character-separated values) data type, an SPSS Control Card file is first required.

        +

        When selecting a TAB (tab-delimited) data type, a DDI Control Card file is first required. There is no restriction to the number or types of files that you can upload to the Dataverse Network.

        +

        There is a maximum file size of 2 gigabytes for each file that you upload.

        +
      2. +
      3. After you upload one file, enter the type of file in the Category +field and then click Save. +If you do not enter a category and click Save, the Category +drop-down list does not have any value. You can create any category +to add to this list.

        +
      4. +
      5. For each file that you upload, first click the check box in front of +the file’s entry in the list, and then use the Category drop-down +list to select the type of file that you uploaded.

        +

        Every checked file is assigned the category that you select. Be sure +to click the checked box to remove the check before you select a new +value in the Category list for another file.

        +
      6. +
      7. In the Description field, enter a brief message that identifies the +contents of your file.

        +
      8. +
      9. Click Save when you are finished uploading files. Note: If you upload a subsettable file, that process takes a few +moments to complete. During the upload, the study is not available for editing. When you receive e-mail notification that the +subsettable file upload is complete, click Refresh to continue editing the study.

        +

        You see the Documentation, Data and Analysis tab of the study page +with a list of the uploaded files. For each subsettable tabular +data set file that you upload, the number of cases and variables and +a link to the Data Citation information for that data set are +displayed. If you uploaded an SPSS (.sav or .por) file, the +Type for that file is changed to Tab delimited and the file +extension is changed to .tab when you click Save.

        +

        For each subsettable network data set file that you upload, the number of edges and vertices and a link to the Data Citation +information for that data set are displayed.

        +
      10. +
      11. Continue to the next step and set file permissions for the study or +its files.

        +
      12. +
      +

      Study File Tips

      +

      Keep in mind these tips when uploading study files to your dataverse:

      +
        +
      • The following subsettable file types are supported: +
      • +
      • A custom ingest for FITS Astronomical data files has been added in v.3.4. (see FITS File format Ingest in the Appendix)
      • +
      • You can add information for each file, including:
          +
        • File name
        • +
        • Category (documentation or data)
        • +
        • Description
        • +
        +
      • +
      • If you upload the wrong file, click the Remove link before you click +Save. +To replace a file after you upload it and save the study, first +remove the file and then upload a new one.
      • +
      • If you upload a STATA (.dta), SPSS (.sav or .por), or +network (.xml) file, the file automatically becomes subsettable +(that is, subset and analysis tools are available for that file in +the Network). In this case, processing the file might take some time +and you will not see the file listed immediately after you click +Save.
      • +
      • When you upload a subsettable data file, you are prompted to +provide or confirm your e-mail address for notifications. One e-mail +lets you know that the file upload is in progress; a second e-mail +notifies you when the file upload is complete.
      • +
      • While the upload of the files takes place, your study is not +available for editing. When you receive e-mail notification that the +upload is completed, click Refresh to continue editing the study.
      • +
      +

      Set Study and File Permissions

      +

      You can restrict access to a study, all of its files, or some of its +files. This restriction extends to the search and browse functions.

      +

      To permit or restrict access:

      +
        +
      1. On the study page, click the Permissions link.

        +
      2. +
      3. To set permissions for the study:

        +
          +
        1. Scroll to the Entire Study Permission Settings panel, and click +the drop-down list to change the study to Restricted or Public.
        2. +
        3. In the User Restricted Study Settings field, enter a user or +group to whom you choose to grant access to the study, then click +Add.
        4. +
        +

        To enable a request for access to restricted files in the study, +scroll to the File Permission Settings panel, and click the +Restricted File Settings check box. This supplies a request link on +the Data, Documentation and Analysis tab for users to request access +to restricted files by creating an account.

        +

        To set permission for individual files in the study:

        +
          +
        1. Scroll to the Individual File Permission Settings panel, and enter +a user or group in the Restricted File User Access Username +field to grant permissions to one or more individual files.
        2. +
        3. Use the File Permission pull-down list and select the permission +level that you choose to apply to selected files: Restricted or +Public.
        4. +
        5. In the list of files, click the check box for each file to which +you choose to apply permissions. +To select all files, click the check box at the top of the list.
        6. +
        7. Click Update. +The users or groups to which you granted access privileges appear +in the File Permissions list after the selected files.
        8. +
        +
      4. +
      +

      Note: You can edit or delete your study if you choose, but only until +you submit the study for review. After you submit your study for review, +you cannot edit or delete it from the dataverse.

      +

      Delete Studies

      +

      You can delete a study that you contribute, but only until you submit +that study for review. After you submit your study for review, you +cannot delete it from the dataverse.

      +

      If a study is no longer valid, it can now be deaccessioned so it’s +unavailable to users but still has a working citation. A reference to a +new study can be provided when deaccessioning a study. Only Network +Administrators can now permanently delete a study once it has been +released.

      +

      To delete a draft version:

      +
        +
      1. Click the Delete Draft Version link in the top-right area of the +study page.

        +

        You see the Delete Draft Study Version popup.

        +
      2. +
      3. Click the Delete button to remove the draft study version from the +dataverse.

        +
      4. +
      +

      To deaccession a study:

      +
        +
      1. +
        Click the Deaccession link in the top-right area of the study page.
        +

        You see the Deaccession Study page.

        +
        +
        +
      2. +
      3. You have the option to add your comments about why the study was +deaccessioned, and a link reference to a new study by including the +Global ID of the study.

        +
      4. +
      5. Click the Deaccession button to remove your study from the +dataverse.

        +
      6. +
      +

      Submit Study for Review

      +

      When you finish setting options for your study, click Submit For +Review in the top-right corner of the study page. The page study +version changes to show In Review.

      +

      You receive e-mail after you click Submit For Review, notifying you +that your study was submitted for review by the Curator or Dataverse +Admin. When a study is in review, it is not available to the public. You +receive another e-mail notifying you when your study is released for +public use.

      +

      After your study is reviewed and released, it is made available to the +public, and it is included in the search and browse functions. The +Cataloging Information tab for your study contains the Citation +Information for the complete study. The Documentation, Data and Analysis +tab lists the files associated with the study. For each subsettable file +in the study, a link is available to show the Data Citation for that +specific data set.

      +

      UNF Calculation

      +

      When a study is created, a UNF is calculated for each subsettable file +uploaded to that study. All subsettable file UNFs then are combined to +create another UNF for the study. If you edit a study and upload new +subsettable files, a new UNF is calculated for the new files and for the +study.

      +

      If the original study was created before version 2.0 of the Dataverse +Network software, the UNF calculations were performed using version 3 of +that standard. If you upload new subsettable files to an existing study +after implementation of version 2.0 of the software, the UNFs are +recalculated for all subsettable files and for the study using version 5 +of that standard. This prevents incompatibility of UNF version numbers +within a study.

      +
      +
      +

      Manage Studies

      +

      You can find all studies that you uploaded to the dataverse, or that +were submitted by a Contributor for review, giving you access to view, +edit, release, or delete studies.

      +

      View, Edit, and Delete/Deaccession Studies

      +

      To view and edit studies that you uploaded:

      +
        +
      1. Click a study Global ID, title, or Edit link to go to the study +page.
      2. +
      3. From the study page, do any of the following:
          +
        • Edit Cataloging Information
        • +
        • Edit/Delete File + Information
        • +
        • Add File(s)
        • +
        • Edit Study Version Notes
        • +
        • Permissions
        • +
        • Create Study Template
        • +
        • Release
        • +
        • Deaccession
        • +
        • Destroy Study
        • +
        +
      4. +
      +

      To delete or deaccession studies that you uploaded:

      +
        +
      1. If the study has not been released, click the Delete link to open +the Delete Draft Study Version popup.
      2. +
      3. If the study has been released, click the Deaccession link to open +the Deaccession Study page.
      4. +
      5. Add your comments about why the study was deaccessioned, and a +reference link to another study by including the Global ID, then +click the Deaccession button.
      6. +
      +

      Release Studies

      +

      When you release a study, you make it available to the public. Users can +browse it or search for it from the dataverse or Network homepage.

      +

      You receive e-mail notification when a Contributor submits a study for +review. You must review each study submitted to you and release that +study to the public. You receive a second e-mail notification after you +release a study.

      +

      To release a study draft version:

      +
        +
      1. Review the study draft version by clicking the Global ID, or title, +to go to the Study Page, then click Release in the upper right +corner. For a quick release, click Release from the Manage Studies +page.
      2. +
      3. If the study draft version is an edit of an existing study, you will +see the Study Version Differences page. The table allows you to view +the changes compared to the current public version of the study. +Click the Release button to continue.
      4. +
      5. Add comments or a brief description in the Study Version Notes popup. +Then click the Continue button and your study is now public.
      6. +
      +
      +
      +

      Manage Study Templates

      +

      You can set up study templates for a dataverse to prepopulate any of +the Cataloging Information fields of a new study with default values. +When a user adds a new study, that user can select a template to fill in +the defaults.

      +

      Create Template

      +

      Study templates help to reduce the work needed to add a study, and to +apply consistency to studies within a dataverse. For example, you can +create a template to include the Distributor and Contact details so that +every study has the same values for that metadata.

      +

      To create a new study template:

      +
        +
      1. Click Clone on any Template.
      2. +
      3. You see the Study Template page.
      4. +
      5. In the Template Name field, enter a descriptive name for this +template.
      6. +
      7. Enter generic information in any of the Cataloging Information +metadata fields.  You may also change the input level of any field to +make a certain field required, recommended, optional or hidden. + Hidden fields will not be visible to the user creating studies from +the template.
      8. +
      9. After you complete entry of generic details in the fields that you +choose to prepopulate for new studies, click Save to create the +template.
      10. +
      +

      Note: You also can create a template directly from the study page to +use that study’s Cataloging Information in the template.

      +

      Enable a template

      +

      Click the Enabled link for the given template. Enabled templates are +available to end users for creating studies.

      +

      Edit Template

      +

      To edit an existing study template:

      +
        +
      1. In the list of templates, click the Edit link for the template that +you choose to edit.
      2. +
      3. You see the Study Template page, with the template setup that you +selected.
      4. +
      5. Edit the template fields that you choose to change, add, or remove.
      6. +
      +

      Note: You cannot edit any Network Level Template.

      +

      Make a Template the Default

      +

      To set any study template as the default template that applies +automatically to new studies: +In the list of templates, click the Make Default link next to the name +of the template that you choose to set as the default. +The Current Default Template label is displayed next to the name of +the template that you set as the default.

      +
      +
      Remove Template
      +
      To delete a study template from a dataverse:
      +
      +
        +
      1. In the list of templates, click the Delete link for the template that +you choose to remove from the dataverse.
      2. +
      3. You see the Delete Template page.
      4. +
      5. Click Delete to remove the template from the dataverse.
      6. +
      +

      Note:  You cannot delete any network template, default template or +template in use by any study.

      +
      +
      +

      Data Uploads

      +

      Troubleshooting Data Uploads:

      +

      Though the add files page works for the majority of our users, there can +be situations where uploading files does not work. Below are some +troubleshooting tips, including situations where uploading a file might +fail and things to try.

      +

      Situations where uploading a file might fail:

      +
        +
      1. File is too large, larger than the maximum size, should fail immediately with an error.
      2. +
      3. File takes too long and connection times out (currently this seems to happen after 5 mins) Failure behavior is vague, depends +on browser. This is probably an IceFaces issue.
      4. +
      5. User is going through a web proxy or firewall that is not passing through partial submit headers. There is specific failure +behavior here that can be checked and it would also affect other web site functionality such as create account link. See +redmine ticket #2352.
      6. +
      7. AddFilesPage times out, user begins adding files and just sits there idle for a long while until the page times out, should +see the red circle slash.
      8. +
      9. For subsettable files, there is something wrong with the file +itself and so it is not ingested. In these cases they should upload as other and we can test here.
      10. +
      11. For subsettable files, there is something wrong with our ingest code that can’t process something about that particular file, +format, version.
      12. +
      13. There is a browser specific issue that is either a bug in our +software that hasn’t been discovered or it is something unique to their browser such as security settings or a conflict with a +browser plugin like developer tools. Trying a different browser such as Firefox or Chrome would be a good step.
      14. +
      15. There is a computer or network specific issue that we can’t determine such as a firewall, proxy, NAT, upload versus download +speed, etc. Trying a different computer at a different location might be a good step.
      16. +
      17. They are uploading a really large subsettable file or many files and it is taking a really long time to upload.
      18. +
      19. There is something wrong with our server such as it not responding.
      20. +
      21. Using IE 8, if you add 2 text or pdf files in a row it won’t upload but if you add singly or also add a subsettable file they +all work. Known issue, reported previously, #2367
      22. +
      +

      So, general information that would be good to get and things to try would be:

      +
        +
      1. Have you ever been able to upload a file?
      2. +
      3. Does a small text file work?
      4. +
      5. Which browser and operating system are you using? Can you try Firefox or Chrome?
      6. +
      7. Does the problem affect some files or all files? If some files, do they work one at a time? Are they all the same type such as +Stata or SPSS? Which version? Can they be saved as a supported version, e.g. Stata 12 or SPSS 20? Upload them as type “other” +and we’ll test here.
      8. +
      9. Can you try a different computer at a different location?
      10. +
      11. Last, we’ll try uploading it for you (may need DropBox to facilitate upload).
      12. +
      +
      +
      +

      Manage Collections

      +

      Collections can contain studies from your own dataverse or another, +public dataverse in the Network.

      +

      Create Collection

      +

      You can create new collections in your dataverse, but any new collection +is a child of the root collection except for Collection Links. When you +create a child in the root collection, you also can create a child +within that child to make a nested organization of collections. The root +collection remains the top-level parent to all collections that are not +linked from another dataverse.

      +

      There are three ways in which you can create a collection:

      +
        +
      • Static collection - You assign specific studies to this type of +collection.
      • +
      • Dynamic collection - You can create a query that gathers studies into +a collection based on matching criteria, and keep the contents +current. If a study matches the query selection criteria one week, +then it is changed and no longer matches the criteria, that study is +only a member of the collection as long as its criteria match the +query.
      • +
      • Linked collection - You can link an existing collection from another +dataverse to your dataverse homepage. Note that the contents of that +collection can be edited only in the originating dataverse.
      • +
      +

      Create Static Collection by Assigning Studies

      +

      To create a collection by assigning studies directly to it:

      +
        +
      1. Locate the root collection to create a direct subcollection in the +root, or locate any other existing collection in which you choose +to create a new collection. Then, click the Create link in the Create +Child field for that collection.

        +

        You see the Study Collection page.

        +
      2. +
      3. In the Type field, click the Static option.

        +
      4. +
      5. Enter your collection Name.

        +
      6. +
      7. Select the Parent in which you choose to create the collection. +The default is the collection in which you started on the Manage +Collections page. You cannot create a collection in another +dataverse unless you have permission to do so.

        +
      8. +
      9. Populate the Selected Studies box:

        +
          +
        • Click the Browse link to use the Dataverse and Collection +pull-down lists to create a list of studies.
        • +
        • Click the Search link to select a query field and search for +specific studies, enter a term to search for in that query field, +and then click Search.
        • +
        +

        A list of available studies is displayed in the Studies to Choose +from box.

        +
      10. +
      11. In the Studies to Choose from box, click a study to assign it to your +collection.

        +

        You see the study you clicked in the Selected Studies box.

        +
      12. +
      13. To remove studies from the list of Selected Studies, click the study +in that box.

        +

        The study is removed from the Selected Studies box.

        +
      14. +
      15. If needed, repopulate the Studies to Choose from box with new +studies, and add additional studies to the Studies Selected list.

        +
      16. +
      +

      Create Linked Collection

      +

      You can create a collection as a link to one or more collections from +other dataverses, thereby defining your own collections for users to +browse in your dataverse.

      +

      Note: A collection created as a link to a collection from another +dataverse is editable only in the originating dataverse. Also, +collections created by use of this option might not adhere to the +policies for adding Cataloging Information and study files that you +require in your own dataverse.

      +

      To create a collection as a link to another collection:

      +
        +
      1. In the Linked Collections field, click Add Collection Link.

        +

        You see the Add Collection Link window.

        +
      2. +
      3. Use the Dataverse pull-down list to select the dataverse from which +you choose to link a collection.

        +
      4. +
      5. Use the Collection pull-down list to select a collection from your +selected dataverse to add a link to that collection in your +dataverse.

        +

        The collection you select will be displayed in your dataverse +homepage, and will be included in your dataverse searches.

        +
      6. +
      +

      Create Dynamic Collection as a Query

      +

      When you create a collection by assigning the results of a query to it, +that collection is dynamic and is updated regularly based on the query +results.

      +

      To create a collection by assigning the results of a query:

      +
        +
      1. Locate the root collection to create a direct subcollection in the +root, or locate any other existing collection in which you choose +to create a new collection. Then, click the Create link in the Create +Child field for that collection.

        +

        You see the Study Collection page.

        +
      2. +
      3. In the Type field, click the Dynamic option.

        +
      4. +
      5. Enter your collection Name.

        +
      6. +
      7. Select the Parent in which you choose to create the collection.

        +

        The default is the collection in which you started on the Manage Collections page. You cannot create a collection in another +dataverse unless you have permission to do so.

        +
      8. +
      9. Enter a Description of this collection.

        +
      10. +
      11. In the Enter query field, enter the study field terms for which to +search to assign studies with those terms to this collection. +Use the following guidelines:

        +
          +
        • Almost all study fields can be used to build a collection query.

          +

          The study fields must be entered in the appropriate format to +search the fields’ contents.

          +
        • +
        • Use the following format for your query: +title:Elections AND keywordValue:world.

          +

          For more information on query syntax, refer to the +Documentation page at +the Lucene website and look for Query Syntax. See the +cataloging fields +document for field query names.

          +
        • +
        • For each study in a dataverse, the Study Global Id field in the +Cataloging Information consists of three query terms: +protocol, authority, and globalID.

          +

          If you build a query using protocol, your collection can +return any study that uses the protocol you specified.

          +

          If you build a query using all three terms, your collection +returns only one study.

          +
        • +
        +
      12. +
      13. To limit this collection to search for results in your own dataverse, +click the Only your dataverse check box.

        +
      14. +
      +

      Edit Collections

      +
        +
      1. Click a collection title to edit the contents or setup of that +collection.

        +

        You see the Collection page, with the current collection settings +applied.

        +
      2. +
      3. Change, add, or delete any settings that you choose, and then click +Save Collection to save your edits.

        +
      4. +
      +

      Delete Collections or Remove Links

      +

      To delete existing static or dynamic collections:

      +
        +
      1. For the collection that you choose to delete, click the Delete link.
      2. +
      3. Confirm the delete action to remove the collection from your +dataverse.
      4. +
      +

      To remove existing linked collections:

      +
        +
      1. For the linked collection that you choose to remove, click the +Remove link. (Note: There is no confirmation for a Remove action. +When you click the Remove link, the Dataverse Network removes the linked collection immediately.)
      2. +
      +
      +
      +

      Managing User File Access

      +

      User file access is managed through a set of access permissions that +together determine whether or not a user can access a particular file, +study, or dataverse. Generally speaking, there are three places where +access permissions can be configured: at the dataverse level, at the +study level, and at the file level. Think of each of these as a security +perimeter or lock with dataverse being the outermost perimeter, study +the next, and finally the file level. When configuring user file access, +it might be helpful to approach this from the dataverse access level +first and so on.

      +

      For example, a user would like access to a particular file. Since files +belong to studies and studies belong to dataverses, first determine +whether the user has access to the dataverse. If the dataverse is +released, all users have access to it. If it is unreleased, the user +must appear in the User Permissions section on the dataverse permissions +page.

      +

      Next, they would need access to the study. If the study is public, then +everyone has access. If it is restricted, the user must appear in the +User Restricted Study Settings section on the study permissions page.

      +

      Last, they would need access to the file. If the file is public, +everyone has access. If the file is restricted, then the user must be +granted access.

      +

      There are two ways a file can be restricted.

      +

      First, on the dataverse permissions page, all files in the dataverse +could be restricted using Restrict ALL files in this Dataverse. To +enable user access in this case, add the username to the Restricted File +User Access section on this page.

      +

      Second, individual files can be restricted at the study level on the +study permissions page in the “Files” subtab. These can be restricted on a file-by-file basis. +If this is the case, the file(s) will be displayed +as restricted in the Individual File Permission Settings section. To +enable user access to a particular file in this case, check the file to +grant access to, type the username in the Restricted File User Access +section, click update so their name appears next to the file, then click +save.

      +

      Another option at the study level when restricting files is to allow users the ability to +request access to restricted files. This can be done in the study Permissions page in the “Files” subtab where +you must first select the files you want to restrict, click on “update permissions” to restrict, and then under +“File Permission Settings” check off the box to “Allow users to request access...” and click on Save at the bottom +of the page. The contact(s) set for the Dataverse (Dataverse Options > Settings > General) will get an email +notification each time a user sends a request. The request access email will display a list of the file(s) +requested and a DOI or Handle for the study. To approve or deny access to these file(s) go back to the study +permissions page under the “Files” subtab and Approve or Deny the specific files that were requested. If you +choose to deny any files you will have the option to add a reason why. Be sure to remember to click on the “update” +button and then select Save so that your selections are saved and an email is sent to the requestor granting or +denying them access. The email then sent to the requestor will list out which files were approved with a DOI or +Handle URL, and any files which were denied along with any reasons that may have been provided.

      +

      Finally, a somewhat unusual configuration could exist where both +Restrict all files in a dataverse is set and an individual file is +restricted. In this case access would need to be granted in both places +-think of it as two locks. This last situation is an artifact of +integrating these two features and will be simplified in a future +release.

      +
      +
      +
      +

      Network Administration

      +

      The Dataverse Network provides several options for configuring and +customizing your application. To access these options, login to the +Dataverse Network application with an account that has Network +Administrator privileges. By default, a brand new installation of the +application will include an account of this type - the username and +password is ‘networkAdmin’.

      +

      After you login, the Dataverse Network home page links to the Options +page from the “Options” gear icon, in the menu bar. Click on the icon to +view all the options available for customizing and configuring the +applications, as well as some network administrator utilities.

      +

      The following tasks can be performed from the Options page:

      +
        +
      • Manage dataverses, harvesting, exporting, and OAI sets - Create, +edit, and manage standard and harvesting dataverses, manage +harvesting schedules, set study export schedules, and manage OAI +harvesting sets.
      • +
      • Manage subnetworks - Create, edit, and manage subnetworks, manage network and subnetwork level study templates.
      • +
      • Customize the Network pages and description - Brand your Network and +set up your Network e-mail contact.
      • +
      • Set and edit Terms of Use - Apply Terms of Use at the Network level +for accounts, uploads, and downloads.
      • +
      • Create and manage user accounts and groups and Network privileges, +and enable option to create a dataverse - Manage logins, permissions, +and affiliate access to the Network.
      • +
      • Use utilities and view software information - Use the administrative +utilities and track the current Network installation.
      • +
      +
      +

      Dataverses Section

      +
      +

      Create a New Dataverse

      +

      A dataverse is a container for studies and is the home for an individual +scholar’s or organization’s data.

      +

      Creating a dataverse is easy but first you must be a registered user. +Depending on site policy, there may be a link on the Network home page, +entitled “Create a Dataverse”. This first walks you through creating an +account, then a dataverse. If this is not the case on your site, log in, +then navigate to the Create a New Dataverse page and complete the +required information. That’s it!

      +
        +
      1. +
        Navigate to the Create a New Dataverse page:
        +

        Network home page > Options page >Dataverses tab > Dataverse subtab > “Create Dataverse” link.

        +
        +
        +
      2. +
      3. Fill in the required information:

        +
        +

        Type of Dataverse

        +

        Choose Scholar if it represents an individual’s work otherwise choose Basic.

        +

        Dataverse Name

        +

        This will be displayed on the network and dataverse home +pages. If this is a Scholar dataverse it will automatically be +filled in with the scholar’s first and last name.

        +

        Dataverse Alias

        +

        This is an abbreviation, usually lower-case, that becomes part of the URL for the new dataverse.

        +
        +
      4. +
      5. Click Save and you’re done!

        +

        An email will be sent to you with more information, including the URL to access your new dataverse.

        +
      6. +
      +

      Required information can vary depending on site policy. Required fields are noted with a red asterisk.

      +

      Note: If “Allow users to create a new Dataverse when they create an account” is enabled, there is a Create a Dataverse link on the Network home page.

      +
      +
      +

      Manage Dataverses

      +

      As dataverses increase in number it’s useful to view summary information +in table form and quickly locate a dataverse of interest. The Manage +Dataverse table does just that.

      +

      Navigate to Network home page > Options page > Dataverses tab > +Dataverses subtab > Manage Dataverse table:

      +
        +
      • Dataverses are listed in order of most recently created.
      • +
      • Clicking on a column name sorts the list by that column such as Name +or Affiliation.
      • +
      • Clicking on a letter in the alpha selector displays only those +dataverses beginning with that letter.
      • +
      • Move through the list of dataverses by clicking a page number or the +forward and back buttons.
      • +
      • Click Delete to remove a dataverse.
      • +
      +
      +
      +
      +

      Subnetwork Section

      +

      A subnetwork is a container for a group of dataverses. Users will be able to create their dataverses in a particular subnetwork. It may include its own branding and its own custom study templates.

      +
      +

      Create a New Subnetwork

      +

      You must be a network admin in order to create a subnetwork. These are the steps to create a subnetwork:

      +
        +
      1. +
        Navigate to Create a New Subnetwork Page:
        +

        Network home page > Options page > Subnetworks tab > Create Subnetwork link

        +
        +
        +
      2. +
      3. Fill in required information:

        +
        +

        Subnetwork Name

        +

        The name to be displayed in the menubar. Please use a short name.

        +

        Subnetwork Alias

        +

        Short name used to build the URL for this Subnetwork. It is case sensitive.

        +

        Subnetwork Short Description

        +

        This short description is displayed on the Network Home page.

        +
        +
      4. +
      5. +
        Fill in Optional Branding
        +

        These fields include a logo file, Subnetwork affiliation, description, and custom banner and footer.

        +
        +
        +
      6. +
      7. Click Save and you’re done!

        +
      8. +
      +
      +
      +

      Manage Subnetworks

      +

      The Manage Subnetworks page gives summary information about all of the subnetworks in your installation.

      +

      Navigate to Network home page > Options Page > Subnetworks tab:

      +
        +
      • Subnetworks are listed alphabetically
      • +
      • Clicking on a column name sorts the list by that column
      • +
      • Click Edit to edit the subnetwork’s information or branding
      • +
      • Click Delete to remove a subnetwork. Note: this will not remove the dataverses assigned to the subnetwork. The dataverses will remain and may be reassigned to another subnetwork.
      • +
      +
      +
      +

      Manage Classifications

      +

      Classifications are a way to organize dataverses on the network home +page so they are more easily located. They appear on the left side of +the page and clicking on a classification causes corresponding +dataverses to be displayed. An example classification might be +Organization, Government.

      +

      Classifications typically form a hierarchy defined by the network +administrator to be what makes sense for a particular site. A top level +classification could be Organization, the next level Association, +Business, Government, and School.

      +

      The classification structure is first created on the Options page, from +the Manage Classifications table. Once a classification is created, +dataverses can be assigned to it either when the dataverse is first +created or later from the Options page: Network home page > (Your) +Dataverse home page > Options page > Settings tab > General subtab.

      +

      To manage classifications, navigate to the Manage Classifications table:

      +

      Network home page > Options page > Classifications tab > Manage +Classifications table

      +

      From here you can view the current classification hierarchy, create a +classification, edit an existing classification including changing its +place in the hierarchy, and delete a classification.

      +
      +
      +

      Manage Study Comments Notifications

      +

      Dataverse admins can enable or disable a User Comment feature within +their dataverses. If this feature is enabled, users are able to add +comments to studies within that dataverse. Part of the User Comment +feature is the ability for users to report comments as abuse if they +deem that comment to be inappropriate in some way.

      +

      Note that it is a best practice to explicitly define terms of use +regarding comments when the User Comments feature is enabled. If you +define those terms at the Network level, then any study to which +comments are added include those terms.

      +

      When a user reports another’s comment as abuse, that comment is listed +on the Manage Study Comment Notifications table on the Options page. For +each comment reported as abuse, you see the study’s Global ID, the +comment reported, the user who posted the comment, and the user who +reported the comment as abuse.

      +

      There are two ways to manage abuse reports: In the Manage Study Comment +Notifications table on the Options page, and on the study page User +Comments tab. In both cases, you have the options to remove the comment +or to ignore the abuse report.

      +

      The Manage Study Comments Notifications table can be found here:

      +

      Network home page > Options page > Dataverses tab > Study Comments +subtab > Manage Study Comment Notifications table

      +
      +
      +

      Manage Controlled Vocabulary

      +

      You can set up controlled vocabulary for a dataverse network to give the +end user a set list of choices to select from for most fields in a study +template. Study fields which do not allow controlled vocabulary include +the study title and subtitle, certain date fields and geographic +boundaries.

      +

      To manage controlled vocabulary, navigate to the Manage Controlled +Vocabulary table:

      +

      Network home page > Options page > Vocabulary tab > Manage Controlled Vocabulary table

      +

      To create a new controlled vocabulary:

      +
        +
      1. Click Create New Controlled Vocabulary.
      2. +
      3. You see the Edit Controlled Vocabulary page.
      4. +
      5. In the Name field, enter a descriptive name for this Controlled +Vocabulary. In the Description field enter any additional information +that will make it easier to identify a particular controlled +vocabulary item to assign to a given custom field. In the Values +field enter the controlled vocabulary values that you want to make +available to users for a study field. Here you can submit an entire list of terms at once. Use the “add” and “remove” buttons +to add or subtract values from the list. You may also copy and paste a list of values separated by carriage returns.
      6. +
      7. After you complete entry of values, click Save to create the +controlled vocabulary.
      8. +
      +

      Edit Controlled Vocabulary

      +

      To edit an existing controlled vocabulary:

      +
        +
      1. In the list of controlled vocabulary, click the Edit link for the +controlled vocabulary that you choose to edit. You see the Edit +Controlled Vocabulary page, with the controlled vocabulary setup that +you selected.
      2. +
      3. Edit the controlled vocabulary items that you choose to change, add, +or remove. You may also copy and paste a list of values separated by carriage returns.
      4. +
      +
      +
      +

      Manage Network Study Templates

      +

      You can set up study templates for a dataverse network to prepopulate any of the Cataloging Information fields of a new study with default values. Dataverse administrators may clone a Network template and modify it for users of that dataverse. You may also change the input level of any field to make a certain field required, recommended, optional, hidden or disabled. Hidden fields will not be available to the user, but will be available to the dataverse administrator for update in cloned templates. Disabled fields will not be available to the dataverse administrator for update. You may also add your own custom fields. When a user adds a new study, that user can select a template to fill in the defaults.

      +

      To manage study templates, navigate to the Manage Study Templates table:

      +

      Network home page > Options page > Templates tab > Manage Study Templates table

      +

      Create Template

      +

      Study templates help to reduce the work needed to add a study, and to +apply consistency to studies across a dataverse network. For example, +you can create a template to include the Distributor and Contact details +so that every study has the same values for that metadata.

      +

      To create a new study template:

      +
        +
      1. Click Create New Network Template.
      2. +
      3. You see the Study Template page.
      4. +
      5. In the Template Name field, enter a descriptive name for this +template.
      6. +
      7. Enter generic information in any of the Cataloging Information +metadata fields. You can also add your own custom fields to the Data +Collection/Methodology section of the template. Each custom field +must be assigned a Name, Description and Field Type. You may also +apply controlled vocabulary to any of the custom fields that are set +to Plain Text Input as Field Type.
      8. +
      9. After you complete entry of generic details in the fields that you +choose to prepopulate for new studies, click Save to create the +template.
      10. +
      +

      Enable a template

      +

      Click the Enabled link for the given template. Enabled templates are +available to database administrators for cloning and end users for +creating studies.

      +

      Edit Template

      +

      To edit an existing study template:

      +
        +
      1. In the list of templates, click the Edit link for the template that +you choose to edit.
      2. +
      3. You see the Study Template page, with the template setup that you +selected.
      4. +
      5. Edit the template fields that you choose to change, add, or remove.
      6. +
      +

      Make a Template the Default

      +

      To set any study template as the default template that applies +automatically to the creation of new network templates:

      +

      In the list of templates, click the Make Default Selection link next to the name +of the template that you choose to set as the default for a subnetwork(s). A pop-up window with the names of the subnetworks will appear and you may select the appropriate subnetworks. The subnetwork name(s) is displayed in the Default column of the template that you set as the +default for each given subnetwork.

      +

      Remove Template

      +

      To delete a study template from a dataverse:

      +
        +
      1. In the list of templates, click the Delete link for the template that +you choose to remove from the network.
      2. +
      3. You see the Delete Template page.
      4. +
      5. Click Delete to remove the template from the network. Note that you +cannot delete any template that is in use or is a default template at +the network or dataverse level.
      6. +
      +
      +
      +
      +

      Harvesting Section

      +
      +

      Create a New Harvesting Dataverse

      +

      A harvesting dataverse allows studies from another site to be imported +so they appear to be local, though data files remain on the remote site. +This makes it possible to access content from data repositories and +other sites with interesting content as long as they support the OAI or +Nesstar protocols.

      +

      Harvesting dataverses differ from ordinary dataverses in that study +content cannot be edited since it is provided by a remote source. Most +dataverse functions still apply including editing the dataverse name, +branding, and setting permissions.

      +

      Aside from providing the usual name, alias, and affiliation information, creating a harvesting dataverse involves specifying the harvest protocol, OAI or Nesstar, the remote server URL, possibly format and set information, whether or how to register incoming studies, an optional harvest schedule, and permissions settings.

      +

      To create a harvesting dataverse navigate to the Create a New Harvesting +Dataverse page:

      +

      Network home page > Options page > Harvesting tab > Harvesting Dataverses subtab > "Create Harvesting Dataverse" link

      +

      Complete the form by entering required information and click Save.

      +

      An example dataverse to harvest studies native to the Harvard dataverse:

      +
        +
      • Harvesting Type: OAI Server
      • +
      • Dataverse Name: Test IQSS Harvest
      • +
      • Dataverse Alias: testiqss
      • +
      • Dataverse Affiliation: Our Organization
      • +
      • Server URL: http://dvn.iq.harvard.edu/dvn/OAIHandler
      • +
      • Harvesting Set: No Set (harvest all)
      • +
      • Harvesting Format: DDI
      • +
      • Handle Registration: Do not register harvested studies (studies must already have a handle)
      • +
      +
      +
      +

      Manage Harvesting

      +

      Harvesting is a background process meaning once initiated, either directly or via a timer, it conducts a transaction with a remote server and exits without user intervention. Depending on site policy and considering the update frequency of remote content this could happen daily, weekly, or on-demand. How does one determine what happened? By using the Manage Harvesting Dataverses table on the Options page.

      +

      To manage harvesting dataverses, navigate to the Manage Harvesting +Dataverses table:

      +

      Network home page > Options page > Harvesting tab > Harvesting Dataverses subtab > Manage Harvesting Dataverses table

      +

      The Manage Harvesting table displays all harvesting dataverses, their +schedules, and harvest results in table form. The name of each +harvesting dataverse is a link to that harvesting dataverse’s +configuration page. The schedule, if configured, is displayed along with +a button to enable or disable the schedule. The last attempt and result +is displayed along with the last non-zero result. It is possible for the +harvest to check for updates and there are none. A Run Now button +provides on-demand harvesting and a Remove link deletes the harvesting +dataverse.

      +

      Note: the first time a dataverse is harvested the entire catalog is +harvested. This may take some time to complete depending on size. +Subsequent harvests check for additions and changes or updates.

      +

      Harvest failures can be investigated by examining the import and server +logs for the timeframe and dataverse in question.

      +
      +
      +

      Schedule Study Exports

      +

      Sharing studies programmatically or in batch such as by harvesting +requires information about the study or metadata to be exported in a +commonly understood format. As this is a background process requiring no +user intervention, it is common practice to schedule this to capture +updated information.

      +

      Our export process generates DDI, Dublin Core, Marc, and FGDC formats +though DDI and Dublin Core are most commonly used. Be aware that +different formats contain different amounts of information with DDI +being most complete because it is our native format.

      +

      To schedule study exports, navigate to the Harvesting Settings subtab:

      +

      Network home page > Options page > Harvesting tab > Settings subtab > Export Schedule

      +

      First enable export then choose frequency: daily using hour of day or +weekly using day of week. Click Save and you are finished.

      +

      To disable, just choose Disable export and Save.

      +
      +
      +

      Manage OAI Harvesting Sets

      +

      By default, a client harvesting from the Dataverse Network that does not +specify a set would fetch all unrestricted, locally owned +studies - in other words public studies that were not harvested +from elsewhere. For various reasons it might be desirable to define sets +of studies for harvest such as by owner, or to include a set that was +harvested from elsewhere. This is accomplished using the Manage OAI +Harvesting Sets table on the Options page.

      +

      The Manage OAI Harvesting Sets table lists all currently defined OAI +sets, their specifications, and edit, create, and delete functionality.

      +

      To manage OAI harvesting sets, navigate to the Manage OAI Harvesting +Sets table:

      +

      Network home page > Options page > Harvesting tab > OAI Harvesting Sets subtab > Manage OAI Harvesting Sets table

      +

      To create an OAI set, click Create OAI Harvesting Set, complete the +required fields and Save. The essential parameter that defines the set +is the Query Definition. This is a search query using Lucene +syntax +whose results populate the set.

      +

      Once created, a set can later be edited by clicking on its name.

      +

      To delete a set, click the appropriately named Delete Set link.

      +

      To test the query results before creating an OAI set, a recommended +approach is to create a dynamic study +collection using the +proposed query and view the collection contents. Both features use the +same Lucene +syntax +but a study collection provides a convenient way to confirm the results.

      +

      Generally speaking, basic queries take the form of study metadata +field:value. Examples include:

      +
        +
      • globalId:"hdl 1902 1 10684" OR globalId:"hdl 1902 1 11155": Include studies with global ids hdl:1902.1/10684 and +hdl:1902.1/11155
      • +
      • authority:1902.2: Include studies whose authority is 1902.2. Different authorities usually represent different sources such +as IQSS, ICPSR, etc.
      • +
      • dvOwnerId:184: Include all studies belonging to dataverse with database id 184
      • +
      • studyNoteType:"DATAPASS": Include all studies that were tagged with or include the text DATAPASS in their study note field.
      • +
      +

      Study Metadata Search Terms:

      +
      +
      title
      +
      subtitle
      +
      studyId
      +
      otherId
      +
      authorName
      +
      authorAffiliation
      +
      producerName
      +
      productionDate
      +
      fundingAgency
      +
      distributorName
      +
      distributorContact
      +
      distributorContactAffiliation
      +
      distributorContactEmail
      +
      distributionDate
      +
      depositor
      +
      dateOfDeposit
      +
      seriesName
      +
      seriesInformation
      +
      studyVersion
      +
      relatedPublications
      +
      relatedMaterial
      +
      relatedStudy
      +
      otherReferences
      +
      keywordValue
      +
      keywordVocabulary
      +
      topicClassValue
      +
      topicClassVocabulary
      +
      abstractText
      +
      abstractDate
      +
      timePeriodCoveredStart
      +
      timePeriodCoveredEnd
      +
      dateOfCollection
      +
      dateOfCollectionEnd
      +
      country
      +
      geographicCoverage
      +
      geographicUnit
      +
      unitOfAnalysis
      +
      universe
      +
      kindOfData
      +
      timeMethod
      +
      dataCollector
      +
      frequencyOfDataCollection
      +
      samplingProcedure
      +
      deviationsFromSampleDesign
      +
      collectionMode
      +
      researchInstrument
      +
      dataSources
      +
      originOfSources
      +
      characteristicOfSources
      +
      accessToSources
      +
      dataCollectionSituation
      +
      actionsToMinimizeLoss
      +
      controlOperations
      +
      weighting
      +
      cleaningOperations
      +
      studyLevelErrorNotes
      +
      responseRate
      +
      samplingErrorEstimate
      +
      otherDataAppraisal
      +
      placeOfAccess
      +
      originalArchive
      +
      availabilityStatus
      +
      collectionSize
      +
      studyCompletion
      +
      confidentialityDeclaration
      +
      specialPermissions
      +
      restrictions
      +
      contact
      +
      citationRequirements
      +
      depositorRequirements
      +
      conditions
      +
      disclaimer
      +
      studyNoteType
      +
      studyNoteSubject
      +
      studyNoteText
      +
      +
      +
      +

      Edit LOCKSS Harvest Settings

      +

      Summary:

      +

      LOCKSS Project or Lots of Copies Keeps Stuff Safe is an international initiative based at Stanford University Libraries that provides a way to inexpensively collect and preserve copies of authorized e-content. It does so using an open source, peer-to-peer, decentralized server infrastructure. In order to make a LOCKSS server crawl, collect and preserve content from a Dataverse Network, both the server (the LOCKSS daemon) and the client (the Dataverse Network) sides must be properly configured. In simple terms, the LOCKSS server needs to be pointed at the Dataverse Network, given its location and instructions on what to crawl; the Dataverse Network needs to be configured to allow the LOCKSS daemon to access the data. The section below describes the configuration tasks that the Dataverse Network administrator will need to do on the client side. It does not describe how LOCKSS works and what it does in general; it’s a fairly complex system, so please refer to the documentation on the LOCKSS Project site for more information. Some information intended for a LOCKSS server administrator is available in the “Using LOCKSS with Dataverse Network (DVN)” section of the Dataverse Network Installers Guide

      +
      +
      (our primary sysadmin-level manual).
      +

      Configuration Tasks:

      +

      Note that neither the standard LOCKSS Web Crawler, nor the OAI plugin +can properly harvest materials from a Dataverse Network.  A custom LOCKSS plugin +developed and maintained by the Dataverse Network project is available here: +http://lockss.hmdc.harvard.edu/lockss/plugin/DVNOAIPlugin.jar. +For more information on the plugin, please see the “Using LOCKSS with +Dataverse Network (DVN)” section of +the Dataverse Network Installers Guide. In order for a LOCKSS daemon to collect DVN +content designated for preservation, an Archival Unit must be created +with the plugin above. On the Dataverse Network side, a Manifest must be created that +gives the LOCKSS daemon permission to collect the data. This is done by +completing the “LOCKSS Settings” section of the: +Network Options -> Harvesting -> Settings tab.

      +

      For the Dataverse Network, LOCKSS can be configured at the network level +for the entire site and also locally at the dataverse level. The network +level enables LOCKSS harvesting but more restrictive policies, including +disabling harvesting, can be configured by each dataverse. A dataverse +cannot enable LOCKSS harvesting if it has not first been enabled at the +network level.

      +

      This “Edit LOCKSS Harvest Settings” section refers to the network level +LOCKSS configuration.

      +

      To enable LOCKSS harvesting at the network level do the following:

      +
        +
      • Navigate to the LOCKSS Settings page: Network home page -> Network Options -> Harvesting -> Settings.
      • +
      • Fill in the harvest information including the level of harvesting allowed (Harvesting Type, Restricted Data Files), the scope +of harvest by choosing a predefined OAI set, then if necessary a list of servers or domains allowed to harvest.
      • +
      • It’s important to understand that when a LOCKSS daemon is authorized +to “crawl restricted files”, this does not by itself grant the actual +access to the materials! This setting only specifies that the daemon +should not be skipping such restricted materials outright. (The idea +behind this is that in an archive with large amounts of +access-restricted materials, if only public materials are to be +preserved by LOCKSS, lots of crawling time can be saved by instructing +the daemon to skip non-public files, instead of having it try to access +them and get 403/Permission Denied). If it is indeed desired to have +non-public materials collected and preserved by LOCKSS, it is the +responsibility of the DVN Administrator to give the LOCKSS daemon +permission to access the files. As of DVN version 3.3, this can only be +done based on the IP address of the LOCKSS server (by creating an +IP-based user group with the appropriate permissions).
      • +
      • Next select any licensing options or enter additional terms, and click “Save Changes”.
      • +
      • Once LOCKSS harvesting has been enabled, the LOCKSS Manifest page will +be provided by the application. This manifest is read by LOCKSS servers +and constitutes agreement to the specified terms. The URL for the +network-level LOCKSS manifest is +http://<YOUR SERVER>/dvn/faces/ManifestPage.xhtml (it will be +needed by the LOCKSS server administrator in order to configure an +Archive Unit for crawling and preserving the DVN).
      • +
      +
      +
      +
      +

      Settings Section

      +
      +

      Edit Name

      +

      The name of your Dataverse Network installation is displayed at the top +of the Network homepage, and as a link at the top of each dataverse +homepage in your Network.

      +

      To create or change the name of your Network, navigate to the Settings +tab on the Options page:

      +

      Network home page > Options page > Settings tab > General subtab > Network Name

      +

      Enter a descriptive title for your Network. There are no naming +restrictions, but it appears in the heading of every dataverse in your +Network, so a short name works best.

      +

      Click Save and you are done!

      +
      +
      +

      Edit Layout Branding

      +

      When you install a Network, there is no banner or footer on any page in +the Network. You can apply any style to the Network pages, such as that +used on your organization’s website. You can use plain text, HTML, +JavaScript, and style tags to define your custom banner and footer. If +your website has such elements as a navigation menu or images, you can +add them to your Network pages.

      +

      To customize the layout branding of your Network, navigate to the +Customization subtab on the Options page:

      +

      Network home page > Options page > Settings tab > Customization subtab > +Edit Layout Branding

      +

      Enter your banner and footer content in the Custom Banner and Custom +Footer fields and Save.

      +

      See Layout Branding Tips for guidelines.

      +
      +
      +

      Edit Description

      +

      By default your Network homepage has the following description: A description of your Dataverse Network or announcements may be added here. Use Network Options to edit or remove this text. You can edit that text to describe or announce such things as new Network features, new dataverses, or maintenance activities. You can also disable the description so that it does not appear on the homepage.

      +

      To manage the Network description, navigate to:

      +

      Network home page > Options page > Settings tab > General subtab > Network Description

      +

      Create a description by entering your desired content in the text box. +HTML, JavaScript, and style tags are permitted. The html and +body element types are not allowed. Next enable the description +display by checking the Enable Description in Homepage checkbox. Click +Save and you’re done. You can disable the display of the description but +keep the content by unchecking and saving.

      +
      +
      +

      Edit Dataverse Requirements

      +

      Enforcing a minimum set of requirements can help ensure content +consistency.

      +

      When you enable dataverse requirements, newly created dataverses cannot +be made public or released until the selected requirements are met. +Existing dataverses are not affected until they are edited. Edits to +existing dataverses cannot be saved until requirements are met.

      +

      To manage the requirements, navigate to:

      +

      Network home page > Options page > Settings tab > Advanced subtab > Release Dataverse Requirements

      +

      Available requirements include:

      +
        +
      • Require Network Homepage Dataverse Description
      • +
      • Require Dataverse Affiliation
      • +
      • Require Dataverse Classification
      • +
      • Require Dataverse Studies included prior to release
      • +
      +
      +
      +

      Manage E-Mail Notifications

      +

      The Dataverse Network sends notifications via email for a number of +events on the site, including workflow events such as creating a +dataverse, uploading files, releasing a study, etc. Many of these +notifications are sent to the user initiating the action as well as to +the network administrator. Additionally, the Report Issue link on the +network home page sends email to the network administrator. By default, +this email is sent to +support@thedata.org <mailto:support@thedata.org>.

      +

      To change this email address navigate to the Options page:

      +

      Network home page > Options page > Settings tab > General subtab > E-Mail Address(es)

      +

      Enter the address of network administrators who should receive these +notifications and Save.

      +

      Please note the Report Issue link when accessed within a dataverse gives +the option of sending notification to the network or dataverse +administrator. Configuring the dataverse administrator address is done +at the dataverse level: +(Your) Dataverse home page > Options page > Settings tab > General subtab > E-Mail Address(es)

      +
      +
      +

      Enable Twitter

      +

      If your Dataverse Network has been configured for Automatic Tweeting, +you will see an option listed as “Enable Twitter.” When you click this, +you will be redirected to Twitter to authorize the Dataverse Network +application to send tweets for you.

      +

      To manage the Dataverse Twitter configuration, navigate to:

      +

      Dataverse home page > Options page > Settings tab > Promote Your Dataverse subtab > Sync Dataverse With Twitter

      +

      Once authorized, tweets will be sent for each new dataverse that is +released.

      +

      To disable Automatic Tweeting, go to the options page, and click +“Disable Twitter.”

      +
      +
      +
      +

      Terms Section

      +
      +

      Edit Terms for Account Creation

      +

      You can set up Terms of Use that require users with new accounts to +accept your terms before logging in for the first time.

      +

      To configure these terms navigate to the Options page:

      +

      Network home page > Options page > Permissions tab > Terms subtab > Account Term of Use

      +

      Enter your required terms as you would like them to appear to users. +HTML, JavaScript, and style tags are permitted. The html and +body element types are not allowed. Check Enable Terms of Use to +display these terms. Click Save and you are finished. To disable but +preserve your current terms, uncheck the Enable checkbox and save.

      +
      +
      +

      Edit Terms for Study Creation

      +

      You can set up Terms of Use for the Network that require users to accept +your terms before they can create or modify studies, including adding +data files. These terms are defined at the network level so they apply +across all dataverses. Users will be presented with these terms the +first time they attempt to modify or create a study during each session.

      +

      To configure these terms of use navigate to the Options page:

      +

      Network home page > Options page > Permissions tab > Terms subtab > Deposit Term of Use

      +

      Enter your terms as you would like to display them to the user. HTML, +JavaScript, and style tags are permitted. The html and body +element types are not allowed. Check Enable Terms of Use and save. +Uncheck Enable Terms of Use and save to disable but preserve existing +terms of use.

      +
      +
      +

      Edit Terms for File Download

      +

      You can set up Terms of Use for the Network that require users to accept +your terms before they can download or subset files from the Network. +Since this is defined at the network level it applies to all dataverses. +Users will be presented with these terms the first time they attempt to +download a file or access the subsetting and analysis page each session.

      +

      To configure these terms, navigate to the Options page:

      +

      Network home page > Options page > Permissions tab > Terms subtab > Download Term of Use

      +

      Enter the terms as you want them to appear to the user. HTML, +JavaScript, and style tags are permitted. The html and body +element types are not allowed. Check Enable Terms of Use and save. +Unchecking the checkbox and saving disables the display of the terms but +preserves the current content.

      +
      +
      +

      Download Tracking Data

      +

      You can view any guestbook responses that have been made in all +dataverses. Beginning with version 3.2 of Dataverse Network, for any +dataverse where the guestbook is not enabled data will be collected +silently based on the logged in user or anonymously. The data displayed +includes user account data or the session id of an anonymous user, the +global ID, study title and filename of the file downloaded, the time of +the download, the type of download and any custom questions that have +been answered. The username/session ID and download type were not +collected in the 3.1 version of DVN. A comma separated values file of +all download tracking data may be downloaded by clicking the Export +Results button.

      +

      To manage the Network download tracking data, navigate to:

      +

      Network home page > Options page > Permissions tab > Download Tracking Data subtab > Manage Download Tracking Data table

      +
      +
      +
      +

      Permissions and Users Section

      +
      +

      Manage Network Permissions

      +

      Permissions that are configured at the network level include:

      +
        +
      • Enabling users to create an account when they create a dataverse.
      • +
      • Granting privileged roles to existing users including network +administrator and dataverse creator.
      • +
      • Changing and revoking privileged roles of existing users.
      • +
      +

      Enabling users to create an account when they create a dataverse +displays a “Create a Dataverse” link on the network home page. New and +unregistered users coming to the site can click on this link, create an +account and a dataverse in one workflow rather than taking two separate +steps involving the network administrator.

      +

      Granting a user account network administrator status gives that user +full control over the application as managed through the UI.

      +

      Granting a user account dataverse creator status is somewhat a legacy +function since any user who creates a dataverse has this role.

      +

      To manage these permissions, navigate to the Manage Network Permissions +table on the Options page:

      +

      Network home page > Options page > Permissions tab > Permissions subtab > Manage Network Permissions table

      +

      Enable account with dataverse creation by checking that option and +saving.

      +

      Granting privileged status to a user requires entering a valid, existing +user name, clicking add, choosing the role, then saving changes.

      +
      +
      +

      Roles by Version State Table

      + +++++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
       Role    
      Version StateNoneContributor +, +++CuratorAdminNetwork Admin**
      Draft E,E2,D3,S,VE,E2,P,T,D3,R,VE,E2,P,T,D3,R,VE,E2,P,T,D3,D2,R,V
      In Review E,E2,D3,VE,E2,P,T,D3,R,VE,E2,P,T,D3,R,VE,E2,P,T,D3,R,D2,V
      ReleasedVE,VE,P,T,D1,VE,P,T,D1,VE,P,T,D2,D1,V
      ArchivedVVP,T,VP,T,VP,T,D2,V
      Deaccessioned  P,T,R2,VP,T,R2,VP,T,R2,D2,V
      +

      Legend:

      +

      E = Edit (Cataloging info, File meta data, Add files)

      +

      E2 = Edit Study Version Notes

      +

      D1 = Deaccession

      +

      P = Permission

      +

      T = Create Template

      +

      D2 = Destroy

      +

      D3 = Delete Draft, Delete Review Version

      +

      S = Submit for Review

      +

      R = Release

      +

      R2 = Restore

      +

      V = View

      +

      Notes:

      +

      *Same as Curator

      +

      **Same as Curator + D2

      +

      +Contributor actions (E,D3,S,V) depend on new DV permission settings. A +contributor role can act on their own studies (default) or all studies +in a dv, and registered users can become contributors and act on their +own studies or all studies in a dv.

      +

      ++ A contributor is defined either as a contributor role or as any +registered user in a DV that allows all registered users to contribute.

      +
      +
      +

      Authorization to access Terms-protected files via the API

      +

      As of DVN v. 3.2, a programmatic API has been provided for accessing DVN materials. It supports Basic HTTP Auth where the client authenticates itself as an existing DVN (or anonymous) user. Based on this, the API determines whether the client has permission to access the requested files or metadata. It is important to remember however, that in addition to access permissions, DVN files may also be subject to “Terms of Use” agreements. When access to such files is attempted through the Web Download or Subsetting interfaces, the user is presented with an agreement form. The API however is intended for automated clients, so the remote party’s compliance with the Terms of Use must be established beforehand. We advise you to have a written agreement with authorized parties before allowing them to access data sets, bypassing the Terms of Use. The authorized party should be responsible for enforcing the Terms of Use to their end users. Once such an agreement has been established, you can grant the specified user unrestricted access to Terms-protected materials on the Network home page > Options page > PERMISSIONS tab > Permissions subtab, in the “Authorize Users to bypass Terms of Use” section.

      +

      Please consult the Data Sharing section of the Guide for additional +information on the Data Sharing API.

      +
      +
      +

      Create Account

      +

      There are several ways to create accounts: at the network level by the +network administrator, at the dataverse level by the dataverse +administrator, and by the new user themselves if the option to create an +account when creating a dataverse is enabled.

      +

      Accounts created by all methods are equivalent with the exception of +granting dataverse creator status during the create a dataverse +workflow. That status can be granted afterwards by the network +administrator if necessary.

      +

      To create an account at the network admin level, navigate to the Create +Account page from the Options page:

      +

      Network home page > Options page > Permissions tab > Users subtab > Create User link > Create Account page

      +

      Complete the required information denoted by the red asterisk and save. +Note: an email address can also be used as a username.

      +
      +
      +

      Manage Users

      +

      The Manage Users table gives the network administrator a list of all +user accounts in table form. It lists username, full name, roles +including at which dataverse the role is granted, and the current status +whether active or deactivated.

      +

      Usernames are listed alphabetically and clicking on a username takes you +to the account page that contains detailed information on that account. +It also provides the ability to update personal details and change +passwords.

      +

      The Manage Users table also provides the ability to deactivate a user +account.

      +

      To view the Manage Users table navigate to the Options page:

      +

      Network home page > Options page > Permissions tab > Users subtab > Manage Users table

      +
      +
      +

      Manage Groups

      +

      Groups in the Dataverse Network are a way to identify collections of +users so permissions can be applied collectively rather than +individually. This allows controlling permissions for individuals by +altering membership in the group without affecting permissions of other +members. Groups can be defined by user names or IP addresses.

      +

      The Manage Groups table lists information about existing groups in table +form including name, display or friendly name, and group membership.

      +

      Clicking on the name takes you to the Edit Group page where the group’s +configuration can be changed. It is also possible to create and delete +groups from the Manage Groups table.

      +

      To view the Manage Groups table, navigate to the Options page:

      +

      Network home page > Options page > Permissions tab > Groups subtab > +Manage Groups table

      +

      Once on the Groups subtab, viewing the Manage Groups table, you can +create or delete a group.

      +

      When creating a group you must choose whether to identify users by +username or by IP address with a Username Group or IP User Group.

      +

      With a Username Group, enter an existing username into the edit box, +click the “+” symbol to enter additional users, then save.

      +

      With an IP User Group, enter an IP address or domain name into the edit +box. Wildcards can be used by specifying an asterisk (*) in place of an +IP address octet (eg. 10.20.30.*), or for the sub-domain or host +portion of the domain name (eg. *.mydomain.edu).

      +

      Last, an optional special feature of the IP User Group is to allow for +an Affiliate Login Service. Effectively this allows for the use of a +proxy to access the Dataverse Network on behalf of a group such as a +University Library where identification and authorization of users is +managed by their proxy service. To enable this feature, enter IP +addresses of any proxy servers that will access Dataverse Network, check +This IP group has an affiliate login service, enter the Affiliate Name +as it will appear on the Dataverse Network Login page, and the Affiliate +URL which would go to the proxy server. Save and you are finished.

      +
      +
      +
      +

      Utilities

      +

      The Dataverse Network provides the network administrator with tools to +manually execute background processes, perform functions in batch, and +resolve occasional operational issues.

      +

      Navigate to the Utilities from the Options page:

      +

      Network home page > Options page > Utilities tab

      +

      Available tools include:

      +
        +
      • Study Utilities - Create draft versions of studies, release file locks and delete multiple studies by inputting ID’s.
      • +
      • Index Utilities - Create a search index.
      • +
      • Export Utilities - Select files and export them.
      • +
      • Harvest Utilities - Harvest selected studies from another Network.
      • +
      • File Utilities - Select files and apply the JHOVE file validation process to them.
      • +
      • Import Utilities - Import multiple study files by using this custom batch process.
      • +
      • Handle Utilities - Register and re-register study handles.
      • +
      +

      Study Utilities

      +

      Curating a large group of studies sometimes requires direct database +changes affecting a large number of studies that may belong to different +dataverses. An example might be changing the distributor name and logo +or the parent dataverse. Since the Dataverse Network employs study +versioning, it was decided that any such backend changes should +increment the affected studies’ version. However, incrementing a study’s +version is nontrivial as a database update. So, this utility to create a +draft of an existing study was created.

      +

      The practice would involve generating a list of study database ID’s that +need changing, use the utility to create drafts of those studies, then +run the database update scripts. The result is new, unreleased draft +versions of studies with modifications made directly through the +database. These studies would then need to be reviewed and released +manually.

      +

      Due to the transactional nature of study updates, particularly when +uploading large files, it is possible a study update is interrupted such +as during a system restart. When this occurs, the study lock, created to +prevent simultaneous updates while one is already in progress, remains +and the study cannot be edited until it is cleared.

      +

      Checking for this condition and clearing it is easy. Open this utility, +check if any locks are listed and remove them. The user should once +again be able to edit their study.

      +

      The user interface provides a convenient way to delete individual +studies but when faced with deleting a large number of studies that do +not conveniently belong to a single dataverse, use the Delete utility.

      +

      Specify studies by their database id singly, as a comma-separated list (1,7,200, etc.), or as a hyphen-separated range (1-1000, 2005, 2500-2700).

      +

      Index Utilities

      +

      Indexing is the process of making study metadata searchable. The Lucene search engine used by the Dataverse Network uses file-based indexes. Normally, any time a study or new study version is released the study information is automatically indexed. Harvesting also indexes studies in small batches as they are harvested. Sometimes this does not occur, such as when the harvest process is interrupted. The index could also become corrupt for some reason though this would be extremely rare.

      +

      The index utility allows for reindexing of studies, dataverses, and the +entire site. Studies and dataverses can be specified by their database +id’s alone, in a comma separated list, or in a hyphenated range: 1-1000. +Use index all sparingly, particularly if you have a large site. This is +a single transaction and should not be interrupted or you will need to +start again. A more flexible approach is to determine the lowest and +highest study ID’s and index in smaller ranges: 1-1000, 1001-2000, etc.

      +

      Note: if for some reason a study change was not indexed, there is an +automatic background process that will detect this, inform the +administrator and will be reindexed once every 24 hours so manually +reindexing is not required.

      +

      Export Utilities

      +

      Export is a background process that normally runs once every 24 hours. +Its purpose is to produce study metadata files in well known formats +such as DDI, DC, MIF, and FGDC that can be used to import studies to +other systems such as through harvesting.

      +

      Sometimes it’s useful to manually export a study, dataverse, any updated +studies, or all studies. Studies and dataverses are specified by +database id rather than global id or handle.

      +

      Export is tied to OAI set creation and Harvesting. To enable harvesting +of a subset of studies by another site, first an OAI set is created that +defines the group of studies. Next, the scheduled export runs and +creates the export files if they’re not already available. It also +associates those studies defined by the set with the set name so future +requests for the set receive updates — additions or deletions from the +set. This way remote sites harvesting the set maintain an updated study +list.

      +

      If you do not want to wait 24 hours to test harvest a newly created set, +use the export utility. Click “Run Export” to export any changed studies +and associate studies to the set. Exporting studies or dataverses alone +will not associate studies to a set, in those cases Update Harvest +Studies must also be run.

      +

      Harvest Utilities

      +

      The Harvest utility allows for on-demand harvesting of a single study. +First select one of the predefined harvesting dataverses which provide +remote server connection information as well as the local dataverse +where the study will be harvested to. Specify the harvest ID of the +study to be harvested. The harvest id is particular to the study and +server being harvested from. It can be obtained from the OAI protocol +ListIdentifiers command, from the harvest log if previously harvested, +or if from another DVN it takes the form: <OAI set alias>//<global id>. +A Dataverse Network study with globalID: hdl:1902.1/10004, from the OAI +set “My Set”, having alias “myset”, would have a harvest identifier of: +myset//hdl:1902.1/10004

      +

      File Utilities

      +

      The Dataverse Network attempts to identify file types on upload to provide more information to an end user. It does this by calling a file type identification library called JHOVE. Though JHOVE is a very comprehensive library, sometimes a file type may not be recognized or is similar to another type and misidentified. For these cases we provide an override mechanism — a list of file extensions and a brief text description. Since these are created after the files have been uploaded, this file utility provides a way to re-identify the file types and furthermore limits this process to specific file types or to studies, specified by database ID singly, as a comma-separated list, or as a hyphen-separated range.

      +

      Import Utilities

      +

      Importing studies usually is done by harvesting study metadata from a +remote site via the OAI protocol. This causes study metadata to be +hosted locally but files are served by the remote server. The Import +utility is provided for cases where an OAI server is unavailable or +where the intent is to relocate studies and their files to the Dataverse +Network.

      +

      At present this requires the help of the network administrator and can +be manually intensive. First, study metadata may need to be modified +slightly then saved in a specific directory structure on the server file +system. Next, the study metadata import format and destination dataverse +is chosen. Last, the top level directory where the study metadata and +files are stored and “Batch Import” is clicked. Because the DDI input +format can be quite complex and usage varies, verify the results are +what’s intended.

      +

      A single study import function is also provided as a test for importing +your study’s metadata syntax but is not meant for actual import. It will +not import associated files.

      +

      Before performing a batch import, you must organize your files in the +following manner:

      +
        +
      1. If you plan to import multiple files or studies, create a master +directory to hold all content that you choose to import.
      2. +
      3. Create a separate subdirectory for each study that you choose to +import. +The directory name is not important.
      4. +
      5. In each directory, place a file called study.xml and use that +file to hold the XML-formatted record for one study. +Note: Do not include file description elements in +the study.xml file. Including those fields results in the +addition of multiple blank files to that study.
      6. +
      7. Also place in the directory any additional files that you choose to +upload for that study.
      8. +
      +

      For an example of a simple study DDI, refer to the Metadata References +section.

      +

      Handle Utilities

      +

      When a study is created, the global ID is first assigned, then registered with handle.net as a persistent identifier. This identifier becomes part of the study’s citation and is guaranteed to always resolve to the study. For the study with global ID, hdl:1902.1/16598 or handle 1902.1/16598, the URL in the citation would be: http://hdl.handle.net/1902.1/16598.

      +

      If for any reason a study is created and not registered or is registered +in a way that needs to be changed, use the Handle utility to either +register currently unregistered studies or to re-register all registered +studies.

      +
      +
      +

      Web Statistics

      +

      The Dataverse Network provides the capability to compile and analyze +site usage through Google Analytics. A small amount of code is embedded +in each page so when enabled, any page access along with associated +browser and user information is recorded by Google. Later analysis of +this compiled access data can be performed using the Google Analytics utility.

      +

      Note: Access to Google Analytics is optional. If access to this utility +is not configured for your network, in place of the Manage Web Usage +menu option is a message +stating: Google Analytics are not configured for this Network.

      +

      To enable Google Analytics:

      +
        +
      1. Create a Gmail account.
      2. +
      3. Go to Google Analytics and create a profile for the server or website domain. You will +be assigned a Web Property ID.
      4. +
      5. Using the Glassfish Admin console, add a JVM option and assign it the value of the newly assigned Web Property ID: -Ddvn.googleanalytics.key=<Web Property ID>
      6. +
      7. Restart Glassfish.
      8. +
      9. It takes about 24 hours after installation and set up of this option for tracking data to become available for use.
      10. +
      +

      Note: Google provides the code necessary for tracking. This has already +been embedded into the Dataverse Network but not the Web Property ID. +That is configured as a JVM option by the network admin when enabling +this feature.

      +

      To view Web Statistics, navigate to:

      +
        +
      • Network home page > Options page > Settings tab > General subtab > Web Statistics
      • +
      • You will be redirected to Google Analytics. Log in using your Gmail account used to +create the profile.
      • +
      +
      +
      +
      +

      Appendix

      +

      Additional documentation complementary to Users Guides.

      +
      +

      Control Card-Based Data Ingest

      +

      As of version 2.2 the DVN supports ingesting plain text data files, in +addition to SPSS and STATA formats. This allows users and institutions +to ingest raw data into Dataverse Networks without having to purchase +and maintain proprietary, commercial software packages.

      +

      Tab-delimited and CSV files are supported. In order to ingest a plain +data file, an additional file containing the variable metadata needs to +be supplied.

      +

      Two Metadata Types Are Supported

      +
        +
      1. A simplified format based on the classic SPSS control card syntax; +this appears as “CSV/SPSS” in the menu on the Add Files page.
      2. +
      3. DDI, an xml format from the Data Documentation Initiative consortium. Choose “TAB/DDI” to ingest a tab file with a DDI metadata sheet.
      4. +
      +

      The specifics of the formats are documented in the 2 sections below.

      +
      +

      CSV Data, SPSS-style Control Card

      +

      Unlike other supported “subsettable” formats, this ingest mechanism +requires 2 files: the CSV raw data file proper and an SPSS Setup file +(“control card”) with the data set metadata. In the future, support for +other data definition formats may be added (STATA, SAS, etc.). As +always, user feedback is welcome.

      +

      The supported SPSS command syntax:

      +

      Please note that it is not our goal to attempt to support any set of +arbitrary SPSS commands and/or syntax variations. The goal is to enable +users who do not own proprietary statistical software to prepare their +raw data for DVN ingest, using a select subset of SPSS data definitional +syntax.

      +

      (In addition to its simplicity and popularity, we chose to use the SPSS +command syntax because Dataverse Network already has support for the SPSS .SAV and .POR formats, so we have a good working knowledge of the SPSS formatting +conventions.)

      +

      The following SPSS commands are supported:

      +
      +
      DATA LIST 
      +
      VARIABLE LABELS 
      +
      NUMBER OF CASES
      +
      VALUE LABELS
      +
      FORMATS (actually, not supported as of now – see below)
      +
      MISSING VALUES
      +
      +

      We support mixed cases and all the abbreviations of the above commands +that are valid under SPSS. For example, both “var labels” and “Var Lab” +are acceptable commands.

      +

      Individual command syntax.

      +

      1. DATA LIST

      +

      An explicit delimiter definition is required. For example:

      +

      DATA LIST LIST(',')

      +

      specifies ',' as the delimiter. This line is followed by the '/' +separator and variable definitions. Explicit type definitions are +required. Each variable is defined by a name/value pair VARNAME

      +

      (VARTYPE) where VARTYPE is a standard SPSS fortran-type +definition.

      +

      Note that this is the only required section. The minimum +amount of metadata required to ingest a raw data file is the delimiter +character, the names of the variables and their data type. All of these +are defined in the DATA LIST section. Here’s an example of a +complete, valid control card:

      +

      DATA LIST LIST(',')
      CASEID (f) NAME (A) RATIO (f)
      .

      +

      It defines a comma-separated file with 3 variables named CASEID, +NAME and RATIO, two of them of the types numeric and one character +string.

      +

      Examples of valid type definitions:

      +
      +
      A8 8 byte character string;
      +
      A character string;
      +
      f10.2 numeric value, 10 decimal digits, with 2 fractional digits;
      +
      f8 defaults to F8.0
      +
      F defaults to F.0, i.e., numeric integer value
      +
      2 defaults to F.2, i.e., numeric float value with 2 fractional digits.
      +
      +

      The following SPSS date/time types are supported:

      +

      type                            format

      +

      ``DATE``                       ``yyyy-MM-dd``

      +

      ``DATETIME``                ``yyyy-MM-dd HH:mm:ss``

      +

      The variable definition pairs may be separated by any combination of +white space characters and newlines. Wrapped-around lines must start +with white spaces (i.e., newlines must be followed by spaces). The +list must be terminated by a line containing a single dot.

      +

      Please note, that the actual date values should be stored in the CSV +file as strings, in the format above. As opposed to how SPSS stores the +types of the same name (as integer numbers of seconds).

      +

      2. VARIABLE LABELS

      +

      Simple name/value pairs, separated by any combination of white space +characters and newlines (as described in section 1 above). The list is +terminated by a single dot.

      +

      For example:

      +
      +
      VARIABLE LABELS
      +
      CELLS "Subgroups for sample-see documentation"
      +
      STRATA "Cell aggregates for sample”
      +
      .
      +
      +

      3. NUMBER OF CASES (optional)

      +

      The number of cases may be explicitly specified. For example:

      +

      num of cases 1000

      +

      When the number of cases is specified, it will be checked against the +number of observations actually found in the CSV file, and a mismatch +would result in an ingest error.

      +

      4. VALUE LABELS

      +

      Each value label section is a variable name followed by a list of +value/label pairs, terminated by a single “/” character. The list of +value label sections is terminated by a single dot.

      +

      For example,

      +
      +
      VALUE labels
      +
      FOO 0 "NADA"
      +
      1 "NOT MUCH"
      +
      99999999 "A LOT"
      +
      /
      +
      BAR 97 "REFUSAL"
      +
      98 "DONT KNOW"
      +
      99 "MISSING"
      +
      /
      +
      .
      +
      +

      5. FORMATS

      +

      This command is actually redundant if you explicitly supply the variable +formats in the ``DATA LIST`` section above.

      +

      NOTE: It appears that the only reason the ``FORMATS`` command exists is that DATA LIST syntax does not support explicit fortran-style format definitions when fixed-field data is defined. So it is in fact redundant when we’re dealing with delimited files only.

      +

      Please supply valid, fortran-style variable formats in the ``DATA +LIST`` section, as described above.

      +

      6. MISSING VALUES

      +

      This is a space/newline-separated list of variable names followed by a comma-separated list of missing values definitions, in parentheses. For example:

      +
      +
      INTVU4 (97, 98, 99)
      +
      The list is terminated with a single dot.
      +
      +

      An example of a valid ``MISSING VALUES`` control card section:

      +
      +
      MISSING VALUES
      +
      INTVU4 (97, 98, 99)
      +
      INTVU4A ('97', '98', '99')
      +
      .
      +
      +
      +
      An example of a control card ready for ingest:
      +
      +
      data list list(',') /
      +  CELLS (2)  STRATA (2)  WT2517 (2)
      +  SCRNRID (f) CASEID (f)  INTVU1 (f)
      +  INTVU2 (f)  INTVU3 (f)  INTVU4 (f)
      +  INTVU4A (A)
      +  .
      +VARIABLE LABELS
      +  CELLS "Subgroups for sample-see documentation"
      +  STRATA "Cell aggregates for sample-see documenta"
      +  WT2517 "weight for rep. sample-see documentation"
      +  SCRNRID "SCREENER-ID"
      +  CASEID "RESPONDENT'S CASE ID NUMBER"
      +  INTVU1 "MONTH RESPONDENT BEGAN INTERVIEW"
      +  INTVU2 "DAY RESPONDENT BEGAN INTERVIEW"
      +  INTVU3 "HOUR RESPONDENT BEGAN INTERVIEW"
      +  INTVU4 "MINUTE RESPONDENT BEGAN INTERVIEW"
      +  INTVU4A "RESPONDENT INTERVIEW BEGAN AM OR PM"
      +  .
      +VALUE labels
      +  CASEID   99999997 "REFUSAL"
      +                                  99999998 "DONT KNOW"
      +                                  99999999 "MISSING"
      +                                  /
      +  INTVU1   97 "REFUSAL"
      +                                  98 "DONT KNOW"
      +                                  99 "MISSING"
      +                                  /
      +  INTVU2   97 "REFUSAL"
      +                                  98 "DONT KNOW"
      +                                  99 "MISSING"
      +                                  /
      +  INTVU3   97 "REFUSAL"
      +                                  98 "DONT KNOW"
      +                                  99 "MISSING"
      +                                  /
      +  INTVU4   97 "REFUSAL"
      +                                  98 "DONT KNOW"
      +                                  99 "MISSING"
      +                                  /
      +  INTVU4A "97" "REFUSAL"
      +                                  "98" "DONT KNOW"
      +                                  "99" "MISSING"
      +                                  "AM" "MORNING"
      +                                  "PM" "EVENING"
      +  .
      +MISSING VALUES
      +  CASEID (99999997, 99999998, 99999999)
      +  INTVU1 (97, 98, 99)
      +  INTVU2 (97, 98, 99)
      +  INTVU3 (97, 98, 99)
      +  INTVU4 (97, 98, 99)
      +  INTVU4A ('97', '98', '99')
      +  .
      +NUMBER of CASES 2517
      +
      +
      +

      DATA FILE.

      +

      Data must be stored in a text file, one observation per line. Both DOS +and Unix new line characters are supported as line separators. On each +line, individual values must be separated by the delimiter character +defined in the DATA LIST section. There may only be exactly (NUMBER OF +VARIABLES - 1) delimiter characters per line; i.e. character values must +not contain the delimiter character.

      +

      QUESTIONS, TODOS:

      +

      Is there any reason we may want to support RECODE command also?

      +

      — comments, suggestions are welcome! —

      +
      +
      +

      Tab Data, with DDI Metadata

      +

      As of version 2.2, another method of ingesting raw TAB-delimited data +files has been added to the Dataverse Network. Similarly to the SPSS control +card-based ingest (also added in this release), this ingest mechanism +requires 2 files: the TAB raw data file itself and the data set metadata +in the DDI/XML format.

      +

      Intended use case:

      +

      Similarly to the SPSS syntax-based ingest, the goal is to provide +another method of ingesting raw quantitative data into the DVN, without +having to first convert it into one of the proprietary, commercial +formats, such as SPSS or STATA. Please note, that in our design +scenario, the DDI files supplying the ingest metadata will be somehow +machine-generated; by some software tool, script, etc. In other words, +this design method is targeted towards more of an institutional user, +perhaps another data archive with large quantities of data and some +institutional knowledge of its structure, and with some resources to +invest into developing an automated tool to generate the metadata +describing the datasets. With the final goal of ingesting all the data +into a DVN by another automated, batch process. The DVN project is also +considering developing a standalone tool of our own that would guide +users through the process of gathering the information describing their +data sets and producing properly formatted DDIs ready to be ingested.

      +

      For now, if you are merely looking for a way to ingest a single +“subsettable” data set, you should definitely be able to create a +working DDI by hand to achieve this goal. However, we strongly recommend +that you instead consider the CSV/SPSS control card method, which was +designed with this use case in mind. If anything, it will take +considerably fewer keystrokes to create an SPSS-syntax control card than +a DDI encoding the same amount of information.

      +

      The supported DDI syntax:

      +

      You can consult the DDI project for complete information on the DDI +metadata (http://icpsr.umich.edu/DDI). +However, only a small subset of the published format syntax is used for +ingesting individual data sets. Of the 7 main DDI sections, only 2, +fileDscr and dataDscr are used. Inside these sections, only a select set +of fields, those that have direct equivalents in the DVN data set +structure, are supported.

      +

      These fields are outlined below. All the fields are mandatory, unless +specified otherwise. An XSD schema of the format subset is also +provided, for automated validation of machine-generated XML.

      +
      <?xml version="1.0" encoding="UTF-8"?>
      +<codeBook xmlns="http://www.icpsr.umich.edu/DDI"\>
      +<fileDscr>
      +        <fileTxt ID="file1">
      +                        <dimensns>
      +                                        <caseQnty>NUMBER OF OBSERVATIONS</caseQnty>
      +                                        <varQnty>NUMBER OF VARIABLES</varQnty>
      +                        </dimensns>
      +        </fileTxt>
      +</fileDscr>
      +<dataDscr>
      +        <!-- var section for a discrete numeric variable: -->
      +        <var ID="v1.1" name="VARIABLE NAME" intrvl="discrete" >
      +                        <location fileid="file1"/>
      +                        <labl level="variable">VARIABLE LABEL</labl>
      +                        <catgry>
      +                                        <catValu>CATEGORY VALUE</catValu>
      +                        </catgry>
      +                …
      +                <!-- 1 or more category sections are allowed for discrete variables -->
      +                        <varFormat type="numeric" />
      +        </var>
      +   <!-- var section for a continuous numeric variable: -->
      +        <var ID="v1.2" name="VARIABLE NAME" intrvl="contin" >
      +                        <location fileid="file1"/>
      +                        <labl level="variable">VARIABLE LABEL</labl>
      +                        <varFormat type="numeric" />
      +        </var>
      +   <!-- var section for a character (string) variable: -->
      +        <var ID="v1.10" name="VARIABLE NAME" intrvl="discrete" >
      +                        <location fileid="file1"/>
      +                        <labl level="variable">VARIABLE LABEL</labl>
      +                        <varFormat type="character" />
      +        </var>
      +        <!-- a discrete variable with missing values defined: -->
      +</dataDscr>
      +</codeBook>
      +
      +
      +

      — comments, suggestions are welcome! —

      +
      +
      +
      +

      SPSS Data File Ingest

      +
      +

      Ingesting SPSS (.por) files with extended labels

      +

      This feature has been added to work around the limit on the length of +variable labels in SPSS Portable (.por) files. To use this +feature, select “SPSS/POR,(w/labels)” from the list of file types on +the AddFiles page. You will be prompted to first upload a text file +containing the extended, “long” versions of the labels, and then +upload the .por file. The label text file should contain one +TAB-separated variable name/variable label pair per line.

      +
      +
      +
      +

      Ingest of R (.RData) files

      +
      +

      Overview.

      +

      Support for ingesting R data files has been added in version 3.5. R +has been increasingly popular in the research/academic community, +owing to the fact that it is free and open-source (unlike SPSS and +STATA). Consequently, more and more data is becoming available +exclusively in RData format. This long-awaited feature makes it +possible to ingest such data into DVN as “subsettable” files.

      +
      +
      +

      Requirements.

      +

      R ingest relies on R having been installed, configured and made +available to the DVN application via RServe (see the Installers +Guide). This is in contrast to the SPSS and Stata ingest - which can +be performed without R present. (though R is still needed to perform +most subsetting/analysis tasks on the resulting data files).

      +

      The data must be formatted as an R dataframe (using data.frame() in +R). If an .RData file contains multiple dataframes, only the 1st one +will be ingested.

      +
      +
      +

      Data Types, compared to other supported formats (Stata, SPSS)

      +
      +
      Integers, Doubles, Character strings
      +

      The handling of these types is intuitive and straightforward. The +resulting tab file columns, summary statistics and UNF signatures +should be identical to those produced by ingesting the same vectors +from SPSS and Stata.

      +

      A couple of features that are unique to R/new in DVN:

      +

      R explicitly supports Missing Values for all of the types above; +Missing Values encoded in R vectors will be recognized and preserved +in TAB files (as ‘NA’), counted in the generated summary statistics +and data analysis.

      +

      In addition to Missing Values, R recognizes “Not a Number” (NaN) and +positive and negative infinity for floating point values. These +are now properly supported by the DVN.

      +

      Also note that, unlike Stata, where “float” and “double” are supported +as distinct data types, all floating point values in R are double +precision.

      +
      +
      +
      R Factors
      +

      These are ingested as “Categorical Values” in the DVN.

      +

      One thing to keep in mind: in both Stata and SPSS, the actual value of +a categorical variable can be both character and numeric. In R, all +factor values are strings, even if they are string representations of +numbers. So the values of the resulting categoricals in the DVN will +always be of string type too.

      +
      +
      New: To properly handle ordered factors in R, the DVN now supports the concept of an “Ordered Categorical” - a categorical value where an explicit order is assigned to the list of value labels.
      +
      +
      +
      +
      (New!) Boolean values
      +

      R Boolean (logical) values are supported.

      +
      +
      +
      Limitations of R data format, as compared to SPSS and STATA.
      +

      Most noticeably, R lacks a standard mechanism for defining descriptive +labels for the data frame variables. In the DVN, similarly to +both Stata and SPSS, variables have distinct names and labels; with +the latter reserved for longer, descriptive text. +With variables ingested from R data frames the variable name will be +used for both the “name” and the “label”.

      +
      +
      Optional R packages exist for providing descriptive variable labels; +in one of the future versions support may be added for such a +mechanism. It would of course work only for R files that were +created with such optional packages.
      +
      +

      Similarly, R categorical values (factors) lack descriptive labels too. +Note: This is potentially confusing, since R factors do +actually have “labels”. This is a matter of terminology - an R +factor’s label is in fact the same thing as the “value” of a +categorical variable in SPSS or Stata and DVN; it contains the actual +meaningful data for the given observation. It is NOT a field reserved +for explanatory, human-readable text, such as the case with the +SPSS/Stata “label”.

      +

      Ingesting an R factor with the level labels “MALE” and “FEMALE” will +produce a categorical variable with “MALE” and “FEMALE” in the +values and labels both.

      +
      +
      +
      +

      Time values in R

      +

      This warrants a dedicated section of its own, because of some unique +ways in which time values are handled in R.

      +

      R makes an effort to treat a time value as a real time instance. This +is in contrast with either SPSS or Stata, where time value +representations such as “Sep-23-2013 14:57:21” are allowed; note that +in the absence of an explicitly defined time zone, this value cannot +be mapped to an exact point in real time. R handles times in the +“Unix-style” way: the value is converted to the +“seconds-since-the-Epoch” Greenwich time (GMT or UTC) and the +resulting numeric value is stored in the data file; time zone +adjustments are made in real time as needed.

      +

      Things get ambiguous and confusing when R displays this time +value: unless the time zone was explicitly defined, R will adjust the +value to the current time zone. The resulting behavior is often +counter-intuitive: if you create a time value, for example:

      +
      +
      timevalue<-as.POSIXct(“03/19/2013 12:57:00”, format = “%m/%d/%Y %H:%M:%OS”);
      +

      on a computer configured for the San Francisco time zone, the value +will be differently displayed on computers in different time zones; +for example, as “12:57 PST” while still on the West Coast, but as +“15:57 EST” in Boston.

      +

      If it is important that the values are always displayed the same way, +regardless of the current time zones, it is recommended that the time +zone is explicitly defined. For example:

      +
      +
      attr(timevalue,”tzone”)<-“PST”
      +
      +
      or
      +
      timevalue<-as.POSIXct(“03/19/2013 12:57:00”, format = “%m/%d/%Y %H:%M:%OS”, tz=”PST”);
      +
      +

      Now the value will always be displayed as “12:57 PST”, regardless of +the time zone that is current for the OS ... BUT ONLY if the OS +where R is installed actually understands the time zone “PST”, which +is not by any means guaranteed! Otherwise, it will quietly adjust +the stored GMT value to the current time zone, yet still +display it with the “PST” tag attached! One way to rephrase this is +that R does a fairly decent job storing time values in a +non-ambiguous, platform-independent manner - but gives no guarantee that +the values will be displayed in any way that is predictable or intuitive.

      +

      In practical terms, it is recommended to use the long/descriptive +forms of time zones, as they are more likely to be properly recognized +on most computers. For example, “Japan” instead of “JST”. Another possible +solution is to explicitly use GMT or UTC (since it is very likely to be +properly recognized on any system), or the “UTC+<OFFSET>” notation. Still, none of the above +guarantees proper, non-ambiguous handling of time values in R data +sets. The fact that R quietly modifies time values when it doesn’t +recognize the supplied timezone attribute, yet still appends it to the +changed time value does make it quite difficult. (These issues are +discussed in depth on R-related forums, and no attempt is made to +summarize it all in any depth here; this is just to make you aware of +this being a potentially complex issue!)

      +

      An important thing to keep in mind, in connection with the DVN ingest +of R files, is that it will reject an R data file with any time +values that have time zones that we can’t recognize. This is done in +order to avoid (some) of the potential issues outlined above.

      +

      It is also recommended that any vectors containing time values +ingested into the DVN are reviewed, and the resulting entries in the +TAB files are compared against the original values in the R data +frame, to make sure they have been ingested as expected.

      +

      Another potential issue here is the UNF. The way the UNF +algorithm works, the same date/time values with and without the +timezone (e.g. “12:45” vs. “12:45 EST”) produce different +UNFs. Considering that time values in Stata/SPSS do not have time +zones, but ALL time values in R do (yes, they all do - if the timezone +wasn’t defined explicitly, it implicitly becomes a time value in the +“UTC” zone!), this means that it is impossible to have 2 time +value vectors, in Stata/SPSS and R, that produce the same UNF.

      +

      A pro tip: if it is important to produce SPSS/Stata and R versions of +the same data set that result in the same UNF when ingested, you may +define the time variables as strings in the R data frame, and use +the “YYYY-MM-DD HH:mm:ss” formatting notation. This is the formatting used by the UNF +algorithm to normalize time values, so doing the above will result in +the same UNF as the vector of the same time values in Stata.

      +

      Note: date values (dates only, without time) should be handled the +exact same way as those in SPSS and Stata, and should produce the same +UNFs.

      +
      +
      +
      +

      FITS File format Ingest

      +

      This custom ingest is an experiment in branching out into a discipline +outside of the Social Sciences. It has been added in v.3.4 as part of the +collaboration between the IQSS and the Harvard-Smithsonian Center for +Astrophysics. FITS is a multi-part file format for storing +Astronomical data (http://fits.gsfc.nasa.gov/fits_standard.html). DVN +now offers an ingest plugin that parses FITS file headers for +key-value metadata that are extracted and made searchable.

      +

      FITS is now listed on the DVN AddFiles page as a recognized file +format. The same asynchronous process is used as for “subsettable” +files: the processing is done in the background, with an email +notification sent once completed.

      +

      Unlike with the “subsettable” file ingest, no format conversion takes +place and the FITS file is ingested as is, similarly to “other +materials” files. The process is limited to the extraction of the +searchable metadata. Once the file is ingested and the study is +re-indexed, these file-level FITS metadata fields can be searched on +from the Advanced Search page, on either the Dataverse or Network +level. Choose one of the FITS file Information listed in the drop +down, and enter the relevant search term. Search results that match +the query will show individual files as well as studies.

      +

      The ingest also generates a short summary of the file contents (number +and type of Header-Data Units) and adds it to the file description.

      +
      +
      +

      Metadata References

      +

      The Dataverse Network metadata is compliant with the DDI schema +version 2. The Cataloging +Information fields associated with each study contain most of the fields +in the study description section of the DDI. That way the Dataverse +Network metadata can be mapped easily to a DDI, and be exported into XML +format for preservation and interoperability.

      +

      Dataverse Network data also is compliant with Simple Dublin +Core (DC) requirements. For imports +only, Dataverse Network data is compliant with the Content Standard +for Digital Geospatial Metadata (CSDGM), Vers. 2 (FGDC-STD-001-1998) (FGDC).

      +

      Attached is a PDF file that defines and maps all Dataverse Network +Cataloging Information fields. Information provided in the file includes +the following:

      +
        +
      • Field label - For each Cataloging Information field, the field label +appears first in the mapping matrix.
      • +
      • Description - A description of each field follows the field label.
      • +
      • Query term - If a field is available for use in building a query, the +term to use for that field is listed.
      • +
      • Dataverse Network database element name - The Dataverse Network +database element name for the field is provided.
      • +
      • Advanced search - If a field is available for use in an advanced +search, that is indicated.
      • +
      • DDI element mapping for imports - For harvested or imported studies, +the imported DDI elements are mapped to Dataverse Network fields.
      • +
      • DDI element mapping for exports - When a study or dataverse is +harvested or exported in DDI format, the Dataverse Network fields are +mapped to DDI elements.
      • +
      • DC element mapping for imports - For harvested or imported studies, +the imported DC elements are mapped to specific Dataverse Network +fields.
      • +
      • DC element mapping for exports - When a study or dataverse is +harvested or exported in DC format, specific Dataverse Network fields +are mapped to the DC elements.
      • +
      • FGDC element mapping for imports - For harvested or imported studies, +the imported FGDC elements are mapped to specific Dataverse Network fields.
      • +
      +

      Also attached is an example of a DDI for a simple study containing +title, author, description, keyword, and topic classification cataloging +information fields suitable for use with batch import.

      +

      image9 +catalogingfields11apr08.pdf

      +

      image10 +simple_study.xml

      +
      +
      +

      Zelig Interface

      +

      Zelig is statistical software for everyone: researchers, instructors, +and students. It is a front-end and back-end for R (Zelig is written in +R). The Zelig software:

      +
        +
      • Unifies diverse theories of inference
      • +
      • Unifies different statistical models and notation
      • +
      • Unifies R packages in a common syntax
      • +
      +

      Zelig is distributed under the GNU General Public License, Version 2. +After installation, the source code is located in your R library +directory. You can download a tarball of the latest Zelig source code +from http://projects.iq.harvard.edu/zelig.

      +

      The Dataverse Network software uses Zelig to perform advanced +statistical analysis functions. The current interface schema used by the +Dataverse Network for Zelig processes is in the following location:

      +

      Criteria for Model Availability

      +

      Three factors determine which Zelig models are available for analysis in +the Dataverse Network:

      +
        +
      • Some new models require data structures and modeling parameters that +are not compatible with the current framework of the Dataverse Network +and other web-driven applications. These types of models are not +available in the Dataverse Network.
      • +
      • Models must be explicitly listed in the Zelig packages to be used in +the Dataverse Network, and all models must be disclosed fully, including +runtime errors. Zelig models that do not meet these specifications are +excluded from the Dataverse Network until they are disclosed with a +complete set of information.
      • +
      • An installation-based factor also can limit the Zelig models available +in the Dataverse Network. A minimum version of the core software package +GCC 4.0 must be installed on any Linux OS-based R machine used with the +Dataverse Network, to install and run a key Zelig package, MCMCpack. If +a Linux machine that is designated to R is used for DSB services and +does not have the minimum version of the GCC package installed, the +Dataverse Network loses at least eight models from the available +advanced analysis models.
      • +
      +

      image11 +configzeliggui.xml

      +
      +
      +
      + + +
      +
      +
      +
      + +
      +
      +
      + + + + + \ No newline at end of file diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/genindex.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/genindex.html Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,256 @@ + + + + + + + + + + + + Index — The Harvard Dataverse Network 3.6.1 documentation + + + + + + + + + + + + + + +
      +
      +
      + +
      +
      +
      + + +

      Index

      + +
      + _ + | B + | E + | G + | M + | N + | P + | S + +
      +

      _

      + + +
      + +
      + __main__ +
      + +
      + +
      module +
      + +
      +
      + +

      B

      + + +
      + +
      BNF +
      + +
      + +

      E

      + + +
      + +
      + execution +
      + +
      + +
      context +
      + +
      +
      + +

      G

      + + +
      + +
      grammar +
      + +
      + +

      M

      + + +
      + +
      + module +
      + +
      + +
      __main__ +
      + + +
      search path +
      + + +
      sys +
      + +
      +
      + +

      N

      + + +
      + +
      notation +
      + +
      + +

      P

      + + +
      + +
      + path +
      + +
      + +
      module search +
      + +
      +
      + +

      S

      + + + +
      + +
      + search +
      + +
      + +
      path, module +
      + +
      + +
      syntax +
      + +
      + +
      + sys +
      + +
      + +
      module +
      + +
      +
      + + + +
      +
      +
      +
      + +
      +
      +
      + + + + + \ No newline at end of file diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/index.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/index.html Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,413 @@ + + + + + + + + + + Dataverse Network Guides — The Harvard Dataverse Network 3.6.1 documentation + + + + + + + + + + + + + + + +
      +
      +
      + +
      +
      +
      + +
      +

      Dataverse Network Guides

      +

      Contents:

      +
      + +
      +
      +

      The execution context

      +
      +
      +
      +

      Indices and tables

      + +
      + + +
      +
      +
      +
      + +
      +
      +
      + + + + + \ No newline at end of file diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/objects.inv Binary file DVN-web/installer/dvninstall/doc/guides/objects.inv has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/search.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/search.html Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,128 @@ + + + + + + + + + + Search — The Harvard Dataverse Network 3.6.1 documentation + + + + + + + + + + + + + + + + + + +
      +
      +
      + +
      +
      +
      + +

      Search

      +
      + +

      + Please activate JavaScript to enable the search + functionality. +

      +
      +

      + From here you can search these documents. Enter your search + words into the box below and click "search". Note that the search + function will automatically search for all of the words. Pages + containing fewer words won't appear in the result list. +

      +
      + + + +
      + +
      + +
      + +
      +
      +
      +
      + +
      +
      +
      + + + + + \ No newline at end of file diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/doc/guides/searchindex.js --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/doc/guides/searchindex.js Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,1 @@ +Search.setIndex({envversion:42,terms:{prefil:2,show_bug:5,prefix:4,whose:2,educ:2,frequencyofdatacollect:2,networkdata:4,tweet:[2,4],editnetworkprivilegesservicebean:5,authus:0,under:[5,2,4],xmx512m:4,formatschema:0,digit:[],everi:[5,2,4],risk:[5,4],caffein:0,govern:2,affect:2,servlet:[],viewabl:2,four:[2,4],school:2,scholar:2,correct:[5,2,4],vector:[1,2],ecmerg:5,ifac:[],direct:[5,2,4],commerci:2,consequ:[1,2],second:[1,2],aggreg:[0,2],junit_4:[],even:[1,2,4],postgres_databas:4,neg:[1,2],asid:[5,2],listidentifi:2,conduct:2,"new":[],databank:[],ever:[2,4],fdbfe57:5,distributorcontactaffili:2,abov:[0,1,2,5,4],createdb:4,never:2,here:[0,1,2,5,4],met:2,"1tawps4rouqaiw":2,path:5,pleasur:[],metadatasearchresult:0,acceptor:4,reenabl:2,everyon:2,precis:[1,2],datetim:[2,4],permit:[2,4],parlanc:4,portabl:2,collectionmod:2,brought:2,unix:[5,1,2,4],txt:[0,2,4],seriesinform:2,unit:[0,2,4],describ:[0,5,2,4],would:[1,2,4],call:[2,4],recommend:[5,1,2,4],decentr:2,timeperiodcoveredend:2,type:[],tell:4,notif:[],aggressiveheap:4,geographicunit:2,notic:[1,2,4],warn:[5,2,4],hold:[0,5,2,4],unpack:[2,4],must:[5,1,2,4],join:2,work:[],introduc:4,topicclassvalu:2,root:[5,2,4],undirect:2,searchfieldbundl:5,overrid:[2,4],workbench_help:[],give:[1,2,4],indic:[],cautiou:5,captcha:4,dsun:4,unavail:[0,2],want:[5,2,4],hog:4,end:[2,4],prepopul:2,ordinari:2,vagrant:4,dsbingest:4,how:[],rhistoryfil:2,answer:[5,2,4],disappoint:5,config:4,catgri:2,updat:[5,2,4],partialexclud:0,recogn:[1,2],after:[5,2,4],wrong:[2,4],os_files_xxlarg:[],studyfil:0,parallel:5,demonstr:0,attempt:[0,1,2,5,4],fyxlawzria:2,credenti:[0,5],exclud:[0,2],receiv:[5,2],swordprofil:0,pg_dump:4,environ:[],reloc:2,enter:[2,4],exclus:[1,2],lambda:2,ord
er:[],indeped:[],oper:[0,2,4],feedback:2,araxi:5,diagnos:4,offici:[],orang:2,becaus:[5,1,2,4],jpeg:0,privileg:2,japan:[1,2],flexibl:[2,4],vari:2,murrai:2,fit:[],mydv:2,fix:[5,2],quantit:[2,4],persist:[0,5,2,4],comprehens:2,directaccess:0,hidden:2,coffeemil:0,easier:[2,4],them:[5,2,4],thei:[5,1,2,4],proce:4,safe:[2,4],opendiff:5,"break":2,db_name:4,promis:[],interrupt:2,choic:[2,4],geographiccoverag:[0,2],dvn333:0,roast:0,each:[0,2,4],side:[2,4],bone:4,mean:[5,1,2,4],sample_onli:2,logo:2,extract:2,pgsq1:4,network:[],newli:[5,2,4],content:[0,3,2,5,4],rewrit:[],eprint:[],"6ceb24f":5,gov:2,dsb:[2,4],linear:2,navig:2,situat:2,strata:2,filemimetyp:0,standard:[0,1,2,4],nth:2,fixm:[],md5:4,filter:[2,4],regress:2,portal:[],confus:[1,2,4],licens:[0,2],indexserviceloc:5,rang:[2,4],independ:[1,2],rank:2,necess:4,restrict:[0,2],sword2rubi:0,unlik:[1,2],wasn:[1,2],authenti:0,agre:2,primari:[5,2],rewritten:4,meatadatasearchfield:0,top:[0,2,4],sometim:2,master:[],too:[1,2,4],similarli:[1,2,4],john:0,listen:4,consol:[5,2,4],circl:2,namespac:[0,4],tool:[0,5,2,4],thisfeatur:0,took:2,varformat:2,thedata:[2,4],crawl:[2,4],technic:[2,4],target:[2,4],keyword:[0,2],disableexplicitgc:4,provid:[0,1,2,5,4],tree:[],zero:[0,2],googleanalyt:[2,4],matter:[5,1,2],minut:[5,2,4],isreferencedbi:0,boston:[1,2],modern:4,mind:[1,2],udp:4,raw:2,inherit:2,keywordvocabulari:2,seem:2,incompat:2,minu:[],"3ccahxgwy7kwptqg6m9m4":[],recreat:2,icpsr:[0,2],inexpens:2,latter:[1,2],transmit:0,simplifi:[2,4],perl_lib:[],though:[1,2],usernam:[0,5,2,4],object:[],regular:[5,4],specifi:[0,2,4],letter:2,filedownloadinfo:0,keystrok:2,simplic:2,doi:[0,2,4],don:[],doc:[0,4],pgadmin:4,doe:[],bracket:2,tech:[],wildcard:2,unchang:[5,4],section:[],came:[],pkp:[],visitor:2,containth:2,syntax:2,radio:2,protocol:[0,2,4],involv:2,absolut:4,exactli:[2,4],acquir:[5,4],menu:[2,4],explain:[2,4],apach:[],theme:2,busi:[2,4],folder:[],axiom:[],stop:4,compli:2,coast:[1,2],report:[2,4],recalcul:2,net:[0,5,2,4],devguid:[],bar:2,emb:2,method:
[0,2],cleanli:5,septemb:[],fieldnam:0,studyui:5,elimin:2,roadmap:[],num:2,mandatori:[0,2],result:[],respons:[0,2,4],fail:[5,2,4],relatedpubl:2,best:[2,4],subject:[0,2],awar:[1,2],hopefulli:4,wikipedia:[],hmdc:[5,2,4],simplest:2,awai:[2,4],approach:2,xvf:4,accord:5,datacit:0,manpag:0,extens:2,harvard:[5,2,4],protect:[],biblcit:0,howev:[5,2,4],against:[1,2],facet:[5,2],unc:[],unf:[1,2],logic:[1,2],countri:2,login:[],seri:2,com:[0,5,2,4],fileformat:0,dataversehasbeenreleas:0,imagethumb:0,foobar:0,ulimit:4,height:2,googleanalyticstrackingcod:4,diff:5,trust:[5,4],assum:[5,2],summar:[1,2],speak:[2,4],haxx:0,requestor:2,chrome:2,three:[5,2],github:[],listsect:2,much:[5,2,4],interest:[0,2,4],basic:[0,2,4],deviationsfromsampledesign:2,quickli:2,life:0,vimdiff:5,mod_mbox:[],adminadmin:4,ani:[0,1,2,5,4],child:[5,2],spin:4,emploi:2,dspace:[],ident:[5,1,2,4],forum:[1,2],gnu:2,properti:[5,2,4],sourceforg:[5,4],calcul:2,publicli:2,vagu:2,privatekei:4,bmckinnei:[],indexmessag:4,timefram:2,tabl:[],conf:[5,4],sever:[5,2],n2t:4,studyrelpubl:0,prior:2,perform:[1,2,4],suggest:[2,4],complex:[1,2,4],split:4,splu:2,complet:[0,5,2,4],blue:2,hand:2,fairli:[1,2,4],requestprocess:4,refin:2,kib:5,dsbqueueconnectionfactori:4,scenario:[0,2,4],thu:2,postgresql84:4,domainnam:4,bugzilla:5,contact:[],pwrd:4,thi:[0,1,2,5,4],dvownerid:2,everyth:4,left:[2,4],kindofdata:[0,2],identifi:[],just:[5,1,2,4],laptop:4,ordin:2,human:[1,2,4],kdiff3:5,yet:[0,1,2,5,4],languag:0,previous:2,onload:2,easi:[2,4],mix:2,had:[2,4],extact:2,fortran:2,spread:4,collections:2,els:5,ffffff:2,save:[5,2,4],explanatori:[1,2],opt:2,applic:[],dvnoai:4,setateln9ubu2:[],preserv:[1,2,4],disposit:0,javaserver2:[],background:2,sampleurl:2,measur:[2,4],daemon:[2,4],specif:[0,2,4],arbitrari:2,reassign:2,greenwitch:[1,2],grizzli:[],www:[0,5,2,4],right:[0,5,2,4],old:[2,4],deal:2,wt2517:2,interv:4,somehow:2,swordv2collectionservlet:[],intern:[2,4],partialinclud:0,successfulli:5,total:5,bottom:2,formatavail:0,ear:4,condit:[0,2],gsfc:2,localho
st:[5,4],core:[],plu:2,sfu:[],studynotesubject:2,widget:2,insecur:[],promot:2,peer:2,peet:0,chapter:[5,4],postgresql:[],slightli:2,unfortun:[],unsav:2,accessgr:0,commit:[],produc:[0,1,2,4],asquickstartup:4,"float":[1,2],encod:[1,2],bound:2,down:[5,2,4],creativ:0,"519cd8c":5,wrap:2,opportun:2,storag:[2,4],javax:4,git:[],suffici:[5,2,4],support:[],"class":[],avail:[],width:2,reli:[1,2],fraction:2,jane:0,war:5,lowest:2,head:[5,2],form:[0,1,2,4],forc:[],pagerank:2,dvn_server:0,formatnam:0,"true":[0,2,4],freenod:[],reset:2,attr:[1,2],createus:4,geospati:2,indexservicebean:5,unmerg:5,maximum:[2,4],until:[5,2],absenc:[1,2],fundament:2,later:[2,4],classic:2,ddoi:4,"abstract":[0,2],distributiond:2,unreleas:2,diagnost:0,exist:[0,1,2,5,4],baseurlstr:4,encrypt:[5,4],tkdiff:5,when:[0,1,2,5,4],actor:2,role:[],test:0,roll:2,timemethod:2,univari:2,node:2,intend:[5,2,4],irclog:[],tzone:[1,2],asterisk:2,intens:2,intent:2,consid:[],sql:4,femal:[1,2],longer:[1,2],furthermor:[2,4],anywher:2,studylevelerrornot:2,dateofcollectionend:2,idno:0,ignor:2,datafram:[1,2],time:[],daili:2,"1st":[1,2],osx:[],mydomain:2,concept:[1,2],queueconnectionfactori:4,skip:2,global:[0,2,4],snip:5,studycomplet:2,menubar:2,hierarch:2,decid:[2,4],middl:2,depend:[5,2,4],system:[],zone:[1,2],graph:2,proxim:2,umich:[0,2],"2b88b68":5,sourc:[],string:[],embark:4,cook:4,word:[5,2],brows:2,intvu4:2,foo:2,contenttyp:0,hour:[2,4],administr:[],level:[0,1,2,4],did:4,gui:4,item:2,team:[5,2,4],div:2,databasenam:4,metsdspacesip:[],prevent:[2,4],brave:4,sign:[5,4],patient:4,port:4,bold:[5,4],appear:[5,2,4],samplingerrorestim:2,placeofaccess:2,uniform:2,current:[],rephras:[1,2],domain1:4,meld:5,portnumb:4,deriv:4,zelig:[],gener:[],agreement:2,satisfi:2,modif:[2,4],address:[5,2,4],along:2,redmin:[5,2],wait:2,box:[],dublic:[],consumerkei:4,invit:2,dublin:[],unport:0,queue:4,simplezip:0,poisson:2,extrem:2,commonli:2,elect:2,extra:[0,5,4],modul:[3,4],glassfish3:4,instal:[],should:[5,1,2,4],regex:2,memori:4,catalogingfields11apr08:2
,univers:[0,2,4],visit:[0,4],subvers:[],everybodi:5,criteria:2,scope:[0,2],checkout:5,minim:[2,4],admcredfil:4,peopl:[5,2],timeperiodcoveredstart:2,appendix:[],examin:2,effort:[1,2],behalf:2,dvndbpool:4,graphic:[2,4],local:[0,5,2,4],prepar:[5,2],uniqu:[1,2,4],descriptor:4,can:[0,1,2,5,4],tabul:[2,4],whiteboardofswordv2api:[],purpos:[0,2],stream:0,predict:[1,2],handle_technical_manu:4,topic:2,heard:4,critic:4,agenc:[0,2],occur:2,alwai:[1,2],multipl:[5,1,2,4],write:[0,5,2,4],anyon:[0,4],xhtml:[5,2,4],purl:0,map:[],product:[0,2,4],max:[2,4],membership:2,mai:[0,1,2,5,4],grow:2,goal:2,oai_dc:0,practic:[1,2,4],relatedmateri:[0,2],divid:2,favorit:4,bean:0,programmat:[0,2],"switch":[],combin:2,gamma:2,subcollect:2,talk:[5,4],dvnoaiplugin:[2,4],approv:2,tablular:2,entitl:2,still:[5,1,2,4],dynam:2,entiti:2,p4merg:5,disconnect:2,monitor:[2,4],polici:[2,4],platform:[5,1,2,4],window:[2,4],sample_only_http:2,mail:[],main:[5,2,4],metadatasearchablefield:0,financi:0,initi:[2,4],nation:2,interview:2,therebi:2,verifi:[5,2,4],now:[0,1,2,5,4],discuss:[1,2,4],nor:2,introduct:[0,4],term:[],csv:[],name:[],perspect:0,didn:4,separ:[5,2,4],januari:4,compil:2,domain:[2,4],javas:4,citat:[0,2],individu:[],receipt:[],continu:[5,2,4],significantli:4,year:2,distributor:2,happen:2,canada:0,subnet:4,shown:2,accomplish:2,"3rd":4,space:[2,4],faculti:[],ejb:[],profil:[0,2],factori:4,integr:2,earlier:[2,4],migrat:2,million:2,dataverse_network_integr:[],theori:2,enterprisedb:[],org:[0,5,2,4],"byte":[0,5,2,4],card:[],care:[2,4],peform:2,weibul:2,recod:2,version2:0,frequenc:2,dvninstall_v3_4:5,refus:2,dvninstall_v3_0:4,thing:[],place:[5,2],djhove:4,imposs:[1,2],frequent:4,first:[],origin:[5,1,2],directli:[0,2,4],distributorcontactemail:2,onc:[5,2,4],beverag:0,fast:5,oppos:2,custom:[2,4],open:[],predefin:2,size:[2,4],given:[0,1,2,4],mergetool:5,sheet:2,silent:[2,4],convent:2,virgin:4,availabilitystatu:2,draft:2,averag:[2,4],fgdc:2,confidentialitydeclar:2,conveni:2,cite:2,copi:[],artifact:[5,2],csdgm:2,logis
t:2,than:[2,4],png:0,serv:[0,2,4],jhove:2,balanc:4,lucenc:2,were:[5,1,2,4],posit:[1,2],browser:[5,2,4],pre:[5,4],fork:[5,4],sai:5,san:[1,2],nicer:5,openarch:0,pro:[1,2],svnroot:4,argument:0,dash:[],sav:2,producernam:2,engin:2,squar:2,destroi:[2,4],consortium:2,libpath:4,note:[0,1,2,5,4],ideal:5,maintain:[2,4],take:[5,2,4],advis:2,noth:[5,2],begin:[2,4],sure:[5,1,2,4],controloper:2,normal:[5,1,2,4],track:[],studynotetext:2,compress:5,beta:2,varqnti:2,abus:[2,4],pair:[2,4],america:2,homepag:[5,2,4],icon:[2,4],renam:[5,2],oaihandl:[2,4],adopt:5,drive:2,quantiti:2,iqss:[5,2,4],runtim:2,axi:2,subtab:2,shop:0,heap:4,show:[5,2,4],delta:5,permiss:[],configzeliggui:2,corner:2,help:[5,2,4],explicitli:[1,2],ratio:2,transact:[2,4],activ:[5,2,4],enough:4,analyz:2,nvie:5,analys:2,over:[0,5,2,4],carolina:[],variou:[5,2],get:[],installmodul:4,cannot:[5,1,2],study2:0,requir:[],thumbnail:[0,4],mediat:[],where:[0,1,2,5,4],summari:[1,2],wiki:[2,4],samplingprocedur:2,advsearchpag:5,rmi:4,detect:2,review:[1,2],enumer:[0,2],label:[],behind:[2,4],listinfo:[],between:2,"import":[5,1,2,4],paramet:[0,2,4],across:[2,4],parent:[5,2],screen:2,"192m":4,come:[2,4],distributorcontact:2,pertain:2,job:[1,2,4],audienc:5,improv:[2,4],inat:[],among:2,reindex:2,jdbc4:4,rescal:2,datvers:2,por:[],dir:4,pop:2,cancel:2,coupl:[1,2],marc:2,west:[1,2],mark:[5,2,4],listen_address:4,reflex:5,fedoraproject:4,addfil:2,resolut:[5,2],rubi:0,vartyp:2,workspac:2,i18n:[],those:[1,2,4],"case":[5,1,2,4],interoper:[0,2],thesi:2,keygen:5,testus:0,hdl:[0,2,4],henc:4,sameus:[5,4],chkconfig:4,xyxz:0,develop:[],dvnapi_v1_0:[],media:0,same:[0,1,2,5,4],epoch:[1,2],html:[0,5,2,4],"6_31":4,eventu:5,week:2,ifram:2,finish:[5,2,4],screenshot:[],leonid:0,nest:2,assist:2,driver:4,someon:5,driven:[2,4],capabl:2,mani:[5,2,4],extern:[0,2],appropri:[5,2],selectsupport:0,markup:2,without:[0,1,2,4],zipfile_:2,scholarship:[],execut:[],dateofdeposit:2,rest:4,weekli:2,b7fae01:5,svr_1:4,touch:2,passphras:[5,4],speed:2,versu:2,death:2,specialpermi
ss:2,except:[2,4],param:0,blog:[],color:2,swordv2mediaresourceservlet:[],blob:5,netapp:4,real:[0,1,2,4],hover:5,around:[5,2],read:[],period:2,traffic:4,batchdownload:4,world:2,pst:[1,2],javaclient2:0,integ:[],swordapp:0,benefit:4,either:[1,2,4],manag:[],yyyi:[0,1,2,4],authent:[0,5,2,4],constitut:2,deselect:2,slice:2,confirm:2,definit:[5,2],achiev:2,ddi:[],exit:[2,4],testiqss:2,complic:[5,4],decent:[1,2],undertak:4,power:2,fulli:[5,2,4],docroot:4,"throw":4,src:[5,2,4],swordappv2:0,central:2,firstnam:0,spss:[],degre:2,eigenvalu:2,lucen:[],act:2,studylist:5,processor:4,mulitnomi:2,gari:2,terminolog:[1,2],unregist:2,disablecustom:2,quietli:[0,1,2],studylistingpag:5,yyi:4,your:[0,5,2,4],log:[5,2,4],her:[2,4],area:2,overwrit:2,start:[0,5,2,4],"3b82f88":5,interfac:[],low:2,lot:2,strictli:4,machin:[2,4],svn:4,verb:0,xms512m:4,bundl:[],regard:[5,2],diffus:5,categor:[1,2],facetresultui:5,faster:4,extend:[],pull:[],manifestpag:[2,4],netbeansproject:5,possibl:[1,2,4],"default":[],insert:[5,2,4],unusu:2,prerequisist:4,embed:[0,2],connect:[5,1,2,4],sword_:[],dcterm:0,certain:2,censor:2,deep:[],strongli:[0,5,2,4],deem:2,dvn_url:2,proport:2,fill:[5,2,4],incorrect:2,again:[5,2,4],googl:[],upcom:[5,2],carriag:2,field:[],binomi:2,valid:[0,2,4],you:[],db_usernam:4,symbol:2,vertex:2,searchhit:0,dropbox:2,pool:4,reduc:2,unselect:2,descript:[],sword2:0,potenti:[0,1,2,4],latestversionst:0,cpu:4,cdlib:4,represent:[1,2],all:[0,1,2,5,4],dist:4,consider:[0,2],intrvl:2,lack:[1,2],dollar:2,connectionattribut:4,month:2,abil:[2,4],follow:[0,5,2,4],disk:[5,2,4],white:2,articl:[0,5,2],init:4,queri:[0,2,4],timerserv:4,macosx:[5,4],straightforward:[5,1,2,4],subsett:[1,2,4],far:[],faq:[],util:[],sparingli:2,candid:5,mechan:[1,2],tmprwsfile:2,failur:2,veri:[5,1,2,4],ticket:[5,2],prettypictur:0,consumersecret:4,trench:[],adjust:[5,1,2,4],wst:[],user_nam:4,enterpris:4,neighborhood:2,ten:2,sync:2,past:[2,4],rate:2,design:2,nbm:[],pass:[2,4],further:2,sub:2,sun:[5,4],sum:2,abl:[5,2,4],brief:2,dcom:4,abbrevi
:2,version:[],xom:[],"public":[0,5,2,4],contrast:[1,2],essenc:5,hasn:2,full:[0,5,2,4],themselv:2,variat:2,nb6:[],infin:[1,2],free:[1,2],trunk:4,ver:2,modifi:[0,1,2,5,4],legend:2,valu:[],search:[],ahead:5,addfilespag:2,amount:[2,4],vdcnetwork:4,probit:2,social:2,action:[2,4],narrow:2,via:[],curat:2,transit:4,vim:5,filenam:[0,2,4],href:2,inappropri:2,famili:2,establish:[2,4],select:[],versionnumb:5,distinct:[1,2],regist:[2,4],two:[5,2],coverag:[0,2],morn:2,basi:2,toggl:2,more:[0,1,2,5,4],desir:[0,5,2],flag:2,particular:[5,2,4],known:[5,2,4],compani:0,cach:[5,4],psql:[5,4],none:[0,1,2],jdk:[5,4],hous:2,outlin:[1,2,4],dev:[],histori:2,buildupd:5,remain:[2,4],paragraph:4,"28protocol":[],male:[1,2],pgdriver:4,"10ece42ec9236ccd2e58eea2e69c7b54fc783133":[],prompt:[5,2,4],challeng:4,registr:[2,4],accept:[5,2,4],minimum:[2,4],phrase:4,uncheck:2,cours:[1,2,4],xxxx:4,newlin:2,secur:[],rather:[2,4],anoth:[1,2,4],perhap:[2,4],snippet:2,reject:[1,2],characteristicofsourc:2,css:2,dvn_subpag:2,resourc:[],overcommit:4,facetui:5,inquiri:2,"600px":2,associ:[2,4],"short":[5,2],footer:2,overnight:5,ambigu:[1,2],caus:2,callback:4,alphabet:2,cleaningoper:2,checkbox:2,rotat:4,notifymailsess:4,octob:5,paper:2,through:[2,4],unitofanalysi:2,hierarchi:2,dont:2,style:[],exact:[1,2],bypass:2,might:[5,2],alter:[2,4],good:2,"return":[0,5,2,4],sentenc:2,largest:2,framework:2,ecolog:2,intvu2:2,jdbc30datasourc:4,intvu3:2,citationrequir:2,troubleshoot:2,intvu1:2,instruct:[5,2,4],refresh:2,infer:2,easili:[2,4],token:4,compris:2,found:[0,2,4],intervent:2,id_rsa:5,truncat:2,england:0,weight:2,hard:4,idea:2,realli:[5,2,4],expect:[0,1,2,5,4],twtter:2,todo:2,event:[2,4],closest:2,datakind:0,basicsearchfrag:5,fielddescript:0,publish:[],research:[0,1,2],print:[2,4],qualifi:[],postgr:[],proxi:2,advanc:[2,4],pub:5,labl:2,reason:[5,2,4],base:[],ask:[5,2,4],earliest:2,workstat:4,"40mail":[],thread:[5,4],script:[],american:2,perman:[0,2,4],undergo:2,assign:[1,2,4],filedscr:2,feed:0,major:2,notifi:2,your_usernam:5,l
astnam:0,placehold:4,dvn_client:0,done:[5,1,2,4],least:[5,2,4],blank:[5,2,4],stabl:4,miss:[1,2],stage:[5,4],differ:[1,2,4],exponenti:2,geogcov:0,interact:[2,4],unrestrict:2,construct:2,otherrefer:2,authornam:[0,2],esac:4,scheme:2,store:[5,1,2],dichotom:2,adher:2,otherid:[0,2],xmln:[0,2],option:[],relationship:2,selector:2,useparalleloldgc:4,part:[0,5,2,4],pars:2,consult:[2,4],webadmin:[],std:2,king:2,kind:[0,2],protocoloperations_editingcontent_metadata:0,whenev:5,remot:[],remov:[5,2],authmethod:0,jqueri:2,overcommit_memori:4,reus:5,arrang:4,schema:[0,2],set_dvn_url:2,comput:[1,2],beforehand:2,packag:[0,1,2,5,4],dedic:[1,2,4],"null":2,built:[0,5,4],lib:[5,4],self:2,also:[0,1,2,5,4],authoraffili:2,build:[],distribut:[5,2,4],exec:[2,4],unsur:2,previou:[5,2],reach:2,chart:2,jst:[1,2],most:[0,1,2,5,4],mcmcpack:2,plan:2,maco:4,alpha:2,charg:2,ddvn:[2,4],exo:2,clear:[0,2,4],cover:2,ext:[],clean:5,repli:4,subgroup:2,awesom:4,think:[2,4],somewhat:2,fink:4,particularli:2,cdr:[],astrophys:2,fine:5,find:[],firewal:[2,4],bibliographiccit:0,solut:[1,2],"_hmpgmainmessag":2,leisur:4,factor:[],hit:[5,4],unus:0,express:0,nativ:2,mainten:[2,4],banner:2,restart:[5,2,4],datadscr:[0,2],formattyp:0,common:[],hype:2,dta:2,certif:[],set:[],art:[],timevalu:[1,2],creator:[0,2],see:[0,1,2,5,4],bare:4,arg:0,reserv:[1,2],someth:2,particip:4,won:[5,2],"800px":2,nontrivi:2,experi:[5,2,4],xss128l:4,altern:[0,2],signatur:[1,2],popup:2,responser:2,numer:[1,2,4],javascript:2,isol:2,complementari:2,distinguish:2,classnam:4,popul:[2,4],disclaim:2,j2ee:4,toward:[5,2],last:[5,2],delimit:2,alon:2,bonacich:2,context:[],pdf:[2,4],accessrestrict:0,srdpe:4,load:4,accesspermiss:0,simpli:[5,2,4],point:[5,1,2,4],schedul:[],neatli:[],header:[0,2],"724px":2,"100mb":4,suppli:[0,1,2,5,4],throughout:4,backend:2,identif:2,vertic:2,framebord:2,due:2,empti:0,whom:2,secret:4,screener:2,strategi:5,files:0,adventur:4,imag:[0,5,2,4],convert:[1,2,4],unnecessarili:[5,4],fund:2,understand:[1,2,4],demand:2,instructor:2,creative
common:0,look:[0,5,2,4],straight:4,histor:[],durat:2,pace:4,"while":[5,1,2,4],unifi:2,behavior:[1,2],error:[],anonym:2,mbox:[],loos:2,manner:[1,2],readi:[5,2],readm:5,jpg:[0,2],itself:[5,2],cento:4,fedora:[],grant:2,belong:[0,2],cert:[],conflict:[5,2],higher:[2,4],dgc:4,howto:5,formatmim:0,moment:2,temporari:4,user:[],repopul:2,oerpub:[],chang:[],recent:[5,2],lower:2,task:[],equival:[2,4],older:[5,4],searchabl:[0,2],parenthes:2,pdurbin:5,person:[0,2,4],reflect:4,academ:[1,2],propos:2,explan:4,dataverse_alia:0,collabor:2,administ:2,regardless:[1,2],tech_manu:4,relatedstudi:2,timevar:[],entrynam:4,ee6:[],wai:[5,1,2,4],apr:[],fundingag:2,input:2,subsequ:2,bin:[5,4],complaint:4,vendor:4,obsolet:[],format:[],intuit:[1,2],indexmessagefactori:4,bia:2,datasourc:[0,2,4],dvdc:4,accessservicessupport:0,outright:2,success:[5,2],xxdiff:5,resolv:[5,2],manifest:[2,4],collect:[],popular:[1,2,4],encount:4,studynotetyp:2,often:[1,2],acknowledg:2,visibl:2,some:[0,1,2,5,4],back:[5,2,4],understood:2,"30k":4,sampl:[],mirror:4,affili:2,scale:4,per:[0,5,2],recognit:2,substitut:2,larg:[2,4],slash:2,inetaddress:4,proc:4,cgi:5,myset:2,run:[],thefiledownloadinfo:0,proddat:0,step:[],dataversenotifi:4,subtract:2,thedataorg:2,vdcnetd:4,row:[2,4],materi:[0,2,4],datacollectionsitu:2,idl:2,dialog:5,zellig:2,block:2,file1:2,emphasi:4,primarili:4,within:[2,4],contributor:[0,2],announc:2,span:2,spam:[2,4],question:[2,4],submit:2,biom:[],adjac:2,includ:[0,5,2,4],suit:[2,4],forward:[5,2],xpath:0,properli:[1,2,4],repeatedli:2,subgraph:2,link:[],translat:2,newer:4,searchablefield:0,line:[5,2,4],murphi:5,info:[0,5,2],utc:[1,2],utf:2,consist:[5,2,4],caseid:2,highlight:5,similar:[2,4],impl:[],parser:[],accesstosourc:2,doesn:[1,2],repres:[0,2],datacollector:2,incomplet:2,choosedataverseforcreatestudi:5,singli:2,sequenti:2,nan:[1,2],invalid:2,servicenam:0,nat:2,nice:5,usert:4,gigabyt:[2,4],studyid:[0,2],elsewher:2,meaning:[1,2],posixct:[1,2],keywordvalu:2,msg00331:[],originofsourc:2,abdera:[],algorithm:[1,2],de
pth:[1,2],nasa:2,dot:2,fresh:4,scroll:2,adminstr:2,code:[],partial:[0,2],edg:2,scratch:5,maxperms:4,edu:[0,5,2,4],privat:[5,2,4],handleregistr:4,sensit:2,documen:0,dvndb:4,friendli:2,send:[2,4],macport:4,sens:[2,4],sent:2,deactiv:2,unzip:[2,4],disclos:2,distributornam:2,electron:2,volum:[5,4],implicitli:[1,2],nextwork:2,relev:2,tri:4,gender:2,button:2,geograph:[0,2],fewer:2,race:2,pleas:[0,5,2,4],smaller:2,natur:[0,2,4],pleae:2,click:[5,2,4],append:[1,2],compat:[2,4],index:[0,3,2,5,4],compar:[],cell:2,productiond:[0,2],whatev:4,perimet:2,poverti:2,chose:[5,2],bodi:2,let:2,networkadmin:[5,2],sinc:[5,1,2,4],great:4,survei:[0,2],convers:2,dvnextra:4,larger:2,dvnapp:4,depositorrequir:2,swordv2:[],typic:[5,2],chanc:4,firefox:2,holdingsuri:0,revok:2,appli:[2,4],app:[0,2,4],disciplin:2,studyfilesfrag:5,"boolean":[],regexp:2,home:[0,2,4],pgdownload:[],myvalu:2,acceptpackag:[],from:[],objectid:0,resultswithfacet:5,upgrad:[],next:[],websit:[0,2,4],few:2,usr:4,cran:4,simpler:[],servicedesc:0,tempsubsetfil:2,um06qkr:2,mismatch:2,actual:[1,2,4],confi:4,harvest:[],alik:4,alia:[2,4],datasrc:0,meet:2,fetch:[5,2],control:[],reveiw:2,tar:[2,4],process:[0,5,2,4],lock:[0,2],sudo:5,"1xxzy":0,proprietari:2,tab:[],hulmail:[],tarbal:2,onlin:[2,4],default_transaction_read_onli:4,gcc:2,sit:2,citationfil:2,dtwitter4j:4,subdirectori:2,instead:[5,1,2,4],sucess:2,await:[1,2],originalarch:2,redund:2,physic:4,essenti:2,seriou:[],counter:[1,2],arrow:2,element:[],issu:[5,1,2,4],allot:4,allow:[0,1,2,5,4],move:[5,2,4],microsystem:4,own:[0,1,2,5,4],comma:2,outer:2,chosen:2,byt:[],statusnotic:4,infrastructur:2,tsv:[],therefor:2,greater:2,filetxt:2,intvu4a:2,dai:2,auth:[2,4],devel:4,stdydscr:0,flash:2,facilit:[2,4],mediashelf:[],fingerprint:[5,2],front:2,nbinst:[],anyth:[0,2,4],edit:[],catvalu:2,pgpoolingdatasourc:4,astronom:2,februari:4,mode:[],dvninstal:[5,4],batch:2,querystr:0,francisco:[1,2],subset:[],consum:4,searchqueri:0,"static":2,our:[0,5,2,4],patch:4,malformedstreamexcept:[],special:[2,4],index
u:4,variabl:[1,2,4],matrix:2,servicearg:0,influenc:2,misidentifi:2,categori:2,suitabl:[5,2],rel:[2,4],lockss:[],hardwar:4,red:2,statist:[],insid:[2,4],workflow:[0,2,4],manipul:[2,4],undo:2,standalon:[2,4],scrnrid:2,rdata:[],afterward:2,complianc:2,dvnqueri:5,guarante:[1,2,4],could:2,latest:[5,2,4],mac:[5,4],timer:[2,4],keep:[5,1,2,4],length:[0,2,4],enforc:2,outsid:[2,4],pg_hba:[5,4],timezon:[1,2],gvimdiff:5,softwar:[2,4],suffix:4,echo:4,date:[0,1,2,5],owner:[2,4],technetwork:4,suffic:2,publickei:4,"long":[5,1,2,4],workbench:[],respond:2,sep:[1,2],mkdir:[],compliant:2,messag:[0,5,2,4],attach:[1,2],termin:[5,2],"final":[5,2,4],tortoisemerg:5,shell:4,gear:2,appdeploi:[],methodolog:2,rsa:5,jndi:4,accompani:2,nobodi:4,excludesupport:0,abstractd:2,structur:[2,4],charact:[],instanceroot:4,becom:[1,2,4],light:2,f10:2,f11:[],datapass:2,explicit:[1,2],clearli:4,robot:4,correspond:[2,4],sysadmin:2,corrupt:2,have:[],close:[5,2],need:[],turn:4,codebook:[0,2],min:2,mif:2,documenta:2,discret:2,which:[0,1,2,5,4],vertici:2,divers:2,singl:[0,2,4],fileid:[0,2],analysi:[],unless:[1,2,4],deploy:5,who:[5,2,4],oracl:[5,4],discov:2,eight:2,why:[5,2,4],gather:2,request:[0,2,4],uri:0,face:[2,4],inde:2,deni:[0,2],yum:4,brew:0,occasion:2,fact:[1,2,4],text:[1,2,4],bring:[2,4],anywai:5,varnam:2,redirect:[2,4],inlin:2,locat:[0,2,4],jar:[5,2,4],dispers:2,smallest:2,suppos:2,initdb:4,discontinu:5,hope:5,meant:[2,4],contribut:[5,2],notat:[0,1,2,4],regularli:2,msg00317:[],increas:[2,4],solr:5,organ:[],upper:2,stanford:2,stuff:2,she:4,actionstominimizeloss:2,contain:[],attribut:[0,1,2],cottagelab:[],view:[],conform:0,btw:[],legaci:2,frame:[1,2],knowledg:2,temporarili:2,multipart:[],dyadic:2,cold:0,gmail:2,closer:[2,4],allowlink:4,record:[0,2,4],pattern:[],boundari:2,state:[],"8fd223d":5,stata:[],censu:[0,2],progress:[0,2],neither:2,email:[2,4],kei:[5,2,4],handleprefix:4,entir:[2,4],addit:[0,1,2,4],restrctn:0,plugin:[],admin:[0,5,2,4],april:[],instanc:[1,2,4],caseqnti:2,initit:2,nbproject:[],guidelin:2
,hyphen:2,arriv:2,chmod:4,walk:2,respect:4,rpm:4,mailto:2,quit:[1,2,4],ezid:4,addition:2,djava:4,compos:2,compon:[],besid:2,treat:[1,2],immedi:[2,4],bit:4,both:[0,1,2,5,4],subset_:2,decim:2,togeth:[2,4],subtitl:2,present:[1,2,4],replic:[2,4],multi:[2,4],plain:2,cursor:2,defin:[0,1,2,4],observ:[1,2],purchas:2,customiz:2,almost:2,demo:4,archiv:[0,2,4],incom:2,surprisingli:4,scienc:2,substant:2,welcom:2,parti:[2,4],began:2,cross:2,member:2,auto:5,nada:2,difficult:[1,2],http:[],hostnam:4,denot:2,upon:2,effect:[5,2],coffe:0,student:2,php:0,expand:2,referencedata:4,off:[5,2,4],center:[2,4],mention:4,builder:2,well:[2,4],command:[0,2,4],gcinterv:4,choos:[5,2,4],usual:2,newest:5,less:4,boot:4,obtain:[5,2,4],tcp:4,clinic:0,virtual:4,increasingli:[1,2],simple_studi:2,skill:4,simultan:2,demograph:2,abstracttext:2,book:0,warrant:[1,2],match:[5,2],gmt:[1,2],rememb:[5,2],crud:[],smithsonian:2,five:[],know:[5,2,4],press:[0,2],password:[0,5,2,4],recurs:5,mypag:2,institut:[0,2,4],resid:2,like:[0,1,2,5,4],lost:[],researchinstru:2,necessari:[2,4],servernam:4,xsd:[0,2],resiz:4,page:[5,3,2,4],crawler:2,dateofcollect:2,drop:[0,5,2,4],captur:2,twitter:[],linux:[2,4],collectionpolici:0,contin:2,"export":[],swordpoc:0,proper:[0,1,2],small:[2,4],seriesnam:2,librari:[],est:[1,2],admpriv:4,lead:[2,4],avoid:[1,2],octet:2,overlap:2,setti:2,nesstar:2,leav:[5,2,4],encourag:[0,5,2],investig:[5,2],globalid:[2,4],journal:[0,4],usag:[2,4],maven:5,host:[2,4],dissert:[],although:4,offset:[1,2],panel:2,about:[5,2,4],rare:2,socket:[5,4],column:[1,2],commons_log:[],ingest:[],lifecycl:[],fals:[0,2,4],discard:5,addendum:[],disabl:2,desper:4,seamlessli:2,automat:[5,2,4],dataset:[0,2],"_length":2,"2bxwoapwxzmlme1m3rg":[],stumptown:0,mere:2,merg:[],appl:5,"var":[2,4],"function":[5,2,4],otherdataapprais:2,north:2,brand:[],fileupload:[],baseurl:4,uninstal:[5,4],relmat:0,oauth:4,highest:2,bug:2,count:[5,1,2,4],depositor:2,succe:4,made:[5,1,2,4],temp:4,dmg:5,whether:[2,4],wish:[2,4],troubl:[],asynchron:2,fits_stand
ard:2,below:[0,5,2,4],meta:2,limit:[],invest:2,otherwis:[1,2,4],problem:[5,2,4],dimensn:2,epel:4,evalu:0,dure:[5,2,4],graphml:2,probabl:[5,2,4],migtht:[],percent:2,detail:[0,5,2,4],oai:[],other:[],futur:[1,2,4],branch:[],scholarli:[],bc3:5,repeat:2,studyvers:2,stai:4,topicclassvocabulari:2,experienc:4,amp:0,portion:2,emerg:5,sincer:4,rep:2,accessservic:0},objtypes:{},objnames:{},filenames:["dataverse-api-main","dataverse-R-ingest","dataverse-user-main","index","dataverse-installer-main","dataverse-developer-main"],titles:["APIs Guide","Ingest of R (.RData) files","User Guide","Dataverse Network Guides","Installers Guide","DVN Developers Guide"],objects:{},titleterms:{lab:[],code:[0,2],identifi:4,ant:[],execut:3,blocker:4,session:4,ejb:4,permiss:2,file:[0,1,2,4],asadmin:4,find:2,xml:0,web:2,onli:[0,4],layout:2,fit:2,field:2,copi:5,configur:[5,4],readabl:[],written:[],common:2,add:[0,2],glassfish:[5,4],digit:4,jvm:4,factor:[1,2],mail:2,analyt:4,spec:[],applic:5,non:0,sourc:5,string:[1,2],format:[1,2],read:4,recaptcha:4,autom:[],repo:5,netbean:5,ssh:5,consid:2,studi:[0,2],requir:[1,2,4],name:2,term:2,document:0,metadataformatsavail:0,enabl:[2,4],administr:2,edit:2,list:[0,5,2],upload:2,integ:[1,2],server:5,"try":[],collect:2,api:[0,2],mode:4,datavers:[0,3,2],contain:4,quick:4,output:[],subnetwork:2,refer:[0,2],manag:2,view:2,subset:2,set:[5,2,4],replac:0,twitter:[2,4],creation:2,startup:4,keypair:5,video:[],result:2,download:[0,5,2],run:[5,4],charact:[1,2],servic:[0,4],librari:0,out:5,what:4,network:[0,3,2],compar:[1,2],databas:[0,5],section:2,http:4,publish:2,label:2,access:[0,2,4],delet:0,ddi:[0,2],version:[5,2],sword:0,stata:[1,2],"new":[5,1,2],crosswalk:0,merg:5,metadata:[0,2],core:0,qualifi:0,postgr:[],imagemagick:4,extend:2,state:2,zelig:2,gener:2,chang:5,style:2,step:2,base:2,javamail:4,repositori:5,rdata:[1,2],put:[],group:2,post:[],organ:2,card:2,deaccess:0,box:2,manual:[],search:2,postgresql:[5,4],xmllint:[],login:5,current:5,bot:4,ingest:[1,2],spss:[1,2],thi
ng:2,befor:5,lucen:5,unabl:[],context:3,swordv2:0,commit:5,implement:[],backup:4,first:5,comment:2,via:2,prerequisit:4,redhat:4,overview:[0,1,2],por:2,number:5,releas:[0,5],"boolean":[1,2],ensur:[],instal:[5,4],guid:[0,3,2,5,4],open:5,select:[],duplic:5,avail:5,jdbc:4,from:5,zip:0,commun:[],support:[1,2],doubl:[1,2],system:4,been:0,next:2,compon:4,binari:[],master:5,valu:[1,2],statement:0,handl:4,cottag:[],interfac:2,reformat:[],preview:5,type:[1,2],individu:4,exampl:[0,5,4],etc:0,analysi:2,option:4,offer:[],python:0,checklist:[],brand:2,relat:[],protect:2,task:2,"export":2,appendix:[2,4],link:2,templat:[2,4],atom:0,alreadi:5,git:5,harvest:2,haven:5,statist:2,wide:[],account:[5,2],retriev:0,have:5,tab:2,restor:4,"default":2,dvn:[0,5,4],setup:4,plugin:[],tree:5,displai:0,dev:5,junit:5,project:5,url:0,intro:[],limit:[1,2],time:[5,1,2],error:5,downloadinfo:0,sampl:0,metadatasearch:0,control:2,sort:2,classif:2,featur:5,explor:2,creat:[0,5,2],dublin:0,share:0,site:[],indic:3,destin:5,tabular:2,openscholar:2,schedul:2,tag:5,increment:5,tabl:[3,2],need:4,curl:0,work:[],check:5,icefac:[],develop:5,googl:4,secur:4,titl:0,make:5,get:2,tip:5,"switch":5,oai:2,client:0,how:2,other:[1,2],role:2,build:5,branch:5,test:5,you:[5,4],csv:2,simpl:[],bundl:5,map:0,stat:[1,2],notif:2,resourc:[2,4],track:2,lockss:[2,4],clone:5,object:4,statu:[],metadatasearchfield:0,model:5,perl:[],catalog:0,user:2,vocabulari:2,deploi:[],data:[0,1,2],"class":5,guestbook:2,pull:5,util:2,github:5,don:5,java:[],directori:5,entri:0,author:[0,2],descript:2,visual:2,doe:4,inform:0,script:5,contact:2,environ:5,deposit:0,determin:0,rserv:4,push:5,element:0,remot:[5,4],order:2}}) \ No newline at end of file diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/domain.xml --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/domain.xml Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,477 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + -XX:+DisableExplicitGC + -XX:+UseParallelOldGC + -Dcom.sun.grizzly.maxSelectors=32 + -Djava.awt.headless=true + -XX:+UnlockDiagnosticVMOptions + -DANTLR_USE_DIRECT_CLASS_LOADING=true + -Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as + -Dfelix.fileinstall.disableConfigSave=false + -Dfelix.fileinstall.poll=5000 + -Dfelix.fileinstall.log.level=2 + -Dfelix.fileinstall.bundles.startTransient=true + -Dfelix.fileinstall.bundles.new.start=true + -Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/ + -Dosgi.shell.telnet.maxconn=1 + -Dosgi.shell.telnet.ip=127.0.0.1 + -Dosgi.shell.telnet.port=6666 + -Dgosh.args=--nointeractive + -Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.fileinstall + -XX:NewRatio=2 + -XX:MaxPermSize=384m + -Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy + -Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf + -Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks + -Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks + -Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory + -Djdbc.drivers=org.apache.derby.jdbc.ClientDriver + -Djavax.management.builder.initial=com.sun.enterprise.v3.admin.AppServerMBeanServerBuilder + -Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed + -server + -Xmx2959m + -Djhove.conf.dir=${com.sun.aas.instanceRoot}/config + 
-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext${path.separator}${com.sun.aas.derbyRoot}/lib + -Dcom.sun.enterprise.taglibs=appserv-jstl.jar,jsf-impl.jar + -Dcom.sun.enterprise.taglisteners=jsf-impl.jar + -Ddvn.inetAddress=jurzua-lenovo + -Ddvn.networkData.libPath=${com.sun.aas.instanceRoot}/config/networkData/lib + -Ddvn.index.location=${com.sun.aas.instanceRoot}/config + -Dvdc.dsb.host=localhost + -Dvdc.dsb.rserve.port=6311 + -Dvdc.dsb.rserve.pwrd=rserve + -Dvdc.dsb.rserve.user=rserve + -Dvdc.import.log.dir=${com.sun.aas.instanceRoot}/logs/import + -Dvdc.export.log.dir=${com.sun.aas.instanceRoot}/logs/export + -Dvdc.temp.file.dir=${com.sun.aas.instanceRoot}/config/files/temp + -Dvdc.study.file.dir=${com.sun.aas.instanceRoot}/config/files/studies + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + -XX:MaxPermSize=192m + -server + -Djava.awt.headless=true + -XX:+UnlockDiagnosticVMOptions + -Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed + -Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy + -Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf + -Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as + -Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks + -Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks + -Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext + -Djdbc.drivers=org.apache.derby.jdbc.ClientDriver + -DANTLR_USE_DIRECT_CLASS_LOADING=true + 
-Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory + -XX:NewRatio=2 + -Xmx2959m + -Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.fileinstall + -Dosgi.shell.telnet.port=${OSGI_SHELL_TELNET_PORT} + -Dosgi.shell.telnet.maxconn=1 + -Dosgi.shell.telnet.ip=127.0.0.1 + -Dgosh.args=--noshutdown -c noop=true + -Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/ + -Dfelix.fileinstall.poll=5000 + -Dfelix.fileinstall.log.level=3 + -Dfelix.fileinstall.bundles.new.start=true + -Dfelix.fileinstall.bundles.startTransient=true + -Dfelix.fileinstall.disableConfigSave=false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/domain.xml.TEMPLATE --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/domain.xml.TEMPLATE Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,477 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + -XX:+DisableExplicitGC + -XX:+UseParallelOldGC + -Dcom.sun.grizzly.maxSelectors=32 + -Djava.awt.headless=true + -XX:+UnlockDiagnosticVMOptions + -DANTLR_USE_DIRECT_CLASS_LOADING=true + -Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as + -Dfelix.fileinstall.disableConfigSave=false + -Dfelix.fileinstall.poll=5000 + -Dfelix.fileinstall.log.level=2 + -Dfelix.fileinstall.bundles.startTransient=true + 
-Dfelix.fileinstall.bundles.new.start=true + -Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/ + -Dosgi.shell.telnet.maxconn=1 + -Dosgi.shell.telnet.ip=127.0.0.1 + -Dosgi.shell.telnet.port=6666 + -Dgosh.args=--nointeractive + -Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.fileinstall + -XX:NewRatio=2 + -XX:MaxPermSize=384m + -Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy + -Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf + -Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks + -Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks + -Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory + -Djdbc.drivers=org.apache.derby.jdbc.ClientDriver + -Djavax.management.builder.initial=com.sun.enterprise.v3.admin.AppServerMBeanServerBuilder + -Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed + -server + -Xmx%DEF_MEM_SIZE% + -Djhove.conf.dir=${com.sun.aas.instanceRoot}/config + -Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext${path.separator}${com.sun.aas.derbyRoot}/lib + -Dcom.sun.enterprise.taglibs=appserv-jstl.jar,jsf-impl.jar + -Dcom.sun.enterprise.taglisteners=jsf-impl.jar + -Ddvn.inetAddress=%HOST_DNS_ADDRESS% + -Ddvn.networkData.libPath=${com.sun.aas.instanceRoot}/config/networkData/lib + -Ddvn.index.location=${com.sun.aas.instanceRoot}/config + -Dvdc.dsb.host=%RSERVE_HOST% + -Dvdc.dsb.rserve.port=%RSERVE_PORT% + -Dvdc.dsb.rserve.pwrd=%RSERVE_PASSWORD% + -Dvdc.dsb.rserve.user=%RSERVE_USER% + -Dvdc.import.log.dir=${com.sun.aas.instanceRoot}/logs/import + 
-Dvdc.export.log.dir=${com.sun.aas.instanceRoot}/logs/export + -Dvdc.temp.file.dir=${com.sun.aas.instanceRoot}/config/files/temp + -Dvdc.study.file.dir=${com.sun.aas.instanceRoot}/config/files/studies + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + -XX:MaxPermSize=192m + -server + -Djava.awt.headless=true + -XX:+UnlockDiagnosticVMOptions + -Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed + -Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy + -Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf + -Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as + -Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks + -Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks + -Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext + -Djdbc.drivers=org.apache.derby.jdbc.ClientDriver + -DANTLR_USE_DIRECT_CLASS_LOADING=true + -Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory + -XX:NewRatio=2 + -Xmx%DEF_MEM_SIZE% + -Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.fileinstall + -Dosgi.shell.telnet.port=${OSGI_SHELL_TELNET_PORT} + -Dosgi.shell.telnet.maxconn=1 + -Dosgi.shell.telnet.ip=127.0.0.1 + -Dgosh.args=--noshutdown -c noop=true + -Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/ + -Dfelix.fileinstall.poll=5000 + -Dfelix.fileinstall.log.level=3 + 
-Dfelix.fileinstall.bundles.new.start=true + -Dfelix.fileinstall.bundles.startTransient=true + -Dfelix.fileinstall.disableConfigSave=false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/install --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/install Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,1183 @@ +#!/usr/bin/perl + +use Getopt::Long; +use Socket; +use File::Copy; + +my( %opts ) = ( ); +my( $rez ) = GetOptions( \%opts, "pg_only!"); + +my $postgresonly = 0; + +my @CONFIG_VARIABLES = (); + +unless ($opts{pg_only}) +{ + @CONFIG_VARIABLES = ( + 'HOST_DNS_ADDRESS', + 'GLASSFISH_DIRECTORY', + 'MAIL_SERVER', + + 'POSTGRES_SERVER', + 'POSTGRES_PORT', + 'POSTGRES_DATABASE', + 'POSTGRES_USER', + 'POSTGRES_PASSWORD', + + 'RSERVE_HOST', + 'RSERVE_PORT', + 'RSERVE_USER', + 'RSERVE_PASSWORD' + + ); +} +else +{ + @CONFIG_VARIABLES = ( + 'POSTGRES_SERVER', + 'POSTGRES_PORT', + 'POSTGRES_DATABASE', + 'POSTGRES_USER', + 'POSTGRES_PASSWORD' + ); + + $postgresonly = 1; +} + +my %CONFIG_DEFAULTS = + ( + 'HOST_DNS_ADDRESS', '', + 'GLASSFISH_DIRECTORY', '/home/glassfish/glassfish', + 'MAIL_SERVER', 'localhost', + + 'POSTGRES_SERVER', 'localhost', + 'POSTGRES_PORT', 5432, + 'POSTGRES_DATABASE','dvnDb', + 'POSTGRES_USER', 'postgres', + 'POSTGRES_PASSWORD','admin', + + 'RSERVE_HOST', 'localhost', + 'RSERVE_PORT', 6311, + 'RSERVE_USER', 'rserve', + 'RSERVE_PASSWORD', 'rserve' + + ); + + +my %CONFIG_PROMPTS = + ( + 'HOST_DNS_ADDRESS', 'Internet Address of your host', + 'GLASSFISH_DIRECTORY', 'Glassfish Directory', + 'MAIL_SERVER', 'SMTP (mail) server to relay notification messages', + + 'POSTGRES_SERVER', 'Postgres Server', + 'POSTGRES_PORT', 'Postgres Server Port', + 'POSTGRES_DATABASE','Name of the Postgres Database', + 'POSTGRES_USER', 'Name of the Postgres User', + 
'POSTGRES_PASSWORD','Postgres user password', + + 'RSERVE_HOST', 'Rserve Server', + 'RSERVE_PORT', 'Rserve Server Port', + 'RSERVE_USER', 'Rserve User Name', + 'RSERVE_PASSWORD', 'Rserve User Password' + + ); + +# Supported Posstgres JDBC drivers: +# (have to be configured explicitely, so that Perl "taint" (security) mode +# doesn't get paranoid) + +my $POSTGRES_DRIVER_8_3 = "postgresql-8.3-603.jdbc4.jar"; +#my $POSTGRES_DRIVER_8_4 = "postgresql-8.4-703.jdbc4.jar"; +my $POSTGRES_DRIVER_8_4 = "postgresql-8.3-603.jdbc4.jar"; +my $POSTGRES_DRIVER_9_0 = "postgresql-9.0-802.jdbc4.jar"; +my $POSTGRES_DRIVER_9_1 = "postgresql-9.1-902.jdbc4.jar"; + + +# A few preliminary checks: + +# user -- must be root: + +$user_real = `who am i`; +chop $user_real; +$user_real =~s/ .*$//; + +if ( $< != 0 ) +{ + print STDERR "\nERROR: You must be logged in as root to run the installer.\n\n"; + exit 1; +} + +# OS: + +my $uname_out = `uname -a`; + +# hostname: + +my $hostname_from_cmdline = `hostname`; +chop $hostname_from_cmdline; + +$CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} = $hostname_from_cmdline; + + +print "\nWelcome to the DVN installer.\n"; +unless ($opts{pg_only}) +{ + print "You will be guided through the process of setting up a NEW\n"; + print "instance of the DVN application\n"; +} +else +{ + print "You will be guided through the process of configuring the\n"; + print "LOCAL instance of PostgreSQL database for use by the DVN\n"; + print "application.\n"; +} + +my @uname_tokens = split (" ", $uname_out); + +if ( $uname_tokens[0] eq "Darwin" ) +{ + print "\nThis appears to be a MacOS X system; good.\n"; + # TODO: check the OS version + + $WORKING_OS = "MacOSX"; +} +elsif ( $uname_tokens[0] eq "Linux" ) +{ + if ( -f "/etc/redhat-release" ) + { + print "\nThis appears to be a RedHat system; good.\n"; + $WORKING_OS = "RedHat"; + # TODO: check the distro version + } + else + { + print "\nThis appears to be a non-RedHat Linux system;\n"; + print "this installation *may* succeed; but we're 
not making any promises!\n"; + $WORKING_OS = "Linux"; + } +} +else +{ + print "\nWARNING: This appears to be neither a Linux or MacOS X system!\n"; + print "This installer script will most likely fail. Please refer to the\n"; + print "DVN Installers Guide for more information.\n\n"; + + $WORKING_OS = "Unknown"; + + print "Do you wish to continue?\n [y/n] "; + + + my $yesnocont = <>; chop $yesnocont; + + while ( $yesnocont ne "y" && $yesnocont ne "n" ) + { + print "Please enter 'y' or 'n'!\n"; + print "(or ctrl-C to exit the installer)\n"; + $yesnocont = <>; chop $yesnocont; + } + + if ( $yesnocont eq "n" ) + { + exit 0; + } + +} + + ENTERCONFIG: + +print "\n"; +print "Please enter the following configuration values:\n"; +print "(hit [RETURN] to accept the default value)\n"; +print "\n"; + +for $ENTRY (@CONFIG_VARIABLES) +{ + print $CONFIG_PROMPTS{$ENTRY} . ": "; + print "[" . $CONFIG_DEFAULTS{$ENTRY} . "] "; + + $user_entry = <>; + chop $user_entry; + + if ($user_entry ne "") + { + $CONFIG_DEFAULTS{$ENTRY} = $user_entry; + } + + print "\n"; +} + +# CONFIRM VALUES ENTERED: + + +print "\nOK, please confirm what you've entered:\n\n"; + +for $ENTRY (@CONFIG_VARIABLES) +{ + print $CONFIG_PROMPTS{$ENTRY} . ": " . $CONFIG_DEFAULTS{$ENTRY} . "\n"; +} + +print "\nIs this correct? [y/n] "; + + +my $yesno = <>; chop $yesno; + +while ( $yesno ne "y" && $yesno ne "n" ) +{ + print "Please enter 'y' or 'n'!\n"; + print "(or ctrl-C to exit the installer)\n"; + $yesno = <>; chop $yesno; +} + +if ( $yesno eq "n" ) +{ + goto ENTERCONFIG; +} + +# VALIDATION/VERIFICATION OF THE CONFIGURATION VALUES: +# 1. 
VERIFY MAIL SERVER THEY CONFIGURED: + +=pod + +unless ( $postgresonly ) +{ + + my ( $mail_server_iaddr, $mail_server__paddr, $mail_server_proto, $mail_server_status ); + + $mail_server_status = 1; + + unless ( $mail_server_iaddr = inet_aton($CONFIG_DEFAULTS{'MAIL_SERVER'}) ) + { + print STDERR "Could not look up $CONFIG_DEFAULTS{'MAIL_SERVER'},\n"; + print STDERR "the host you specified as your mail server.\n"; + $mail_server_status = 0; + } + + if ($mail_server_status) { + $mail_server_paddr = sockaddr_in(25, $mail_server_iaddr); + $mail_server_proto = getprotobyname('tcp'); + + unless ( socket(SOCK, PF_INET, SOCK_STREAM, $mail_server_proto) && + connect(SOCK, $mail_server_paddr) ) + { + print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'MAIL_SERVER'},\n"; + print STDERR "the address you provided for your Mail server.\n"; + print STDERR "Please select a valid mail server, and try again.\n\n"; + + $mail_server_status = 0; + } + + } + + close (SOCK); + + unless ($mail_server_status) + { + goto ENTERCONFIG; + } +} +=cut + +# 2. CHECK IF THE WAR FILE IS AVAILABLE: +unless ( -f "appdeploy/dist/DVN-web.war" ) +{ + print "\nWARNING: Can't find the project .war file in appdeploy/dist/!\n"; + print "\tAre you running the installer in the right directory?\n"; + print "\tHave you built the war file?\n"; + print "\t(if not, build the project and run the installer again)\n"; + + exit 0; +} + +# check the working (installer) dir: +my $cwd; +chomp($cwd = `pwd`); + +# 2b. 
CHECK IF THE SQL TEMPLATE IS IN PLACE AND CREATE THE SQL FILE + +$SQL_REFERENCE_DATA = "referenceData.sql"; +$SQL_REFERENCE_TEMPLATE = "referenceData.sql.TEMPLATE"; + +unless ( -f $SQL_REFERENCE_TEMPLATE ) +{ + print "\nWARNING: Can't find .sql data template!\n"; + print "(are you running the installer in the right directory?)\n"; + + exit 0; +} + +open DATATEMPLATEIN, $SQL_REFERENCE_TEMPLATE || die $@; +open SQLDATAOUT, '>'.$SQL_REFERENCE_DATA || die $@; + +while( ) +{ + s/%POSTGRES_USER%/$CONFIG_DEFAULTS{'POSTGRES_USER'}/g; + print SQLDATAOUT $_; +} + + +close DATATEMPLATEIN; +close SQLDATAOUT; + +# 3. CHECK POSTGRES AVAILABILITY: + +my $pg_local_connection = 0; + +if ( $CONFIG_DEFAULTS{'POSTGRES_SERVER'} eq 'localhost' ) +{ + $pg_local_connection = 1; + + # 3a. CHECK FOR USER postgres: + + print "\nChecking system user \"postgres\"... "; + + $POSTGRES_SYS_NAME = "postgres"; + $POSTGRES_SYS_UID = (getpwnam ("postgres"))[2]; + + if ($POSTGRES_SYS_UID == undef) { + print STDERR "\nERROR: I haven't been able to find user \"postgres\" on the system!\n"; + print STDERR "(TODO: prompt the user instead to supply an alternative username, if\n"; + print STDERR "available)\n"; + + exit 1; + } + + print "OK.\n"; + + # 3b. LOCATE THE EXECUTABLE: + + $sys_path = $ENV{'PATH'}; + @sys_path_dirs = split ( ":", $sys_path ); + + $psql_exec = ""; + + for $sys_path_dir ( @sys_path_dirs ) + { + if ( -x $sys_path_dir . "/psql" ) + { + $psql_exec = $sys_path_dir; + last; + } + } + + $pg_major_version = 0; + $pg_minor_version = 0; + + if ( $psql_exec eq "" && $WORKING_OS eq "MacOSX" ) + { + for $pg_minor_version ( "1", "0" ) + { + if ( -x "/Library/PostgreSQL/9." . $pg_minor_version . "/bin/psql" ) + { + $pg_major_version = 9; + $psql_exec = "/Library/PostgreSQL/9." . $pg_minor_version . "/bin"; + last; + } + } + if (!$pg_major_version) + { + for $pg_minor_version ( "4", "3" ) + { + if ( -x "/Library/PostgreSQL/8." . $pg_minor_version . 
"/bin/psql" ) + { + $pg_major_version = 8; + $psql_exec = "/Library/PostgreSQL/8." . $pg_minor_version . "/bin"; + last; + } + } + } + } + + if ( $psql_exec eq "" ) + { + print STDERR "\nERROR: I haven't been able to find the psql command in your PATH!\n"; + print STDERR "Please make sure PostgresQL is properly installed and try again.\n\n"; + + exit 1; + } + + + + # 3c. CHECK POSTGRES VERSION: + + open (PSQLOUT, $psql_exec . "/psql --version|"); + + $psql_version_line = ; + chop $psql_version_line; + close PSQLOUT; + + my ($postgresName, $postgresNameLong, $postgresVersion) = split ( " ", $psql_version_line ); + + unless ( $postgresName eq "psql" && $postgresVersion =~ /^[0-9][0-9\.]*$/ ) + { + print STDERR "\nERROR: Unexpected output from psql command!\n"; + print STDERR "Please make sure PostgresQL is properly installed and try again.\n\n"; + + exit 1; + } + + + my (@postgres_version_tokens) = split ( '\.', $postgresVersion ); + + unless ( ($postgres_version_tokens[0] == 8 && $postgres_version_tokens[1] >= 3) || ($postgres_version_tokens[0] >= 9) ) + { + print STDERR "\nERROR: PostgresQL version 8.3, or newer, is required!\n"; + print STDERR "Found a copy of psql ($psql_exec/psql) that belongs to version $postgresVersion.\n\n"; + print STDERR "Please make sure the right version of PostgresQL is properly installed,\n"; + print STDERR "and the right version of psql comes first in the PATH,\n"; + print STDERR "then try again.\n"; + + exit 1; + } + + print "\n\nFound Postgres psql command, version $postgresVersion. Good.\n\n"; + + $pg_major_version = $postgres_version_tokens[0]; + $pg_minor_version = $postgres_version_tokens[1]; + + # 4. CONFIGURE POSTGRES: + + print "\nConfiguring Postgres Database:\n"; + + + + $< = $POSTGRES_SYS_UID; + $> = $POSTGRES_SYS_UID; + + # 4a. 
CHECK IF POSTGRES IS RUNNING: + print "Checking if a local instance of Postgres is running and accessible...\n"; + + # (change to /tmp before executing the command below - + # we are trying to do it as user postgres, and it may not have + # access to the current, installer directory; the command would still + # work, but there would be an error message from the shell init on screen + # - potentially confusing) + chdir ("/tmp"); + + if (!system ($psql_exec . "/psql -c 'SELECT * FROM pg_roles' > /dev/null 2>&1")) + { + print "Yes, it is.\n"; + } + else + { + print "Nope, I haven't been able to connect to the local instance of PostgresQL.\n"; + print "daemon. Is postgresql running? \n"; + print "On a RedHat system, you can check the status of the daemon with\n\n"; + print " service postgresql status\n\n"; + print "and, if it's not running, start the daemon with\n\n"; + print " service postgresql start\n\n"; + print "On MacOSX, use Applications -> PostgresQL -> Start Server.\n"; + print "Also, please make sure that the daemon is listening to network connections,\n"; + print "at leaset on the localhost interface. (See \"Installing Postgres\" section\n"; + print "of the installation manual).\n"; + print "Finally, please make sure that the postgres user can make localhost \n"; + print "connections without supplying a password. (That's controlled by the \n"; + print "\"localhost ... ident\" line in pg_hba.conf; again, please consult the \n"; + print "installation manual).\n"; + + + exit 1; + } + + + # 4c. CHECK IF THIS DB ALREADY EXISTS: + + $psql_command_dbcheck = $psql_exec . "/psql -c \"\" -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ">/dev/null 2>&1"; + if ( ($exitcode = system($psql_command_dbcheck)) == 0 ) + { + # switch back to root uid: + $> = 0; + $< = 0; + chdir ($cwd); + + print "WARNING! Database " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . 
" already exists!\n"; + print "\nPlease note that you can only use this installer to create a blank, \n"; + print "new and shiny DVN database. I.e., you cannot install on top of an \n"; + print "existing database. Please enter a different name for the DVN database.\n"; + print "\nPress any key to continue, or ctrl-C to exit the installer...\n\n"; + + system "stty cbreak /dev/tty 2>&1"; + my $key = getc(STDIN); + system "stty -cbreak /dev/tty 2>&1"; + print "\n"; + + goto ENTERCONFIG; + + } + + # 4d. CHECK IF THIS USER ALREADY EXISTS: + + $psql_command_rolecheck = $psql_exec . "/psql -c \"\" -d postgres " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " >/dev/null 2>&1"; + if ( ($exitcode = system($psql_command_rolecheck)) == 0 ) + { + print "User (role) . " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " already exists;\n"; + print "Proceeding."; + } + else + { + # 4e. CREATE DVN DB USER: + + print "\nCreating Postgres user (role) for the DVN:\n"; + + open TMPCMD, ">/tmp/pgcmd.$$.tmp"; + + # with unencrypted password: + #print TMPCMD "CREATE ROLE ".$CONFIG_DEFAULTS{'POSTGRES_USER'}." UNENCRYPTED PASSWORD '".$CONFIG_DEFAULTS{'POSTGRES_PASSWORD'}."' NOSUPERUSER CREATEDB CREATEROLE NOINHERIT LOGIN"; + + # with md5-encrypted password: + $pg_password_md5 = &create_pg_hash ($CONFIG_DEFAULTS{'POSTGRES_USER'},$CONFIG_DEFAULTS{'POSTGRES_PASSWORD'}); + my $sql_command = "CREATE ROLE \"".$CONFIG_DEFAULTS{'POSTGRES_USER'}."\" PASSWORD 'md5". $pg_password_md5 ."' NOSUPERUSER CREATEDB CREATEROLE INHERIT LOGIN"; + + print TMPCMD $sql_command; + close TMPCMD; + + my $psql_commandline = $psql_exec . "/psql -f /tmp/pgcmd.$$.tmp"; + + unless ( ($exitcode = system($psql_commandline)) == 0 ) + { + print STDERR "Could not create the DVN Postgres user role!\n"; + print STDERR "(SQL: " . $sql_command . ")\n"; + print STDERR "(psql exit code: " . $exitcode . ")\n"; + exit 1; + } + + unlink "/tmp/pgcmd.$$.tmp"; + print "done.\n"; + } + + # 4f. 
CREATE DVN DB: + + print "\nCreating Postgres database:\n"; + + $psql_command = $psql_exec . "/createdb ".$CONFIG_DEFAULTS{'POSTGRES_DATABASE'}." --owner=".$CONFIG_DEFAULTS{'POSTGRES_USER'}; + + unless ( ($exitcode = system("$psql_command")) == 0 ) + { + print STDERR "Could not create Postgres database for the DVN app!\n"; + print STDERR "(command: " . $psql_command . ")\n"; + print STDERR "(psql exit code: " . $exitcode . ")\n"; + print STDERR "\naborting the installation (sorry!)\n\n"; + exit 1; + } + + +# Changing back to root UID: + + $> = 0; + $< = 0; + + chdir ($cwd); + +} +else +{ + if (0) # DEV. INSTALLER ONLY: + { + print "\nIt is strongly recommended that you use a local PostgresQL server,\n"; + print "running on localhost, in your development environment!\n\n"; + + print "Do you wish to continue?\n [y/n] "; + + + my $yesnocont = <>; chop $yesnocont; + + while ( $yesnocont ne "y" && $yesnocont ne "n" ) + { + print "Please enter 'y' or 'n'!\n"; + print "(or ctrl-C to exit the installer)\n"; + $yesnocont = <>; chop $yesnocont; + } + + if ( $yesnocont eq "n" ) + { + print "(aborting the installation)\n". + exit 0; + } + } + + if ( $opts{$pg_only} ) + { + print "The script must be run in the --pg_only mode ONLY locally,\n"; + print "i.e., on the server where PostgresQL is running.\n"; + + exit 1; + } + + print "In order to use a PostgresQL database running on a remote server,\n"; + print "Please run this installer on that host with the \"--pg_only\" option:\n\n"; + print "./install --pg_only\n\n"; + + print "Press any key to continue the installation process once that has been\n"; + print "done. 
Or press ctrl-C to exit the installer.\n\n"; + + chdir ("/tmp"); + system "stty cbreak /dev/tty 2>&1"; + my $key = getc(STDIN); + system "stty -cbreak /dev/tty 2>&1"; + print "\n"; + chdir ($cwd); + + # Check if the role and database have been created on the remote server: + # -- TODO; + + # Find out what Postgres version is running remotely: + + $pg_major_version = 9; + $pg_minor_version = 1; + + print "What version of PostgresQL is installed on the remote server?\n [" . $pg_major_version . "." . $pg_minor_version . "] "; + + + my $postgresVersion = <>; chop $postgresVersion; + + while ( $postgresVersion ne "" && !($postgresVersion =~/^[0-9]+\.[0-9]+$/) ) + { + print "Please enter valid Postgres version!\n"; + print "(or ctrl-C to exit the installer)\n"; + $postgresVersion = <>; chop $postgresVersion; + } + + unless ( $postgresVersion eq "" ) + { + my (@postgres_version_tokens) = split ( '\.', $postgresVersion ); + + unless ( ($postgres_version_tokens[0] == 8 && $postgres_version_tokens[1] >= 3) || ($postgres_version_tokens[0] >= 9) ) + { + print STDERR "\nERROR: PostgresQL version 8.3, or newer, is required!\n"; + print STDERR "Please make sure the right version of PostgresQL is properly installed\n"; + print STDERR "on the remote server, then try again.\n"; + + exit 1; + } + + $pg_major_version = $postgres_version_tokens[0]; + $pg_minor_version = $postgres_version_tokens[1]; + } + +} + + +if ( $postgresonly ) +{ + print "\nOK, done.\n"; + print "You can now resume the installation on the main DVN host.\n\n"; + + exit 0; +} + + +# 5. CONFIGURE GLASSFISH + +print "\nProceeding with the Glassfish setup.\n"; +print "\nChecking your Glassfish installation..."; + +my $glassfish_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; + +# 5a. CHECK IF GLASSFISH DIR LOOKS OK: + +print $glassfish_dir."/glassfish/domains/domain1"; + +unless ( -d $glassfish_dir."/glassfish/domains/domain1" ) +{ + # TODO: need better check than this + + while ( ! 
( -d $glassfish_dir."/glassfish/domains/domain1" ) ) + { + print "\nInvalid Glassfish directory " . $glassfish_dir . "!\n"; + print "Enter the root directory of your Glassfish installation:\n"; + print "(Or ctrl-C to exit the installer): "; + + $glassfish_dir = <>; + chop $glassfish_dir; + } +} + +print "OK!\n"; + +# 5b. DETERMINE HOW MUCH MEMORY TO GIVE TO GLASSFISH AS HEAP: + +$gf_heap_default = "2048m"; +$sys_mem_total = 0; + +if ( -e "/proc/meminfo" && open MEMINFO, "/proc/meminfo" ) +{ + # Linux + + while ( $mline = ) + { + if ( $mline =~ /MemTotal:[ \t]*([0-9]*) kB/ ) + { + $sys_mem_total = $1; + } + } + + close MEMINFO; + +} +elsif ( -x "/usr/sbin/sysctl" ) +{ + # MacOS X, probably... + + $sys_mem_total = `/usr/sbin/sysctl -n hw.memsize`; + chop $sys_mem_total; + if ($sys_mem_total > 0) + { + $sys_mem_total = int ($sys_mem_total / 1024); + # size in kb + } +} + +if ( $sys_mem_total > 0 ) +{ + # setting the default heap size limit to 3/8 of the available + # amount of memory: + $gf_heap_default = ( int ($sys_mem_total / (8 / 3 * 1024) ) ); + + print "\nSetting the heap limit for Glassfish to " . $gf_heap_default . "MB. \n"; + print "You may need to adjust this setting to better suit \n"; + print "your system.\n\n"; + + $gf_heap_default .= "m"; + +} +else +{ + print "\nCould not determine the amount of memory on your system.\n"; + print "Setting the heap limit for Glassfish to 2GB. You may need \n"; + print "to adjust the value to better suit your system.\n\n"; +} + +push @CONFIG_VARIABLES, "DEF_MEM_SIZE"; +$CONFIG_DEFAULTS{"DEF_MEM_SIZE"} = $gf_heap_default; + +print "\nPress any key to continue...\n\n"; + +system "stty cbreak /dev/tty 2>&1"; + my $key = getc(STDIN); + system "stty -cbreak /dev/tty 2>&1"; + print "\n"; + +# 5c. GENERATE GLASSFISH CONFIGURATION FILE: + +print "\nWriting glassfish configuration file (domain.xml)... "; + +# 5cc. 
FIND THE "special-admin-indicator" IN THE ORIGINAL GLASSFISH CONFIG: + +open ( GFCNFG, $glassfish_dir."/glassfish/domains/domain1/config/domain.xml") || die $@; + +while ( ) +{ + if (/domain.xml'; + +while( ) +{ + for $ENTRY (@CONFIG_VARIABLES) + { + $patin = '%' . $ENTRY . '%'; + $patout = $CONFIG_DEFAULTS{$ENTRY}; + + s/$patin/$patout/g; + } + + print CONFIGOUT $_; + +} + +close TEMPLATEIN; +close CONFIGOUT; + +print "done.\n"; + +system ("/bin/cp -f domain.xml ".$glassfish_dir."/glassfish/domains/domain1/config"); +#diagnostics needed! + +# check if the supllied config files are in the right place: + +unless ( -f "config/logging.properties" ) +{ + print "\nERROR! Configuration files not found in config dir!\n"; + print "(are you running the installer in the right directory?\n"; + print "Aborting...\n"; + exit 1; +} + +print "\nCopying additional configuration files... "; + +system ( "/bin/cp -Rf config/* ".$glassfish_dir."/glassfish/domains/domain1/config"); +#diagnostics needed! + +# install pre-configured robots.txt blocking bot crawlers: +system ( "/bin/cp -f robots.txt ".$glassfish_dir."/glassfish/domains/domain1/docroot"); + +# install the DVN guides (HTML) into the application docroot: +system ( "/bin/cp -Rf doc/guides/* ".$glassfish_dir."/glassfish/domains/domain1/docroot/guides"); + + +print "done!\n"; + +print "\nInstalling the Glassfish PostgresQL driver... 
"; + +my $install_driver_jar = ""; + +if ( $pg_major_version == 8 ) +{ + if ( $pg_minor_version == 3 ) + { + $install_driver_jar = $POSTGRES_DRIVER_8_3; + } + elsif ( $pg_minor_version == 4 ) + { + $install_driver_jar = $POSTGRES_DRIVER_8_4; + } +} +elsif ( $pg_major_version == 9 ) +{ + if ( $pg_minor_version == 0 ) + { + $install_driver_jar = $POSTGRES_DRIVER_9_0; + } + elsif ( $pg_minor_version == 1 ) + { + $install_driver_jar = $POSTGRES_DRIVER_9_1; + } +} + +=poc +unless ( $install_driver_jar ) +{ + die "Installer could not find POSTGRES JDBC driver for your version of PostgresQL!\n"; + +} +=cut + +system ( "/bin/cp", "pgdriver/" . $install_driver_jar, $glassfish_dir."/glassfish/lib"); +#diagnostics needed! + +print "done!\n"; + +# 5d. STOP GLASSFISH (OK IF NOT RUNNING): +print "\nStopping glassfish...\n"; + +unless ( ($exit_code=system ($glassfish_dir."/bin/asadmin stop-domain domain1")) == 0 ) +{ + print STDERR "(that's OK!)\n"; +} + +# 5dd. INSTALL PATCHED WEBCORE GLASSFISH MODULE: + +$gf_webcore_jar = $glassfish_dir."/glassfish/modules/web-core.jar"; + +system ("/bin/mv -f ".$gf_webcore_jar . " " . $gf_webcore_jar.".PRESERVED"); +system ("/bin/cp web-core.jar ".$gf_webcore_jar); + +# 5ddd. DELETE EJB TIMER APP LOCK FILE, if exists (just in case!): + +system ( "/bin/rm -f ".$glassfish_dir."/glassfish/domains/domain1/generated/ejb-timer-service-app" ); + +# 5e. START GLASSFISH: +print "\nStarting glassfish.\n"; + +unless ( ($exit_code=system ($glassfish_dir."/bin/asadmin start-domain domain1")) == 0 ) +{ + print STDERR "Could not start glassfish!\n"; + print STDERR "(exit code: " . $exitcode . ")\n"; + exit 1; +} + + +# check if glassfish is running: +# TODO. + +# 6. DEPLOY APPLICATION: +# 6a. DO WE HAVE ANT? +# (we are no longer using ant to deply -- L.A.) +# +#$sys_path = $ENV{'PATH'}; +#@sys_path_dirs = split ( ":", $sys_path ); + +#$ant_exec = ""; +# +#for $sys_path_dir ( @sys_path_dirs ) +#{ +# if ( -x $sys_path_dir . 
"/ant" ) +# { +# $ant_exec = $sys_path_dir . "/ant"; +# last; +# } +#} +# +#if ( $ant_exec eq "" ) +#{ +# print STDERR "\nERROR: I haven't been able to find ant command in your PATH!\n"; +# print STDERR "Please make sure and is installed and in your PATH; then try again.\n\n"; +# +# exit 1; +#} +# 6b. TRY TO DEPLOY: + +print "\nAttempting to deploy the application:\n\n"; + +$CONFIG_DEFAULTS{'GLASSFISH_ADMIN_PASSWORD'} = 'adminadmin'; +# TODO: ask for password! -- in case they have already changed it +# (update: chances are we don't even need the password anymore, as +# long as we are deploying locally (?)) + +my $glassfish_password = $CONFIG_DEFAULTS{'GLASSFISH_ADMIN_PASSWORD'}; + +# create deployment properties files: +# (these properties files are no longer used, because we are no longer +# using ant to deploy the app. -- L.A.) + +#for $prop_file ('AS', 'glassfish') +#{ +# open ( TEMPLIN, "appdeploy/" . $prop_file . ".properties.TEMPLATE" ) +# || die "failed to open appdeploy/" . $prop_file . ".properties.TEMPLATE"; +# open ( PROPOUT, ">appdeploy/" . $prop_file . ".properties" ) +# || die "failed to open appdeploy/" . $prop_file . ".properties for writing"; +# +# while( ) +# { +# s/%GF_ADMIN_PASSWORD%/$glassfish_password/g; +# s/%GF_ROOT_DIR%/$glassfish_dir/g; +# print PROPOUT $_; +# } +# +# close TEMPLIN; +# close PROPOUT; +#} + +# Create the .asadminpass file, or replace it, if exists: + +$asadminpass_file = $ENV{'HOME'} . "/.asadminpass"; + +if ( -e $asadminpass_file ) +{ + system ("/bin/mv -f " . $asadminpass_file . " " . $asadminpass_file . ".PRESERVED"); +} + +system ("echo 'asadmin://admin@localhost:4848 ' > " . $asadminpass_file); + +$deploy_command = $glassfish_dir."/bin/asadmin deploy --force=true --name=DVN-web dist/DVN-web.war"; + +unless ( ($exit_code = system ("cd appdeploy; " . $deploy_command)) == 0 ) +{ + print STDERR "Could not deploy DVN application!\n"; + print STDERR "(exit code: " . $exitcode . 
")\n"; + exit 1; +} + +if ( $pg_local_connection ) +{ + print "\nOK; now we are going to stop glassfish and populate the database with\n"; + print "some initial content, then start glassfish again.\n"; +} +else +{ + print "\nOK; stopping glasfish.\n"; +} + + +# 6c. SHUT DOWN: + +$gf_stop_command = $glassfish_dir."/bin/asadmin stop-domain domain1"; + +unless ( ($exit_code = system ($gf_stop_command)) == 0 ) +{ + print STDERR "Could not stop glassfish!\n"; + print STDERR "(command line: " . $gf_stop_command . ")\n"; + print STDERR "(exit code: " . $exitcode . ")\n"; + print STDERR "\nPlease finish the installation process manually: \n"; + print STDERR "stop/kill glassfish; then populate the database with \n"; + print STDERR "the supplied initial content, by executing the following \n"; + print STDERR "command, *as Unix user postgres*: \n\n"; + + $psql_command = $psql_exec . "/psql -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f " . $SQL_REFERENCE_DATA; + + print STDERR $psql_command . "\n\n"; + print STDERR "Then start glassfish again... Voila, you should then have \n"; + print STDERR "a running DVN instance at the following URL:\n\n"; + print STDERR " http://" . $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} . "/dvn\n"; + + print STDERR "\naborting the installer... (sorry!)\n"; + + exit 1; +} + +# 7. POPULATE DATABASE: + +if ( $pg_local_connection ) +{ + # 7a. POPULATE LOCALLY: + print "\nPopulating the database (local PostgresQL instance):\n\n"; + + # Copy the SQL file to /tmp, where user postgres will definitely + # have read access to it: + + copy("referenceData.sql","/tmp") or die "Could not copy referenceData.sql to /tmp: $!"; + + $< = $POSTGRES_SYS_UID; + $> = $POSTGRES_SYS_UID; + chdir ("/tmp"); + $psql_command = $psql_exec . "/psql -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f referenceData.sql"; + + unless ( ($exitcode = system("$psql_command")) == 0 ) + { + print STDERR "Could not populate Postgres database for the DVN app!\n"; + print STDERR "(command: " . 
$psql_command . ")\n"; + print STDERR "(psql exit code: " . $exitcode . ")\n"; + print STDERR "\nYou must populate the database before you can use your new\n"; + print STDERR "DVN instance. Please consult the installation manual and/or\n"; + print STDERR "seek support from the DVN team.\n\n"; + exit 1; + + } + + chdir ($cwd); + print "\nOK, done!\n"; + +} +else +{ + # 7b. INSTRUCT THE USER TO POPULATE THE DB ON THE REMOTE SERVER: + # NOT SUPPORTED YET -- TODO + print "The database needs to be populated with some intial content \n"; + print "before we restart the DVN one last time. \n"; + print "However, populating a database on a remote PostgresQL server "; + print "is not supported yet!\n"; + print "Please copy the file referenceData.sql (found in this directory)\n"; + print "onto the remote server and populate the database manually,\n"; + print "as user postgres, with the following command:\n\n"; + print " psql -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f referenceData.sql\n"; + print "then start glassfish again on this server with \n\n"; + print " " . $glassfish_dir."/bin/asadmin start-domain domain1\n\n"; + + $> = 0; + $< = 0; + + exit 0; + +} + +# back to root: + +$> = 0; +$< = 0; + +# 8. START GLASSFISH AGAIN: +print "\nStarting glassfish, again:\n\n"; + +$gf_start_command = $glassfish_dir."/bin/asadmin start-domain domain1"; + +# delete the EJB TIMER app lock file, if exists (just in case!): +system ( "/bin/rm -f ".$glassfish_dir."/glassfish/domains/domain1/generated/ejb-timer-service-app" ); + +unless ( ($exit_code = system ($gf_start_command)) == 0 ) +{ + print STDERR "Could not start glassfish!\n"; + print STDERR "(command line: " . $gf_start_command . ")\n"; + print STDERR "(exit code: " . $exit_code . ")\n"; + exit 1; +} + + +print "\nYou should now have a running DVN instance;\n"; +print "Please go to the application at the following URL:\n\n"; +print " http://" . $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} . 
"/dvn\n"; +print "\nand log in by using \"networkAdmin\" as both the user name\n"; +print "and password. Click the \"networkAdmin\" link on the right side\n"; +print "Of the main screen, then click \"Update Account\". Change this\n"; +print "default password and default e-mail address.\n"; + +# 9. FINALLY, CHECK IF RSERVE IS RUNNING: +print "\n\nFinally, checking if Rserve is running and accessible...\n"; + +unless ( $CONFIG_DEFAULTS{'RSERVE_PORT'}=~/^[0-9][0-9]*$/ ) +{ + print $CONFIG_DEFAULTS{'RSERVE_HOST'} . " does not look like a valid port number,\n"; + print "defaulting to 6311.\n\n"; + + $CONFIG_DEFAULTS{'RSERVE_PORT'} = 6311; +} + +my ( $rserve_iaddr, $rserve_paddr, $rserve_proto ); + +unless ( $rserve_iaddr = inet_aton($CONFIG_DEFAULTS{'RSERVE_HOST'}) ) +{ + print STDERR "Could not look up $CONFIG_DEFAULTS{'RSERVE_HOST'},\n"; + print STDERR "the host you specified as your R server.\n"; + print STDERR "\nDVN can function without a working R server, but\n"; + print STDERR "much of the functionality concerning running statistics\n"; + print STDERR "and analysis on quantitative data will not be available.\n"; + print STDERR "Please consult the Installers guide for more info.\n"; + + exit 0; +} + +$rserve_paddr = sockaddr_in($CONFIG_DEFAULTS{'RSERVE_PORT'}, $rserve_iaddr); +$rserve_proto = getprotobyname('tcp'); + +unless ( socket(SOCK, PF_INET, SOCK_STREAM, $rserve_proto) && + connect(SOCK, $rserve_paddr) ) +{ + print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'RSERVE_HOST'}\n"; + print STDERR "on port $CONFIG_DEFAULTS{'RSERVE_PORT'}, the address you provided\n"; + print STDERR "for your R server.\n"; + print STDERR "DVN can function without a working R server, but\n"; + print STDERR "much of the functionality concerning running statistics\n"; + print STDERR "and analysis on quantitative data will not be available.\n"; + print STDERR "Please consult the \"Installing R\" section in the Installers guide\n"; + print STDERR "for more info.\n"; + + 
exit 0; + +} + +close (SOCK); +print "\nOK!\n"; + +exit 0; + + +sub create_pg_hash { + local $pg_username = shift @_; + local $pg_password = shift @_; + + $encode_line = $pg_password . $pg_username; + + # for Redhat: + + ##print STDERR "executing /bin/echo -n $encode_line | md5sum\n"; + + if ( $WORKING_OS eq "MacOSX" ) + { + $hash = `/bin/echo -n $encode_line | md5`; + } + else + { + $hash = `/bin/echo -n $encode_line | md5sum`; + } + + chop $hash; + + $hash =~s/ \-$//; + + if ( (length($hash) != 32) || ($hash !~ /^[0-9a-f]*$/) ) + { + print STDERR "Failed to generate a MD5-encrypted password hash for the Postgres database.\n"; + exit 1; + } + + + return $hash; +} diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/install~ --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/install~ Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,1183 @@ +#!/usr/bin/perl + +use Getopt::Long; +use Socket; +use File::Copy; + +my( %opts ) = ( ); +my( $rez ) = GetOptions( \%opts, "pg_only!"); + +my $postgresonly = 0; + +my @CONFIG_VARIABLES = (); + +unless ($opts{pg_only}) +{ + @CONFIG_VARIABLES = ( + 'HOST_DNS_ADDRESS', + 'GLASSFISH_DIRECTORY', + 'MAIL_SERVER', + + 'POSTGRES_SERVER', + 'POSTGRES_PORT', + 'POSTGRES_DATABASE', + 'POSTGRES_USER', + 'POSTGRES_PASSWORD', + + 'RSERVE_HOST', + 'RSERVE_PORT', + 'RSERVE_USER', + 'RSERVE_PASSWORD' + + ); +} +else +{ + @CONFIG_VARIABLES = ( + 'POSTGRES_SERVER', + 'POSTGRES_PORT', + 'POSTGRES_DATABASE', + 'POSTGRES_USER', + 'POSTGRES_PASSWORD' + ); + + $postgresonly = 1; +} + +my %CONFIG_DEFAULTS = + ( + 'HOST_DNS_ADDRESS', '', + 'GLASSFISH_DIRECTORY', '/home/glassfish/glassfish', + 'MAIL_SERVER', 'localhost', + + 'POSTGRES_SERVER', 'localhost', + 'POSTGRES_PORT', 5432, + 'POSTGRES_DATABASE','dvnDb', + 'POSTGRES_USER', 'postgres', + 'POSTGRES_PASSWORD','admin', + + 'RSERVE_HOST', 'localhost', + 'RSERVE_PORT', 6311, + 'RSERVE_USER', 'rserve', + 'RSERVE_PASSWORD', 'rserve' + + ); + + +my %CONFIG_PROMPTS = + ( + 
'HOST_DNS_ADDRESS', 'Internet Address of your host', + 'GLASSFISH_DIRECTORY', 'Glassfish Directory', + 'MAIL_SERVER', 'SMTP (mail) server to relay notification messages', + + 'POSTGRES_SERVER', 'Postgres Server', + 'POSTGRES_PORT', 'Postgres Server Port', + 'POSTGRES_DATABASE','Name of the Postgres Database', + 'POSTGRES_USER', 'Name of the Postgres User', + 'POSTGRES_PASSWORD','Postgres user password', + + 'RSERVE_HOST', 'Rserve Server', + 'RSERVE_PORT', 'Rserve Server Port', + 'RSERVE_USER', 'Rserve User Name', + 'RSERVE_PASSWORD', 'Rserve User Password' + + ); + +# Supported Posstgres JDBC drivers: +# (have to be configured explicitely, so that Perl "taint" (security) mode +# doesn't get paranoid) + +my $POSTGRES_DRIVER_8_3 = "postgresql-8.3-603.jdbc4.jar"; +#my $POSTGRES_DRIVER_8_4 = "postgresql-8.4-703.jdbc4.jar"; +my $POSTGRES_DRIVER_8_4 = "postgresql-8.3-603.jdbc4.jar"; +my $POSTGRES_DRIVER_9_0 = "postgresql-9.0-802.jdbc4.jar"; +my $POSTGRES_DRIVER_9_1 = "postgresql-9.1-902.jdbc4.jar"; + + +# A few preliminary checks: + +# user -- must be root: + +$user_real = `who am i`; +chop $user_real; +$user_real =~s/ .*$//; + +if ( $< != 0 ) +{ + print STDERR "\nERROR: You must be logged in as root to run the installer.\n\n"; + exit 1; +} + +# OS: + +my $uname_out = `uname -a`; + +# hostname: + +my $hostname_from_cmdline = `hostname`; +chop $hostname_from_cmdline; + +$CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} = $hostname_from_cmdline; + + +print "\nWelcome to the DVN installer.\n"; +unless ($opts{pg_only}) +{ + print "You will be guided through the process of setting up a NEW\n"; + print "instance of the DVN application\n"; +} +else +{ + print "You will be guided through the process of configuring the\n"; + print "LOCAL instance of PostgreSQL database for use by the DVN\n"; + print "application.\n"; +} + +my @uname_tokens = split (" ", $uname_out); + +if ( $uname_tokens[0] eq "Darwin" ) +{ + print "\nThis appears to be a MacOS X system; good.\n"; + # TODO: check the OS 
version + + $WORKING_OS = "MacOSX"; +} +elsif ( $uname_tokens[0] eq "Linux" ) +{ + if ( -f "/etc/redhat-release" ) + { + print "\nThis appears to be a RedHat system; good.\n"; + $WORKING_OS = "RedHat"; + # TODO: check the distro version + } + else + { + print "\nThis appears to be a non-RedHat Linux system;\n"; + print "this installation *may* succeed; but we're not making any promises!\n"; + $WORKING_OS = "Linux"; + } +} +else +{ + print "\nWARNING: This appears to be neither a Linux or MacOS X system!\n"; + print "This installer script will most likely fail. Please refer to the\n"; + print "DVN Installers Guide for more information.\n\n"; + + $WORKING_OS = "Unknown"; + + print "Do you wish to continue?\n [y/n] "; + + + my $yesnocont = <>; chop $yesnocont; + + while ( $yesnocont ne "y" && $yesnocont ne "n" ) + { + print "Please enter 'y' or 'n'!\n"; + print "(or ctrl-C to exit the installer)\n"; + $yesnocont = <>; chop $yesnocont; + } + + if ( $yesnocont eq "n" ) + { + exit 0; + } + +} + + ENTERCONFIG: + +print "\n"; +print "Please enter the following configuration values:\n"; +print "(hit [RETURN] to accept the default value)\n"; +print "\n"; + +for $ENTRY (@CONFIG_VARIABLES) +{ + print $CONFIG_PROMPTS{$ENTRY} . ": "; + print "[" . $CONFIG_DEFAULTS{$ENTRY} . "] "; + + $user_entry = <>; + chop $user_entry; + + if ($user_entry ne "") + { + $CONFIG_DEFAULTS{$ENTRY} = $user_entry; + } + + print "\n"; +} + +# CONFIRM VALUES ENTERED: + + +print "\nOK, please confirm what you've entered:\n\n"; + +for $ENTRY (@CONFIG_VARIABLES) +{ + print $CONFIG_PROMPTS{$ENTRY} . ": " . $CONFIG_DEFAULTS{$ENTRY} . "\n"; +} + +print "\nIs this correct? [y/n] "; + + +my $yesno = <>; chop $yesno; + +while ( $yesno ne "y" && $yesno ne "n" ) +{ + print "Please enter 'y' or 'n'!\n"; + print "(or ctrl-C to exit the installer)\n"; + $yesno = <>; chop $yesno; +} + +if ( $yesno eq "n" ) +{ + goto ENTERCONFIG; +} + +# VALIDATION/VERIFICATION OF THE CONFIGURATION VALUES: +# 1. 
VERIFY MAIL SERVER THEY CONFIGURED: + +=pod + +unless ( $postgresonly ) +{ + + my ( $mail_server_iaddr, $mail_server__paddr, $mail_server_proto, $mail_server_status ); + + $mail_server_status = 1; + + unless ( $mail_server_iaddr = inet_aton($CONFIG_DEFAULTS{'MAIL_SERVER'}) ) + { + print STDERR "Could not look up $CONFIG_DEFAULTS{'MAIL_SERVER'},\n"; + print STDERR "the host you specified as your mail server.\n"; + $mail_server_status = 0; + } + + if ($mail_server_status) { + $mail_server_paddr = sockaddr_in(25, $mail_server_iaddr); + $mail_server_proto = getprotobyname('tcp'); + + unless ( socket(SOCK, PF_INET, SOCK_STREAM, $mail_server_proto) && + connect(SOCK, $mail_server_paddr) ) + { + print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'MAIL_SERVER'},\n"; + print STDERR "the address you provided for your Mail server.\n"; + print STDERR "Please select a valid mail server, and try again.\n\n"; + + $mail_server_status = 0; + } + + } + + close (SOCK); + + unless ($mail_server_status) + { + goto ENTERCONFIG; + } +} +=cut + +# 2. CHECK IF THE WAR FILE IS AVAILABLE: +unless ( -f "appdeploy/dist/DVN-web.war" ) +{ + print "\nWARNING: Can't find the project .war file in appdeploy/dist/!\n"; + print "\tAre you running the installer in the right directory?\n"; + print "\tHave you built the war file?\n"; + print "\t(if not, build the project and run the installer again)\n"; + + exit 0; +} + +# check the working (installer) dir: +my $cwd; +chomp($cwd = `pwd`); + +# 2b. 
CHECK IF THE SQL TEMPLATE IS IN PLACE AND CREATE THE SQL FILE + +$SQL_REFERENCE_DATA = "referenceData.sql"; +$SQL_REFERENCE_TEMPLATE = "referenceData.sql.TEMPLATE"; + +unless ( -f $SQL_REFERENCE_TEMPLATE ) +{ + print "\nWARNING: Can't find .sql data template!\n"; + print "(are you running the installer in the right directory?)\n"; + + exit 0; +} + +open DATATEMPLATEIN, $SQL_REFERENCE_TEMPLATE || die $@; +open SQLDATAOUT, '>'.$SQL_REFERENCE_DATA || die $@; + +while( ) +{ + s/%POSTGRES_USER%/$CONFIG_DEFAULTS{'POSTGRES_USER'}/g; + print SQLDATAOUT $_; +} + + +close DATATEMPLATEIN; +close SQLDATAOUT; + +# 3. CHECK POSTGRES AVAILABILITY: + +my $pg_local_connection = 0; + +if ( $CONFIG_DEFAULTS{'POSTGRES_SERVER'} eq 'localhost' ) +{ + $pg_local_connection = 1; + + # 3a. CHECK FOR USER postgres: + + print "\nChecking system user \"postgres\"... "; + + $POSTGRES_SYS_NAME = "postgres"; + $POSTGRES_SYS_UID = (getpwnam ("postgres"))[2]; + + if ($POSTGRES_SYS_UID == undef) { + print STDERR "\nERROR: I haven't been able to find user \"postgres\" on the system!\n"; + print STDERR "(TODO: prompt the user instead to supply an alternative username, if\n"; + print STDERR "available)\n"; + + exit 1; + } + + print "OK.\n"; + + # 3b. LOCATE THE EXECUTABLE: + + $sys_path = $ENV{'PATH'}; + @sys_path_dirs = split ( ":", $sys_path ); + + $psql_exec = ""; + + for $sys_path_dir ( @sys_path_dirs ) + { + if ( -x $sys_path_dir . "/psql" ) + { + $psql_exec = $sys_path_dir; + last; + } + } + + $pg_major_version = 0; + $pg_minor_version = 0; + + if ( $psql_exec eq "" && $WORKING_OS eq "MacOSX" ) + { + for $pg_minor_version ( "1", "0" ) + { + if ( -x "/Library/PostgreSQL/9." . $pg_minor_version . "/bin/psql" ) + { + $pg_major_version = 9; + $psql_exec = "/Library/PostgreSQL/9." . $pg_minor_version . "/bin"; + last; + } + } + if (!$pg_major_version) + { + for $pg_minor_version ( "4", "3" ) + { + if ( -x "/Library/PostgreSQL/8." . $pg_minor_version . 
"/bin/psql" ) + { + $pg_major_version = 8; + $psql_exec = "/Library/PostgreSQL/8." . $pg_minor_version . "/bin"; + last; + } + } + } + } + + if ( $psql_exec eq "" ) + { + print STDERR "\nERROR: I haven't been able to find the psql command in your PATH!\n"; + print STDERR "Please make sure PostgresQL is properly installed and try again.\n\n"; + + exit 1; + } + + + + # 3c. CHECK POSTGRES VERSION: + + open (PSQLOUT, $psql_exec . "/psql --version|"); + + $psql_version_line = ; + chop $psql_version_line; + close PSQLOUT; + + my ($postgresName, $postgresNameLong, $postgresVersion) = split ( " ", $psql_version_line ); + + unless ( $postgresName eq "psql" && $postgresVersion =~ /^[0-9][0-9\.]*$/ ) + { + print STDERR "\nERROR: Unexpected output from psql command!\n"; + print STDERR "Please make sure PostgresQL is properly installed and try again.\n\n"; + + exit 1; + } + + + my (@postgres_version_tokens) = split ( '\.', $postgresVersion ); + + unless ( ($postgres_version_tokens[0] == 8 && $postgres_version_tokens[1] >= 3) || ($postgres_version_tokens[0] >= 9) ) + { + print STDERR "\nERROR: PostgresQL version 8.3, or newer, is required!\n"; + print STDERR "Found a copy of psql ($psql_exec/psql) that belongs to version $postgresVersion.\n\n"; + print STDERR "Please make sure the right version of PostgresQL is properly installed,\n"; + print STDERR "and the right version of psql comes first in the PATH,\n"; + print STDERR "then try again.\n"; + + exit 1; + } + + print "\n\nFound Postgres psql command, version $postgresVersion. Good.\n\n"; + + $pg_major_version = $postgres_version_tokens[0]; + $pg_minor_version = $postgres_version_tokens[1]; + + # 4. CONFIGURE POSTGRES: + + print "\nConfiguring Postgres Database:\n"; + + + + $< = $POSTGRES_SYS_UID; + $> = $POSTGRES_SYS_UID; + + # 4a. 
CHECK IF POSTGRES IS RUNNING: + print "Checking if a local instance of Postgres is running and accessible...\n"; + + # (change to /tmp before executing the command below - + # we are trying to do it as user postgres, and it may not have + # access to the current, installer directory; the command would still + # work, but there would be an error message from the shell init on screen + # - potentially confusing) + chdir ("/tmp"); + + if (!system ($psql_exec . "/psql -c 'SELECT * FROM pg_roles' > /dev/null 2>&1")) + { + print "Yes, it is.\n"; + } + else + { + print "Nope, I haven't been able to connect to the local instance of PostgresQL.\n"; + print "daemon. Is postgresql running? \n"; + print "On a RedHat system, you can check the status of the daemon with\n\n"; + print " service postgresql status\n\n"; + print "and, if it's not running, start the daemon with\n\n"; + print " service postgresql start\n\n"; + print "On MacOSX, use Applications -> PostgresQL -> Start Server.\n"; + print "Also, please make sure that the daemon is listening to network connections,\n"; + print "at leaset on the localhost interface. (See \"Installing Postgres\" section\n"; + print "of the installation manual).\n"; + print "Finally, please make sure that the postgres user can make localhost \n"; + print "connections without supplying a password. (That's controlled by the \n"; + print "\"localhost ... ident\" line in pg_hba.conf; again, please consult the \n"; + print "installation manual).\n"; + + + exit 1; + } + + + # 4c. CHECK IF THIS DB ALREADY EXISTS: + + $psql_command_dbcheck = $psql_exec . "/psql -c \"\" -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ">/dev/null 2>&1"; + if ( ($exitcode = system($psql_command_dbcheck)) == 0 ) + { + # switch back to root uid: + $> = 0; + $< = 0; + chdir ($cwd); + + print "WARNING! Database " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . 
" already exists!\n"; + print "\nPlease note that you can only use this installer to create a blank, \n"; + print "new and shiny DVN database. I.e., you cannot install on top of an \n"; + print "existing database. Please enter a different name for the DVN database.\n"; + print "\nPress any key to continue, or ctrl-C to exit the installer...\n\n"; + + system "stty cbreak /dev/tty 2>&1"; + my $key = getc(STDIN); + system "stty -cbreak /dev/tty 2>&1"; + print "\n"; + + goto ENTERCONFIG; + + } + + # 4d. CHECK IF THIS USER ALREADY EXISTS: + + $psql_command_rolecheck = $psql_exec . "/psql -c \"\" -d postgres " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " >/dev/null 2>&1"; + if ( ($exitcode = system($psql_command_rolecheck)) == 0 ) + { + print "User (role) . " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " already exists;\n"; + print "Proceeding."; + } + else + { + # 4e. CREATE DVN DB USER: + + print "\nCreating Postgres user (role) for the DVN:\n"; + + open TMPCMD, ">/tmp/pgcmd.$$.tmp"; + + # with unencrypted password: + #print TMPCMD "CREATE ROLE ".$CONFIG_DEFAULTS{'POSTGRES_USER'}." UNENCRYPTED PASSWORD '".$CONFIG_DEFAULTS{'POSTGRES_PASSWORD'}."' NOSUPERUSER CREATEDB CREATEROLE NOINHERIT LOGIN"; + + # with md5-encrypted password: + $pg_password_md5 = &create_pg_hash ($CONFIG_DEFAULTS{'POSTGRES_USER'},$CONFIG_DEFAULTS{'POSTGRES_PASSWORD'}); + my $sql_command = "CREATE ROLE \"".$CONFIG_DEFAULTS{'POSTGRES_USER'}."\" PASSWORD 'md5". $pg_password_md5 ."' NOSUPERUSER CREATEDB CREATEROLE INHERIT LOGIN"; + + print TMPCMD $sql_command; + close TMPCMD; + + my $psql_commandline = $psql_exec . "/psql -f /tmp/pgcmd.$$.tmp"; + + unless ( ($exitcode = system($psql_commandline)) == 0 ) + { + print STDERR "Could not create the DVN Postgres user role!\n"; + print STDERR "(SQL: " . $sql_command . ")\n"; + print STDERR "(psql exit code: " . $exitcode . ")\n"; + exit 1; + } + + unlink "/tmp/pgcmd.$$.tmp"; + print "done.\n"; + } + + # 4f. 
CREATE DVN DB: + + print "\nCreating Postgres database:\n"; + + $psql_command = $psql_exec . "/createdb ".$CONFIG_DEFAULTS{'POSTGRES_DATABASE'}." --owner=".$CONFIG_DEFAULTS{'POSTGRES_USER'}; + + unless ( ($exitcode = system("$psql_command")) == 0 ) + { + print STDERR "Could not create Postgres database for the DVN app!\n"; + print STDERR "(command: " . $psql_command . ")\n"; + print STDERR "(psql exit code: " . $exitcode . ")\n"; + print STDERR "\naborting the installation (sorry!)\n\n"; + exit 1; + } + + +# Changing back to root UID: + + $> = 0; + $< = 0; + + chdir ($cwd); + +} +else +{ + if (0) # DEV. INSTALLER ONLY: + { + print "\nIt is strongly recommended that you use a local PostgresQL server,\n"; + print "running on localhost, in your development environment!\n\n"; + + print "Do you wish to continue?\n [y/n] "; + + + my $yesnocont = <>; chop $yesnocont; + + while ( $yesnocont ne "y" && $yesnocont ne "n" ) + { + print "Please enter 'y' or 'n'!\n"; + print "(or ctrl-C to exit the installer)\n"; + $yesnocont = <>; chop $yesnocont; + } + + if ( $yesnocont eq "n" ) + { + print "(aborting the installation)\n". + exit 0; + } + } + + if ( $opts{$pg_only} ) + { + print "The script must be run in the --pg_only mode ONLY locally,\n"; + print "i.e., on the server where PostgresQL is running.\n"; + + exit 1; + } + + print "In order to use a PostgresQL database running on a remote server,\n"; + print "Please run this installer on that host with the \"--pg_only\" option:\n\n"; + print "./install --pg_only\n\n"; + + print "Press any key to continue the installation process once that has been\n"; + print "done. 
Or press ctrl-C to exit the installer.\n\n"; + + chdir ("/tmp"); + system "stty cbreak /dev/tty 2>&1"; + my $key = getc(STDIN); + system "stty -cbreak /dev/tty 2>&1"; + print "\n"; + chdir ($cwd); + + # Check if the role and database have been created on the remote server: + # -- TODO; + + # Find out what Postgres version is running remotely: + + $pg_major_version = 9; + $pg_minor_version = 1; + + print "What version of PostgresQL is installed on the remote server?\n [" . $pg_major_version . "." . $pg_minor_version . "] "; + + + my $postgresVersion = <>; chop $postgresVersion; + + while ( $postgresVersion ne "" && !($postgresVersion =~/^[0-9]+\.[0-9]+$/) ) + { + print "Please enter valid Postgres version!\n"; + print "(or ctrl-C to exit the installer)\n"; + $postgresVersion = <>; chop $postgresVersion; + } + + unless ( $postgresVersion eq "" ) + { + my (@postgres_version_tokens) = split ( '\.', $postgresVersion ); + + unless ( ($postgres_version_tokens[0] == 8 && $postgres_version_tokens[1] >= 3) || ($postgres_version_tokens[0] >= 9) ) + { + print STDERR "\nERROR: PostgresQL version 8.3, or newer, is required!\n"; + print STDERR "Please make sure the right version of PostgresQL is properly installed\n"; + print STDERR "on the remote server, then try again.\n"; + + exit 1; + } + + $pg_major_version = $postgres_version_tokens[0]; + $pg_minor_version = $postgres_version_tokens[1]; + } + +} + + +if ( $postgresonly ) +{ + print "\nOK, done.\n"; + print "You can now resume the installation on the main DVN host.\n\n"; + + exit 0; +} + + +# 5. CONFIGURE GLASSFISH + +print "\nProceeding with the Glassfish setup.\n"; +print "\nChecking your Glassfish installation..."; + +my $glassfish_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; + +# 5a. CHECK IF GLASSFISH DIR LOOKS OK: + +print $glassfish_dir."/glassfish/domains/domain1"; + +unless ( -d $glassfish_dir."/glassfish/domains/domain1" ) +{ + # TODO: need better check than this + + while ( ! 
( -d $glassfish_dir."/glassfish/domains/domain1" ) ) + { + print "\nInvalid Glassfish directory " . $glassfish_dir . "!\n"; + print "Enter the root directory of your Glassfish installation:\n"; + print "(Or ctrl-C to exit the installer): "; + + $glassfish_dir = <>; + chop $glassfish_dir; + } +} + +print "OK!\n"; + +# 5b. DETERMINE HOW MUCH MEMORY TO GIVE TO GLASSFISH AS HEAP: + +$gf_heap_default = "2048m"; +$sys_mem_total = 0; + +if ( -e "/proc/meminfo" && open MEMINFO, "/proc/meminfo" ) +{ + # Linux + + while ( $mline = ) + { + if ( $mline =~ /MemTotal:[ \t]*([0-9]*) kB/ ) + { + $sys_mem_total = $1; + } + } + + close MEMINFO; + +} +elsif ( -x "/usr/sbin/sysctl" ) +{ + # MacOS X, probably... + + $sys_mem_total = `/usr/sbin/sysctl -n hw.memsize`; + chop $sys_mem_total; + if ($sys_mem_total > 0) + { + $sys_mem_total = int ($sys_mem_total / 1024); + # size in kb + } +} + +if ( $sys_mem_total > 0 ) +{ + # setting the default heap size limit to 3/8 of the available + # amount of memory: + $gf_heap_default = ( int ($sys_mem_total / (8 / 3 * 1024) ) ); + + print "\nSetting the heap limit for Glassfish to " . $gf_heap_default . "MB. \n"; + print "You may need to adjust this setting to better suit \n"; + print "your system.\n\n"; + + $gf_heap_default .= "m"; + +} +else +{ + print "\nCould not determine the amount of memory on your system.\n"; + print "Setting the heap limit for Glassfish to 2GB. You may need \n"; + print "to adjust the value to better suit your system.\n\n"; +} + +push @CONFIG_VARIABLES, "DEF_MEM_SIZE"; +$CONFIG_DEFAULTS{"DEF_MEM_SIZE"} = $gf_heap_default; + +print "\nPress any key to continue...\n\n"; + +system "stty cbreak /dev/tty 2>&1"; + my $key = getc(STDIN); + system "stty -cbreak /dev/tty 2>&1"; + print "\n"; + +# 5c. GENERATE GLASSFISH CONFIGURATION FILE: + +print "\nWriting glassfish configuration file (domain.xml)... "; + +# 5cc. 
FIND THE "special-admin-indicator" IN THE ORIGINAL GLASSFISH CONFIG: + +open ( GFCNFG, $glassfish_dir."/glassfish/domains/domain1/config/domain.xml") || die $@; + +while ( ) +{ + if (/domain.xml'; + +while( ) +{ + for $ENTRY (@CONFIG_VARIABLES) + { + $patin = '%' . $ENTRY . '%'; + $patout = $CONFIG_DEFAULTS{$ENTRY}; + + s/$patin/$patout/g; + } + + print CONFIGOUT $_; + +} + +close TEMPLATEIN; +close CONFIGOUT; + +print "done.\n"; + +system ("/bin/cp -f domain.xml ".$glassfish_dir."/glassfish/domains/domain1/config"); +#diagnostics needed! + +# check if the supllied config files are in the right place: + +unless ( -f "config/logging.properties" ) +{ + print "\nERROR! Configuration files not found in config dir!\n"; + print "(are you running the installer in the right directory?\n"; + print "Aborting...\n"; + exit 1; +} + +print "\nCopying additional configuration files... "; + +system ( "/bin/cp -Rf config/* ".$glassfish_dir."/glassfish/domains/domain1/config"); +#diagnostics needed! + +# install pre-configured robots.txt blocking bot crawlers: +system ( "/bin/cp -f robots.txt ".$glassfish_dir."/glassfish/domains/domain1/docroot"); + +# install the DVN guides (HTML) into the application docroot: +system ( "/bin/cp -Rf doc/guides/* ".$glassfish_dir."/glassfish/domains/domain1/docroot/guides"); + + +print "done!\n"; + +print "\nInstalling the Glassfish PostgresQL driver... 
"; + +my $install_driver_jar = ""; + +if ( $pg_major_version == 8 ) +{ + if ( $pg_minor_version == 3 ) + { + $install_driver_jar = $POSTGRES_DRIVER_8_3; + } + elsif ( $pg_minor_version == 4 ) + { + $install_driver_jar = $POSTGRES_DRIVER_8_4; + } +} +elsif ( $pg_major_version == 9 ) +{ + if ( $pg_minor_version == 0 ) + { + $install_driver_jar = $POSTGRES_DRIVER_9_0; + } + elsif ( $pg_minor_version == 1 ) + { + $install_driver_jar = $POSTGRES_DRIVER_9_1; + } +} + +=poc +unless ( $install_driver_jar ) +{ + die "Installer could not find POSTGRES JDBC driver for your version of PostgresQL!\n"; + +} +=cut + +system ( "/bin/cp", "pgdriver/" . $install_driver_jar, $glassfish_dir."/glassfish/lib"); +#diagnostics needed! + +print "done!\n"; + +# 5d. STOP GLASSFISH (OK IF NOT RUNNING): +print "\nStopping glassfish...\n"; + +unless ( ($exit_code=system ($glassfish_dir."/bin/asadmin stop-domain domain1")) == 0 ) +{ + print STDERR "(that's OK!)\n"; +} + +# 5dd. INSTALL PATCHED WEBCORE GLASSFISH MODULE: + +$gf_webcore_jar = $glassfish_dir."/glassfish/modules/web-core.jar"; + +system ("/bin/mv -f ".$gf_webcore_jar . " " . $gf_webcore_jar.".PRESERVED"); +system ("/bin/cp web-core.jar ".$gf_webcore_jar); + +# 5ddd. DELETE EJB TIMER APP LOCK FILE, if exists (just in case!): + +system ( "/bin/rm -f ".$glassfish_dir."/glassfish/domains/domain1/generated/ejb-timer-service-app" ); + +# 5e. START GLASSFISH: +print "\nStarting glassfish.\n"; + +unless ( ($exit_code=system ($glassfish_dir."/bin/asadmin start-domain domain1")) == 0 ) +{ + print STDERR "Could not start glassfish!\n"; + print STDERR "(exit code: " . $exitcode . ")\n"; + exit 1; +} + + +# check if glassfish is running: +# TODO. + +# 6. DEPLOY APPLICATION: +# 6a. DO WE HAVE ANT? +# (we are no longer using ant to deply -- L.A.) +# +#$sys_path = $ENV{'PATH'}; +#@sys_path_dirs = split ( ":", $sys_path ); + +#$ant_exec = ""; +# +#for $sys_path_dir ( @sys_path_dirs ) +#{ +# if ( -x $sys_path_dir . 
"/ant" ) +# { +# $ant_exec = $sys_path_dir . "/ant"; +# last; +# } +#} +# +#if ( $ant_exec eq "" ) +#{ +# print STDERR "\nERROR: I haven't been able to find ant command in your PATH!\n"; +# print STDERR "Please make sure and is installed and in your PATH; then try again.\n\n"; +# +# exit 1; +#} +# 6b. TRY TO DEPLOY: + +print "\nAttempting to deploy the application:\n\n"; + +$CONFIG_DEFAULTS{'GLASSFISH_ADMIN_PASSWORD'} = 'adminadmin'; +# TODO: ask for password! -- in case they have already changed it +# (update: chances are we don't even need the password anymore, as +# long as we are deploying locally (?)) + +my $glassfish_password = $CONFIG_DEFAULTS{'GLASSFISH_ADMIN_PASSWORD'}; + +# create deployment properties files: +# (these properties files are no longer used, because we are no longer +# using ant to deploy the app. -- L.A.) + +#for $prop_file ('AS', 'glassfish') +#{ +# open ( TEMPLIN, "appdeploy/" . $prop_file . ".properties.TEMPLATE" ) +# || die "failed to open appdeploy/" . $prop_file . ".properties.TEMPLATE"; +# open ( PROPOUT, ">appdeploy/" . $prop_file . ".properties" ) +# || die "failed to open appdeploy/" . $prop_file . ".properties for writing"; +# +# while( ) +# { +# s/%GF_ADMIN_PASSWORD%/$glassfish_password/g; +# s/%GF_ROOT_DIR%/$glassfish_dir/g; +# print PROPOUT $_; +# } +# +# close TEMPLIN; +# close PROPOUT; +#} + +# Create the .asadminpass file, or replace it, if exists: + +$asadminpass_file = $ENV{'HOME'} . "/.asadminpass"; + +if ( -e $asadminpass_file ) +{ + system ("/bin/mv -f " . $asadminpass_file . " " . $asadminpass_file . ".PRESERVED"); +} + +system ("echo 'asadmin://admin@localhost:4848 ' > " . $asadminpass_file); + +$deploy_command = $glassfish_dir."/bin/asadmin deploy --force=true --name=DVN-web dist/DVN-web.war"; + +unless ( ($exit_code = system ("cd appdeploy; " . $deploy_command)) == 0 ) +{ + print STDERR "Could not deploy DVN application!\n"; + print STDERR "(exit code: " . $exitcode . 
")\n"; + exit 1; +} + +if ( $pg_local_connection ) +{ + print "\nOK; now we are going to stop glassfish and populate the database with\n"; + print "some initial content, then start glassfish again.\n"; +} +else +{ + print "\nOK; stopping glasfish.\n"; +} + + +# 6c. SHUT DOWN: + +$gf_stop_command = $glassfish_dir."/bin/asadmin stop-domain domain1"; + +unless ( ($exit_code = system ($gf_stop_command)) == 0 ) +{ + print STDERR "Could not stop glassfish!\n"; + print STDERR "(command line: " . $gf_stop_command . ")\n"; + print STDERR "(exit code: " . $exitcode . ")\n"; + print STDERR "\nPlease finish the installation process manually: \n"; + print STDERR "stop/kill glassfish; then populate the database with \n"; + print STDERR "the supplied initial content, by executing the following \n"; + print STDERR "command, *as Unix user postgres*: \n\n"; + + $psql_command = $psql_exec . "/psql -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f " . $SQL_REFERENCE_DATA; + + print STDERR $psql_command . "\n\n"; + print STDERR "Then start glassfish again... Voila, you should then have \n"; + print STDERR "a running DVN instance at the following URL:\n\n"; + print STDERR " http://" . $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} . "/dvn\n"; + + print STDERR "\naborting the installer... (sorry!)\n"; + + exit 1; +} + +# 7. POPULATE DATABASE: + +if ( $pg_local_connection ) +{ + # 7a. POPULATE LOCALLY: + print "\nPopulating the database (local PostgresQL instance):\n\n"; + + # Copy the SQL file to /tmp, where user postgres will definitely + # have read access to it: + + copy("referenceData.sql","/tmp") or die "Could not copy referenceData.sql to /tmp: $!"; + + $< = $POSTGRES_SYS_UID; + $> = $POSTGRES_SYS_UID; + chdir ("/tmp"); + $psql_command = $psql_exec . "/psql -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f referenceData.sql"; + + unless ( ($exitcode = system("$psql_command")) == 0 ) + { + print STDERR "Could not populate Postgres database for the DVN app!\n"; + print STDERR "(command: " . 
$psql_command . ")\n"; + print STDERR "(psql exit code: " . $exitcode . ")\n"; + print STDERR "\nYou must populate the database before you can use your new\n"; + print STDERR "DVN instance. Please consult the installation manual and/or\n"; + print STDERR "seek support from the DVN team.\n\n"; + exit 1; + + } + + chdir ($cwd); + print "\nOK, done!\n"; + +} +else +{ + # 7b. INSTRUCT THE USER TO POPULATE THE DB ON THE REMOTE SERVER: + # NOT SUPPORTED YET -- TODO + print "The database needs to be populated with some intial content \n"; + print "before we restart the DVN one last time. \n"; + print "However, populating a database on a remote PostgresQL server "; + print "is not supported yet!\n"; + print "Please copy the file referenceData.sql (found in this directory)\n"; + print "onto the remote server and populate the database manually,\n"; + print "as user postgres, with the following command:\n\n"; + print " psql -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f referenceData.sql\n"; + print "then start glassfish again on this server with \n\n"; + print " " . $glassfish_dir."/bin/asadmin start-domain domain1\n\n"; + + $> = 0; + $< = 0; + + exit 0; + +} + +# back to root: + +$> = 0; +$< = 0; + +# 8. START GLASSFISH AGAIN: +print "\nStarting glassfish, again:\n\n"; + +$gf_start_command = $glassfish_dir."/bin/asadmin start-domain domain1"; + +# delete the EJB TIMER app lock file, if exists (just in case!): +system ( "/bin/rm -f ".$glassfish_dir."/glassfish/domains/domain1/generated/ejb-timer-service-app" ); + +unless ( ($exit_code = system ($gf_start_command)) == 0 ) +{ + print STDERR "Could not start glassfish!\n"; + print STDERR "(command line: " . $gf_start_command . ")\n"; + print STDERR "(exit code: " . $exit_code . ")\n"; + exit 1; +} + + +print "\nYou should now have a running DVN instance;\n"; +print "Please go to the application at the following URL:\n\n"; +print " http://" . $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} . 
"/dvn\n"; +print "\nand log in by using \"networkAdmin\" as both the user name\n"; +print "and password. Click the \"networkAdmin\" link on the right side\n"; +print "Of the main screen, then click \"Update Account\". Change this\n"; +print "default password and default e-mail address.\n"; + +# 9. FINALLY, CHECK IF RSERVE IS RUNNING: +print "\n\nFinally, checking if Rserve is running and accessible...\n"; + +unless ( $CONFIG_DEFAULTS{'RSERVE_PORT'}=~/^[0-9][0-9]*$/ ) +{ + print $CONFIG_DEFAULTS{'RSERVE_HOST'} . " does not look like a valid port number,\n"; + print "defaulting to 6311.\n\n"; + + $CONFIG_DEFAULTS{'RSERVE_PORT'} = 6311; +} + +my ( $rserve_iaddr, $rserve_paddr, $rserve_proto ); + +unless ( $rserve_iaddr = inet_aton($CONFIG_DEFAULTS{'RSERVE_HOST'}) ) +{ + print STDERR "Could not look up $CONFIG_DEFAULTS{'RSERVE_HOST'},\n"; + print STDERR "the host you specified as your R server.\n"; + print STDERR "\nDVN can function without a working R server, but\n"; + print STDERR "much of the functionality concerning running statistics\n"; + print STDERR "and analysis on quantitative data will not be available.\n"; + print STDERR "Please consult the Installers guide for more info.\n"; + + exit 0; +} + +$rserve_paddr = sockaddr_in($CONFIG_DEFAULTS{'RSERVE_PORT'}, $rserve_iaddr); +$rserve_proto = getprotobyname('tcp'); + +unless ( socket(SOCK, PF_INET, SOCK_STREAM, $rserve_proto) && + connect(SOCK, $rserve_paddr) ) +{ + print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'RSERVE_HOST'}\n"; + print STDERR "on port $CONFIG_DEFAULTS{'RSERVE_PORT'}, the address you provided\n"; + print STDERR "for your R server.\n"; + print STDERR "DVN can function without a working R server, but\n"; + print STDERR "much of the functionality concerning running statistics\n"; + print STDERR "and analysis on quantitative data will not be available.\n"; + print STDERR "Please consult the \"Installing R\" section in the Installers guide\n"; + print STDERR "for more info.\n"; + + 
exit 0; + +} + +close (SOCK); +print "\nOK!\n"; + +exit 0; + + +sub create_pg_hash { + local $pg_username = shift @_; + local $pg_password = shift @_; + + $encode_line = $pg_password . $pg_username; + + # for Redhat: + + ##print STDERR "executing /bin/echo -n $encode_line | md5sum\n"; + + if ( $WORKING_OS eq "MacOSX" ) + { + $hash = `/bin/echo -n $encode_line | md5`; + } + else + { + $hash = `/bin/echo -n $encode_line | md5sum`; + } + + chop $hash; + + $hash =~s/ \-$//; + + if ( (length($hash) != 32) || ($hash !~ /^[0-9a-f]*$/) ) + { + print STDERR "Failed to generate a MD5-encrypted password hash for the Postgres database.\n"; + exit 1; + } + + + return $hash; +} diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/pgdriver/postgresql-8.3-603.jdbc4.jar Binary file DVN-web/installer/dvninstall/pgdriver/postgresql-8.3-603.jdbc4.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/pgdriver/postgresql-8.4-703.jdbc4.jar Binary file DVN-web/installer/dvninstall/pgdriver/postgresql-8.4-703.jdbc4.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/pgdriver/postgresql-9.0-802.jdbc4.jar Binary file DVN-web/installer/dvninstall/pgdriver/postgresql-9.0-802.jdbc4.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/pgdriver/postgresql-9.1-902.jdbc4.jar Binary file DVN-web/installer/dvninstall/pgdriver/postgresql-9.1-902.jdbc4.jar has changed diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/referenceData.sql --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/referenceData.sql Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,1223 @@ +-- +-- PostgreSQL database dump +-- + +-- Started on 2006-09-19 16:05:05 Eastern Standard Time + +SET client_encoding = 'UTF8'; +SET check_function_bodies = false; +SET client_min_messages = warning; + +SET search_path = public, pg_catalog; + +SELECT 
pg_catalog.setval(pg_catalog.pg_get_serial_sequence('metadata', 'id'), 10, false); + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('template', 'id'), 10, false); + + +-- +-- TOC entry 1840 (class 0 OID 0) +-- Dependencies: 1304 +-- Name: datatable_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('pagedef', 'id'), 500, false); + + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('datatable', 'id'), 1, false); + + +-- +-- TOC entry 1841 (class 0 OID 0) +-- Dependencies: 1291 +-- Name: datavariable_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('datavariable', 'id'), 1, false); + + +-- +-- TOC entry 1842 (class 0 OID 0) +-- Dependencies: 1297 +-- Name: fieldinputlevel_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('fieldinputlevel', 'id'), 10, false); + + + + + +-- +-- TOC entry 1844 (class 0 OID 0) +-- Dependencies: 1287 +-- Name: logindomain_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('logindomain', 'id'), 1, false); + + + + +-- +-- TOC entry 1846 (class 0 OID 0) +-- Dependencies: 1312 +-- Name: role_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('"role"', 'id'), 10, false); + + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('"networkrole"', 'id'), 10, false); + +-- +-- TOC entry 1848 (class 0 OID 0) +-- Dependencies: 1272 +-- Name: studyfield_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('studyfield', 'id'), 150, true); + + + + + +-- +-- TOC entry 1851 (class 0 OID 0) +-- Dependencies: 1270 +-- Name: studyfile_id_seq; Type: SEQUENCE SET; Schema: public; Owner: 
postgres +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('studyfile', 'id'), 1, false); + + + + + +-- +-- TOC entry 1856 (class 0 OID 0) +-- Dependencies: 1302 +-- Name: usergroup_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('usergroup', 'id'), 1, false); + + + +-- +-- TOC entry 1859 (class 0 OID 0) +-- Dependencies: 1299 +-- Name: vdcgroup_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('vdcgroup', 'id'), 1, false); + + +-- +-- TOC entry 1860 (class 0 OID 0) +-- Dependencies: 1289 +-- Name: vdcnetwork_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('vdcnetwork', 'id'), 1, false); + + +-- +-- TOC entry 1861 (class 0 OID 0) +-- Dependencies: 1294 +-- Name: vdcuser_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('vdcuser', 'id'), 10, false); + + + +-- +-- TOC entry 1813 (class 0 OID 113837) +-- Dependencies: 1274 +-- Data for Name: coll_adv_search_fields; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE coll_adv_search_fields DISABLE TRIGGER ALL; + + + +ALTER TABLE coll_adv_search_fields ENABLE TRIGGER ALL; + +-- +-- TOC entry 1818 (class 0 OID 113863) +-- Dependencies: 1281 +-- Data for Name: coll_any_search_fields; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE coll_any_search_fields DISABLE TRIGGER ALL; + + + +ALTER TABLE coll_any_search_fields ENABLE TRIGGER ALL; + +-- +-- TOC entry 1804 (class 0 OID 113774) +-- Dependencies: 1259 +-- Data for Name: coll_search_result_fields; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE coll_search_result_fields DISABLE TRIGGER ALL; + + + +ALTER TABLE coll_search_result_fields ENABLE TRIGGER ALL; + + + + + +ALTER TABLE datatable ENABLE 
TRIGGER ALL; + +-- +-- TOC entry 1825 (class 0 OID 113902) +-- Dependencies: 1292 +-- Data for Name: datavariable; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE datavariable DISABLE TRIGGER ALL; + + + +ALTER TABLE datavariable ENABLE TRIGGER ALL; + +-- +-- TOC entry 1829 (class 0 OID 113927) +-- Dependencies: 1298 +-- Data for Name: fieldinputlevel; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE fieldinputlevel DISABLE TRIGGER ALL; + +INSERT INTO fieldinputlevel (id, name ) VALUES (1, 'required'); +INSERT INTO fieldinputlevel (id, name ) VALUES(2, 'recommended'); +INSERT INTO fieldinputlevel (id, name ) VALUES(3, 'optional'); + + + +ALTER TABLE fieldinputlevel ENABLE TRIGGER ALL; + + + +-- +-- TOC entry 1823 (class 0 OID 113888) +-- Dependencies: 1288 +-- Data for Name: logindomain; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE logindomain DISABLE TRIGGER ALL; + + + +ALTER TABLE logindomain ENABLE TRIGGER ALL; + + + +-- +-- TOC entry 1838 (class 0 OID 113987) +-- Dependencies: 1313 +-- Data for Name: role; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE "role" DISABLE TRIGGER ALL; +INSERT into role(id, name) VALUES (1, 'contributor'); +INSERT into role(id, name) VALUES (2, 'curator'); +INSERT into role(id, name) VALUES (3, 'admin'); +INSERT into role(id,name) VALUES (4, 'privileged viewer'); +ALTER TABLE "role" ENABLE TRIGGER ALL; + +ALTER TABLE "networkrole" DISABLE TRIGGER ALL; +INSERT into networkrole(id, name) VALUES (1, 'Creator'); +INSERT into networkrole(id, name) VALUES (2, 'Admin'); +ALTER TABLE "networkrole" ENABLE TRIGGER ALL; + +ALTER TABLE pagedef DISABLE TRIGGER ALL; + + +-- Pages that don't require role authorization +INSERT INTO pagedef (name, path, role_id, networkrole_id ) VALUES ( 'StudyPage', '/study/StudyPage.xhtml', null,null ); +INSERT INTO pagedef (name, path, role_id, networkrole_id ) VALUES ( 'SubsettingPage', '/subsetting/SubsettingPage.xhtml', 
null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ExploreDataPage','/viz/ExploreDataPage.xhtml',null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ErrorPage', '/ErrorPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'HomePage', '/HomePage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'UnauthorizedPage', '/login/UnauthorizedPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'StudyLockedPage', '/login/StudyLockedPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'LogoutPage', '/login/LogoutPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AddAccountPage', '/login/AddAccountPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditAccountPage', '/login/EditAccountPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AccountOptionsPage', '/login/AccountOptionsPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AccountPage', '/login/AccountPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'LoginPage', '/login/LoginPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ForgotPasswordPage', '/login/ForgotPasswordPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ContributorRequestPage', '/login/ContributorRequestPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ContributorRequestInfoPage', '/login/ContributorRequestInfoPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'CreatorRequestPage','/login/CreatorRequestPage.xhtml', 
null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'CreatorRequestInfoPage','/login/CreatorRequestInfoPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'TermsOfUsePage','/login/TermsOfUsePage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AccountTermsOfUsePage','/login/AccountTermsOfUsePage.xhtml', null,null ); +INSERT INTO pagedef (name, path, role_id, networkrole_id ) VALUES ( 'StudyVersionDifferencesPage', '/study/StudyVersionDifferencesPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'OptionsPage','/admin/OptionsPage.xhtml',null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageStudiesPage','/study/ManageStudiesPage.xhtml',null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManifestPage', '/ManifestPage.xhtml', null,null ); + +-- Pages that require VDC Role authorization: +-- Contributor Role +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditStudyPage','/study/EditStudyPage.xhtml',1,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditStudyFilesPage','/study/EditStudyFilesPage.xhtml',1,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AddFilesPage','/study/AddFilesPage.xhtml',1,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'SetUpDataExplorationPage','/study/SetUpDataExplorationPage.xhtml',1,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'DeleteStudyPage','/study/DeleteStudyPage.xhtml',1,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'MyDataversePage','/networkAdmin/MyDataversePage.xhtml',null,null ); + +-- Curator Role +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditCollectionPage','/collection/EditCollectionPage.xhtml',2,null 
); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageCollectionsPage','/collection/ManageCollectionsPage.xhtml',2,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'StudyPermissionsPage','/study/StudyPermissionsPage.xhtml',2,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'DeaccessionStudyPage', '/study/DeaccessionStudyPage.xhtml', 2, null ); + +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageTemplatesPage', '/admin/ManageTemplatesPage.xhtml', 2,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'TemplateFormPage','/study/TemplateFormPage.xhtml',2,2 ); + +-- Admin Role +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditSitePage', '/site/EditSitePage.xhtml', 3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditBannerFooterPage','/admin/EditBannerFooterPage.xhtml',3,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditContactUsPage','/admin/EditContactUsPage.xhtml',3,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditHomePanelsPage','/admin/EditHomePanelsPage.xhtml',3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditStudyCommentsPage', '/admin/EditStudyCommentsPage.xhtml', 3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditUserTermsPage','/admin/EditUseTermsPage.xhtml',3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditDepositUseTermsPage','/admin/EditDepositUseTermsPage.xhtml',3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'PrivilegedUsersPage','/admin/PrivilegedUsersPage.xhtml',3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'SearchFieldsPage','/admin/SearchFieldsPage.xhtml',3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 
'PromotionalLinkSearchBoxPage','/admin/PromotionalLinkSearchBoxPage.xhtml',3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditLockssConfigPage','/admin/EditLockssConfigPage.xhtml',3,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditGuestbookQuestionnairePage', '/admin/EditGuestbookQuestionnairePage.xhtml', 3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'GuestBookResponseDataPage', '/admin/GuestBookResponseDataPage.xhtml', 3,2 ); +-- Pages that require Network Role authorization +-- Creator Role +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AddSitePage', '/site/AddSitePage.xhtml', null,1 ); +-- Admin Role +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'NetworkOptionsPage', '/networkAdmin/NetworkOptionsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'HarvestSitesPage', '/site/HarvestSitesPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AddClassificationsPage', '/networkAdmin/AddClassificationsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageClassificationsPage', '/networkAdmin/ManageClassificationsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageControlledVocabularyPage', '/admin/ManageControlledVocabularyPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'CommentReviewPage', '/networkAdmin/CommentReviewPage.xhtml', null, 2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageDataversesPage', '/networkAdmin/ManageDataversesPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditHarvestSitePage', '/site/EditHarvestSitePage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 
'EditNetworkNamePage', '/networkAdmin/EditNetworkNamePage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'NetworkPrivilegedUsersPage', '/networkAdmin/NetworkPrivilegedUsersPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AllUsersPage', '/networkAdmin/AllUsersPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkAnnouncementsPage', '/networkAdmin/EditNetworkAnnouncementsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkBannerFooterPage', '/networkAdmin/EditNetworkBannerFooterPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditExportSchedulePage', '/networkAdmin/EditExportSchedulePage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditOAISetPage', '/networkAdmin/EditOAISetPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkDownloadUseTermsPage', '/networkAdmin/EditNetworkDownloadUseTermsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkDepositUseTermsPage', '/networkAdmin/EditNetworkDepositUseTermsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditAccountUseTermsPage', '/networkAdmin/EditAccountUseTermsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditUserGroupPage', '/networkAdmin/EditUserGroupPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'UserGroupsPage', '/networkAdmin/UserGroupsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ImportStudyPage', '/networkAdmin/ImportStudyPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'UtilitiesPage', 
'/networkAdmin/UtilitiesPage.xhtml', null, 2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditSubnetworkPage', '/networkAdmin/EditSubnetworkPage.xhtml', null, 2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageSubnetworksPage', '/networkAdmin/ManageSubnetworksPage.xhtml', null, 2 ); + +ALTER TABLE pagedef ENABLE TRIGGER ALL; + + +-- +-- TOC entry 1821 (class 0 OID 113878) +-- Dependencies: 1285 +-- Data for Name: search_result_fields; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE search_result_fields DISABLE TRIGGER ALL; + + + +ALTER TABLE search_result_fields ENABLE TRIGGER ALL; + + +-- +-- TOC entry 1826 (class 0 OID 113907) +-- Dependencies: 1293 +-- Data for Name: study_studyfield; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE study_studyfield DISABLE TRIGGER ALL; + + + +ALTER TABLE study_studyfield ENABLE TRIGGER ALL; + +-- +-- TOC entry 1817 (class 0 OID 113859) +-- Dependencies: 1280 +-- Data for Name: study_usergroup; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE study_usergroup DISABLE TRIGGER ALL; + + + +ALTER TABLE study_usergroup ENABLE TRIGGER ALL; + +-- +-- TOC entry 1812 (class 0 OID 113829) +-- Dependencies: 1273 +-- Data for Name: studyfield; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE studyfield DISABLE TRIGGER ALL; + +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (1, 'Title', 'Title', 'title', TRUE, TRUE, TRUE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (2, 'Study ID', 'Study ID', 'studyId', TRUE, TRUE, TRUE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, 
allowControlledVocabulary) VALUES (3, 'Author', 'Author', 'author', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (4, 'Author Affiliation', 'Author Affiliation', 'authorAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (5, 'Producer', 'Producer', 'producer', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (6, 'Producer URL', 'Producer URL', 'producerURL', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (7, 'URL to Producer Logo', 'URL to Producer Logo', 'producerLogo', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (8, 'Producer Name Abbreviation', 'Producer Name Abbreviation', 'producerAbbreviation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (9, 'Production Date', 'Production Date', 'productionDate', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (10, 'Software', 'Software', 'software', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, 
allowControlledVocabulary) VALUES (11, 'Software Version', 'Software Version', 'softwareVersion', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (12, 'Funding Agency', 'Funding Agency', 'fundingAgency', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (13, 'Grant Number', 'Grant Number', 'grantNumber', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (14, 'Grant Number Agency', 'Grant Number Agency', 'grantNumberAgency', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (15, '', '', 'distributor', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (16, '', '', 'distributorURL', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (17, '', '', 'distributorLogo', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (18, '', '', 'distributionDate', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (19, '', '', 'distributorContact', FALSE, FALSE, FALSE, 
FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (20, '', '', 'distributorContactAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (21, '', '', 'distributorContactEmail', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (22, '', '', 'depositor', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (23, '', '', 'dateOfDeposit', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (24, '', '', 'series', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (25, '', '', 'seriesInformation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (26, '', '', 'studyVersion', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (27, '', '', 'keyword', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (28, '', '', 'keywordVocab', FALSE, 
FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (29, '', '', 'keywordVocabURI', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (30, '', '', 'topicClassification', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (31, '', '', 'topicClassVocab', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (32, '', '', 'topicClassVocabURI', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (33, '', '', 'description', FALSE, TRUE, TRUE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (34, '', '', 'descriptionDate', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (35, '', '', 'timePeriodCoveredStart', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (36, '', '', 'timePeriodCoveredEnd', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (37, '', 
'', 'dateOfCollectionStart', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (38, '', '', 'dateOfCollectionEnd', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (39, '', '', 'country', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (40, '', '', 'geographicCoverage', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (41, '', '', 'geographicUnit', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (42, '', '', 'unitOfAnalysis', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (43, '', '', 'universe', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (44, '', '', 'kindOfData', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (45, '', '', 'timeMethod', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (46, '', 
'', 'dataCollector', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (47, '', '', 'frequencyOfDataCollection', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (48, '', '', 'samplingProcedure', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (49, '', '', 'deviationsFromSampleDesign', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (50, '', '', 'collectionMode', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (51, '', '', 'researchInstrument', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (52, '', '', 'dataSources', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (53, '', '', 'originOfSources', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (54, '', '', 'characteristicOfSources', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, 
allowControlledVocabulary) VALUES (55, '', '', 'accessToSources', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (56, '', '', 'dataCollectionSituation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (57, '', '', 'actionsToMinimizeLoss', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (58, '', '', 'controlOperations', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (59, '', '', 'weighting', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (60, '', '', 'cleaningOperations', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (61, '', '', 'studyLevelErrorNotes', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (62, '', '', 'responseRate', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (63, '', '', 'samplingErrorEstimates', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, 
name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (64, '', '', 'otherDataAppraisal', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (65, '', '', 'placeOfAccess', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (66, '', '', 'originalArchive', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (67, '', '', 'availabilityStatus', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (68, '', '', 'collectionSize', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (69, '', '', 'studyCompletion', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (70, '', '', 'confidentialityDeclaration', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (71, '', '', 'specialPermissions', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (72, '', '', 'restrictions', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO 
studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (73, '', '', 'contact', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (74, '', '', 'citationRequirements', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (75, '', '', 'depositorRequirements', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (76, '', '', 'conditions', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (77, '', '', 'disclaimer', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (78, '', '', 'relatedMaterial', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (79, '', '', 'publication', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (80, '', '', 'relatedStudies', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (81, '', '', 'otherReferences', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT 
INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (82, '', '', 'notesText', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (83, '', '', 'note', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (84, '', '', 'notesInformationSubject', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (85, '', '', 'otherId', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (86, '', '', 'otherIdAgency', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (87, '', '', 'productionPlace', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (88, '', '', 'numberOfFiles', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (89, '', '', 'publicationReplicationData', FALSE, TRUE, FALSE, FALSE, FALSE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (90, '', '', 'subTitle', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT 
INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (91, '', '', 'versionDate', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (92, '', '', 'geographicBoundingBox', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (93, '', '', 'eastLongitude', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (94, '', '', 'northLatitude', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (95, '', '', 'southLatitude', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (96, '', '', 'producerAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (97, '', '', 'distributorAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (98, '', '', 'distributorAbbreviation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (99, 'Author', 'Author', 'authorName', TRUE, 
TRUE, TRUE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (100, '', '', 'producerName', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (101, '', '', 'distributorName', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (102, '', '', 'distributorContactName', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (103, '', '', 'descriptionText', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (104, '', '', 'keywordValue', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (105, '', '', 'topicClassValue', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (106, '', '', 'otherIdValue', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (107, '', '', 'softwareName', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (108, '', '', 
'grantNumberValue', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (109, '', '', 'seriesName', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (110, '', '', 'studyVersionValue', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (111, '', '', 'westLongitude', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (112, '', '', 'noteInformationType', FALSE, FALSE, FALSE, FALSE, TRUE ); + +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (113, '', '', 'publicationCitation', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (114, '', '', 'publicationIDType', FALSE, FALSE, FALSE, FALSE, FALSE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (115, '', '', 'publicationIDNumber', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (116, '', '', 'publicationURL', FALSE, FALSE, FALSE, FALSE, TRUE ); + +--set the parent child relationship +update studyfield set parentstudyfield_id = 3 where id = 99; +update studyfield set 
parentstudyfield_id = 3 where id = 4; + +update studyfield set parentstudyfield_id = 5 where id = 100; +update studyfield set parentstudyfield_id = 5 where id = 6; +update studyfield set parentstudyfield_id = 5 where id = 7; +update studyfield set parentstudyfield_id = 5 where id = 8; +update studyfield set parentstudyfield_id = 5 where id = 96; + +update studyfield set parentstudyfield_id = 15 where id = 101; +update studyfield set parentstudyfield_id = 15 where id = 16; +update studyfield set parentstudyfield_id = 15 where id = 17; +update studyfield set parentstudyfield_id = 15 where id = 97; +update studyfield set parentstudyfield_id = 15 where id = 98; + +update studyfield set parentstudyfield_id = 19 where id = 102; +update studyfield set parentstudyfield_id = 19 where id = 20; +update studyfield set parentstudyfield_id = 19 where id = 21; + +update studyfield set parentstudyfield_id = 33 where id = 103; +update studyfield set parentstudyfield_id = 33 where id = 34; + +update studyfield set parentstudyfield_id = 27 where id = 104; +update studyfield set parentstudyfield_id = 27 where id = 28; +update studyfield set parentstudyfield_id = 27 where id = 29; + +update studyfield set parentstudyfield_id = 30 where id = 105; +update studyfield set parentstudyfield_id = 30 where id = 31; +update studyfield set parentstudyfield_id = 30 where id = 32; + +update studyfield set parentstudyfield_id = 85 where id = 106; +update studyfield set parentstudyfield_id = 85 where id = 86; + +update studyfield set parentstudyfield_id = 10 where id = 107; +update studyfield set parentstudyfield_id = 10 where id = 11; + +update studyfield set parentstudyfield_id = 13 where id = 108; +update studyfield set parentstudyfield_id = 13 where id = 14; + +update studyfield set parentstudyfield_id = 24 where id = 109; +update studyfield set parentstudyfield_id = 24 where id = 25; + +update studyfield set parentstudyfield_id = 26 where id = 110; +update studyfield set parentstudyfield_id = 
26 where id = 91; + +update studyfield set parentstudyfield_id = 92 where id = 111; +update studyfield set parentstudyfield_id = 92 where id = 93; +update studyfield set parentstudyfield_id = 92 where id = 94; +update studyfield set parentstudyfield_id = 92 where id = 95; + +update studyfield set parentstudyfield_id = 83 where id = 112; +update studyfield set parentstudyfield_id = 83 where id = 82; +update studyfield set parentstudyfield_id = 83 where id = 84; + +update studyfield set parentstudyfield_id = 79 where id = 113; +update studyfield set parentstudyfield_id = 79 where id = 114; +update studyfield set parentstudyfield_id = 79 where id = 115; +update studyfield set parentstudyfield_id = 79 where id = 116; +update studyfield set parentstudyfield_id = 79 where id = 89; + +update studyfield set displayorder = 0 where name = 'authorName'; +update studyfield set displayorder = 2 where name = 'authorAffiliation'; +update studyfield set displayorder = 2 where name = 'producerAbbreviation'; +update studyfield set displayorder = 1 where name = 'producerName'; +update studyfield set displayorder = 3 where name = 'producerAffiliation'; +update studyfield set displayorder = 4 where name = 'producerURL'; +update studyfield set displayorder = 5 where name = 'producerLogo'; +update studyfield set displayorder = 2 where name = 'softwareVersion'; +update studyfield set displayorder = 1 where name = 'softwareName'; +update studyfield set displayorder = 1 where name = 'grantNumberValue'; +update studyfield set displayorder = 2 where name = 'grantNumberAgency'; +update studyfield set displayorder = 1 where name = 'distributorName'; +update studyfield set displayorder = 4 where name = 'distributorURL'; +update studyfield set displayorder = 5 where name = 'distributorLogo'; +update studyfield set displayorder = 3 where name = 'distributorAffiliation'; +update studyfield set displayorder = 2 where name = 'distributorAbbreviation'; +update studyfield set displayorder = 1 where 
name = 'distributorContactName'; +update studyfield set displayorder = 2 where name = 'distributorContactAffiliation'; +update studyfield set displayorder = 3 where name = 'distributorContactEmail'; +update studyfield set displayorder = 2 where name = 'seriesInformation'; +update studyfield set displayorder = 1 where name = 'seriesName'; +update studyfield set displayorder = 1 where name = 'studyVersionValue'; +update studyfield set displayorder = 2 where name = 'versionDate'; +update studyfield set displayorder = 1 where name = 'keywordValue'; +update studyfield set displayorder = 3 where name = 'keywordVocabURI'; +update studyfield set displayorder = 2 where name = 'keywordVocab'; +update studyfield set displayorder = 1 where name = 'topicClassValue'; +update studyfield set displayorder = 2 where name = 'topicClassVocab'; +update studyfield set displayorder = 3 where name = 'topicClassVocabURI'; +update studyfield set displayorder = 1 where name = 'descriptionText'; +update studyfield set displayorder = 2 where name = 'descriptionDate'; +update studyfield set displayorder = 1 where name = 'publicationCitation'; +update studyfield set displayorder = 2 where name = 'publicationIDNumber'; +update studyfield set displayorder = 3 where name = 'publicationURL'; +update studyfield set displayorder = 3 where name = 'notesText'; +update studyfield set displayorder = 1 where name = 'noteInformationType'; +update studyfield set displayorder = 2 where name = 'notesInformationSubject'; +update studyfield set displayorder = 2 where name = 'otherIdAgency'; +update studyfield set displayorder = 1 where name = 'otherIdValue'; + + +update studyfield set fieldtype = 'date' where id = 9; +update studyfield set fieldtype = 'date' where id = 18; +update studyfield set fieldtype = 'date' where id = 23; +update studyfield set fieldtype = 'date' where id = 34; +update studyfield set fieldtype = 'date' where id = 35; +update studyfield set fieldtype = 'date' where id = 36; +update 
studyfield set fieldtype = 'date' where id = 37; +update studyfield set fieldtype = 'date' where id = 38; +update studyfield set fieldtype = 'date' where id = 91; +update studyfield set fieldtype = 'email' where id = 21; +update studyfield set fieldtype = 'textBox' where id = 4; +update studyfield set fieldtype = 'textBox' where id = 8; +update studyfield set fieldtype = 'textBox' where id = 11; +update studyfield set fieldtype = 'textBox' where id = 12; +update studyfield set fieldtype = 'textBox' where id = 13; +update studyfield set fieldtype = 'textBox' where id = 14; +update studyfield set fieldtype = 'textBox' where id = 19; +update studyfield set fieldtype = 'textBox' where id = 20; +update studyfield set fieldtype = 'textBox' where id = 22; +update studyfield set fieldtype = 'textBox' where id = 24; +update studyfield set fieldtype = 'textBox' where id = 25; +update studyfield set fieldtype = 'textBox' where id = 26; +update studyfield set fieldtype = 'textBox' where id = 27; +update studyfield set fieldtype = 'textBox' where id = 28; +update studyfield set fieldtype = 'textBox' where id = 30; +update studyfield set fieldtype = 'textBox' where id = 31; +update studyfield set fieldtype = 'textBox' where id = 33; +update studyfield set fieldtype = 'textBox' where id = 39; +update studyfield set fieldtype = 'textBox' where id = 40; +update studyfield set fieldtype = 'textBox' where id = 41; +update studyfield set fieldtype = 'textBox' where id = 42; +update studyfield set fieldtype = 'textBox' where id = 43; +update studyfield set fieldtype = 'textBox' where id = 44; +update studyfield set fieldtype = 'textBox' where id = 45; +update studyfield set fieldtype = 'textBox' where id = 46; +update studyfield set fieldtype = 'textBox' where id = 47; +update studyfield set fieldtype = 'textBox' where id = 48; +update studyfield set fieldtype = 'textBox' where id = 49; +update studyfield set fieldtype = 'textBox' where id = 50; +update studyfield set fieldtype = 
'textBox' where id = 51; +update studyfield set fieldtype = 'textBox' where id = 52; +update studyfield set fieldtype = 'textBox' where id = 53; +update studyfield set fieldtype = 'textBox' where id = 54; +update studyfield set fieldtype = 'textBox' where id = 55; +update studyfield set fieldtype = 'textBox' where id = 56; +update studyfield set fieldtype = 'textBox' where id = 57; +update studyfield set fieldtype = 'textBox' where id = 58; +update studyfield set fieldtype = 'textBox' where id = 59; +update studyfield set fieldtype = 'textBox' where id = 60; +update studyfield set fieldtype = 'textBox' where id = 61; +update studyfield set fieldtype = 'textBox' where id = 62; +update studyfield set fieldtype = 'textBox' where id = 63; +update studyfield set fieldtype = 'textBox' where id = 64; +update studyfield set fieldtype = 'textBox' where id = 65; +update studyfield set fieldtype = 'textBox' where id = 66; +update studyfield set fieldtype = 'textBox' where id = 67; +update studyfield set fieldtype = 'textBox' where id = 68; +update studyfield set fieldtype = 'textBox' where id = 69; +update studyfield set fieldtype = 'textBox' where id = 70; +update studyfield set fieldtype = 'textBox' where id = 71; +update studyfield set fieldtype = 'textBox' where id = 72; +update studyfield set fieldtype = 'textBox' where id = 73; +update studyfield set fieldtype = 'textBox' where id = 74; +update studyfield set fieldtype = 'textBox' where id = 75; +update studyfield set fieldtype = 'textBox' where id = 76; +update studyfield set fieldtype = 'textBox' where id = 77; +update studyfield set fieldtype = 'textBox' where id = 78; +update studyfield set fieldtype = 'textBox' where id = 79; +update studyfield set fieldtype = 'textBox' where id = 80; +update studyfield set fieldtype = 'textBox' where id = 81; +update studyfield set fieldtype = 'textBox' where id = 82; +update studyfield set fieldtype = 'textBox' where id = 83; +update studyfield set fieldtype = 'textBox' where id 
= 84; +update studyfield set fieldtype = 'textBox' where id = 85; +update studyfield set fieldtype = 'textBox' where id = 86; +update studyfield set fieldtype = 'textBox' where id = 87; +update studyfield set fieldtype = 'textBox' where id = 88; +update studyfield set fieldtype = 'textBox' where id = 89; +update studyfield set fieldtype = 'textBox' where id = 92; +update studyfield set fieldtype = 'textBox' where id = 96; +update studyfield set fieldtype = 'textBox' where id = 97; +update studyfield set fieldtype = 'textBox' where id = 98; +update studyfield set fieldtype = 'textBox' where id = 99; +update studyfield set fieldtype = 'textBox' where id = 100; +update studyfield set fieldtype = 'textBox' where id = 101; +update studyfield set fieldtype = 'textBox' where id = 102; +update studyfield set fieldtype = 'textBox' where id = 103; +update studyfield set fieldtype = 'textBox' where id = 104; +update studyfield set fieldtype = 'textBox' where id = 105; +update studyfield set fieldtype = 'textBox' where id = 106; +update studyfield set fieldtype = 'textBox' where id = 107; +update studyfield set fieldtype = 'textBox' where id = 108; +update studyfield set fieldtype = 'textBox' where id = 109; +update studyfield set fieldtype = 'textBox' where id = 110; +update studyfield set fieldtype = 'textBox' where id = 112; +update studyfield set fieldtype = 'textBox' where id = 113; +update studyfield set fieldtype = 'textBox' where id = 115; +update studyfield set fieldtype = 'url' where id = 6; +update studyfield set fieldtype = 'url' where id = 7; +update studyfield set fieldtype = 'url' where id = 16; +update studyfield set fieldtype = 'url' where id = 17; +update studyfield set fieldtype = 'url' where id = 29; +update studyfield set fieldtype = 'url' where id = 32; +update studyfield set fieldtype = 'url' where id = 116; + +ALTER TABLE studyfield ENABLE TRIGGER ALL; + + + + + +-- +-- TOC entry 1811 (class 0 OID 113819) +-- Dependencies: 1271 +-- Data for Name: 
studyfile; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE studyfile DISABLE TRIGGER ALL; + + + +ALTER TABLE studyfile ENABLE TRIGGER ALL; + +-- +-- TOC entry 1810 (class 0 OID 113813) +-- Dependencies: 1269 +-- Data for Name: studyfile_usergroup; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE studyfile_usergroup DISABLE TRIGGER ALL; + + + +ALTER TABLE studyfile_usergroup ENABLE TRIGGER ALL; + +-- +-- TOC entry 1835 (class 0 OID 113964) +-- Dependencies: 1307 +-- Data for Name: summary_fields; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE summary_fields DISABLE TRIGGER ALL; + + + +ALTER TABLE summary_fields ENABLE TRIGGER ALL; + +-- +-- TOC entry 1809 (class 0 OID 113808) +-- Dependencies: 1268 +-- Data for Name: template; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE "template" DISABLE TRIGGER ALL; + + + +ALTER TABLE "template" ENABLE TRIGGER ALL; + +-- +-- TOC entry 1807 (class 0 OID 113797) +-- Dependencies: 1265 +-- Data for Name: templatefield; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE templatefield DISABLE TRIGGER ALL; + + + +ALTER TABLE templatefield ENABLE TRIGGER ALL; + +-- +-- TOC entry 1832 (class 0 OID 113945) +-- Dependencies: 1303 +-- Data for Name: usergroup; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE usergroup DISABLE TRIGGER ALL; + + + +ALTER TABLE usergroup ENABLE TRIGGER ALL; + + +-- +-- TOC entry 1831 (class 0 OID 113939) +-- Dependencies: 1301 +-- Data for Name: vdc_adv_search_fields; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE vdc_adv_search_fields DISABLE TRIGGER ALL; + + + +ALTER TABLE vdc_adv_search_fields ENABLE TRIGGER ALL; + +-- +-- TOC entry 1801 (class 0 OID 113756) +-- Dependencies: 1255 +-- Data for Name: vdc_any_search_fields; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE vdc_any_search_fields DISABLE TRIGGER ALL; + + + +ALTER TABLE 
vdc_any_search_fields ENABLE TRIGGER ALL; + + +-- +-- TOC entry 1808 (class 0 OID 113802) +-- Dependencies: 1266 +-- Data for Name: vdc_usergroup; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE vdc_usergroup DISABLE TRIGGER ALL; + + + +ALTER TABLE vdc_usergroup ENABLE TRIGGER ALL; + + +-- +-- TOC entry 1830 (class 0 OID 113934) +-- Dependencies: 1300 +-- Data for Name: vdcgroup; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE vdcgroup DISABLE TRIGGER ALL; + + + +ALTER TABLE vdcgroup ENABLE TRIGGER ALL; + +-- +-- TOC entry 1828 (class 0 OID 113921) +-- Dependencies: 1296 +-- Data for Name: vdcgrouprelationship; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE vdcgrouprelationship DISABLE TRIGGER ALL; + + + +ALTER TABLE vdcgrouprelationship ENABLE TRIGGER ALL; + + + + + +-- +-- TOC entry 1827 (class 0 OID 113913) +-- Dependencies: 1295 +-- Data for Name: vdcuser; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE vdcuser DISABLE TRIGGER ALL; + + + +ALTER TABLE vdcuser ENABLE TRIGGER ALL; + +-- +-- TOC entry 1834 (class 0 OID 113960) +-- Dependencies: 1306 +-- Data for Name: vdcuser_usergroup; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE vdcuser_usergroup DISABLE TRIGGER ALL; + + + +ALTER TABLE vdcuser_usergroup ENABLE TRIGGER ALL; + + +ALTER TABLE variableintervaltype DISABLE TRIGGER ALL; + +INSERT INTO variableintervaltype (id, name ) VALUES (1, 'discrete'); +INSERT INTO variableintervaltype (id, name ) VALUES(2, 'continuous'); +INSERT INTO variableintervaltype (id, name ) VALUES(3, 'nominal'); +INSERT INTO variableintervaltype (id, name ) VALUES(4, 'dichotomous'); + +ALTER TABLE variableintervaltype ENABLE TRIGGER ALL; + + +ALTER TABLE variableformattype DISABLE TRIGGER ALL; + +INSERT INTO variableformattype (id, name ) VALUES (1, 'numeric'); +INSERT INTO variableformattype (id, name ) VALUES(2, 'character'); + +ALTER TABLE variableformattype ENABLE TRIGGER ALL; 
+ + +ALTER TABLE variablerangetype DISABLE TRIGGER ALL; + +INSERT INTO variablerangetype (id, name ) VALUES(1, 'min'); +INSERT INTO variablerangetype (id, name ) VALUES(2, 'max'); +INSERT INTO variablerangetype (id, name ) VALUES(3, 'min exclusive'); +INSERT INTO variablerangetype (id, name ) VALUES(4, 'max exclusive'); +INSERT INTO variablerangetype (id, name ) VALUES(5, 'point'); + +ALTER TABLE variablerangetype ENABLE TRIGGER ALL; + +ALTER TABLE summarystatistictype DISABLE TRIGGER ALL; + +INSERT INTO summarystatistictype (id, name ) VALUES(1, 'mean'); +INSERT INTO summarystatistictype (id, name ) VALUES(2, 'medn'); +INSERT INTO summarystatistictype (id, name ) VALUES(3, 'mode'); +INSERT INTO summarystatistictype (id, name ) VALUES(4, 'min'); +INSERT INTO summarystatistictype (id, name ) VALUES(5, 'max'); +INSERT INTO summarystatistictype (id, name ) VALUES(6, 'stdev'); +INSERT INTO summarystatistictype (id, name ) VALUES(7, 'vald'); +INSERT INTO summarystatistictype (id, name ) VALUES(8, 'invd'); + +ALTER TABLE variablerangetype ENABLE TRIGGER ALL; + +ALTER TABLE vdcuser DISABLE TRIGGER ALL; + +insert into vdcuser(id, version, email, firstname, lastname, username, encryptedpassword, networkRole_id,active, agreedtermsofuse ) VALUES ( 1, 1, 'dataverse@lists.hmdc.harvard.edu','Network','Admin', 'networkAdmin' ,'tf0bLmzOFx5JrBhe2EIraS5GBnI=' ,2,true, true); + +ALTER TABLE vdcuser ENABLE TRIGGER ALL; + +-- +-- TOC entry 1809 (class 0 OID 113808) +-- Dependencies: 1268 +-- Data for Name: template; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE "metadata" DISABLE TRIGGER ALL; +-- Default metadata - contains no metadata values +INSERT INTO metadata( id, version ) VALUES ( 1, 1); + +ALTER TABLE "metadata" ENABLE TRIGGER ALL; + + +ALTER TABLE "template" DISABLE TRIGGER ALL; + +INSERT INTO template( id, version, vdcnetwork_id, name,metadata_id,enabled) VALUES (1, 1, 0, 'Dataverse Network Default Template',1,true); + +ALTER TABLE "template" ENABLE 
TRIGGER ALL; + +-- +-- TOC entry 1824 (class 0 OID 113895) +-- Dependencies: 1290 +-- Data for Name: vdcnetwork; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE vdcnetwork DISABLE TRIGGER ALL; + +INSERT INTO vdcnetwork (id, version, name, networkpageheader, networkpagefooter, announcements, displayannouncements, aboutthisdataversenetwork, contactemail, systememail, defaultvdcheader, defaultvdcfooter, defaultvdcabouttext, defaultvdcannouncements, displayvdcannouncements, displayvdcrecentstudies, defaulttemplate_id, allowcreaterequest, defaultnetworkadmin_id,protocol,authority,handleregistration,termsofuseenabled, deposittermsofuseenabled, downloadtermsofuseenabled, defaultdisplaynumber, exportperiod, exporthourofday) VALUES (0, 1, '[Your]', ' ', ' ', 'A description of your Dataverse Network or announcements may be added here. Use Network Options to edit or remove this text.', TRUE, 'This About page is not used anymore in the DVN application.', 'dataverse@lists.hmdc.harvard.edu','dataverse@lists.hmdc.harvard.edu', ' ', ' ', 'This About page is not used anymore in the DVN application.', '', TRUE, TRUE, 1, FALSE,1,'hdl','TEST',false,false,false,false,16,'daily',3); + +update vdcnetwork set defaultvdcheader=' +
      +
      '; + +update vdcnetwork set defaultvdcfooter='
      '; + + + +update vdcnetwork set requireDVDescription = false, + requireDVaffiliation = false, + requireDVclassification = false, + requireDVstudiesforrelease = false; + +ALTER TABLE vdcnetwork ENABLE TRIGGER ALL; + + +-- +-- TOC entry 1807 (class 0 OID 113797) +-- Dependencies: 1265 +-- Data for Name: templatefield; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE templatefield DISABLE TRIGGER ALL; + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(1,1,1,1,'required',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(2,1,1,2,'required',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(3,1,1,3,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(4,1,1,4,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(5,1,1,5,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(6,1,1,6,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(7,1,1,7,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(8,1,1,8,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(9,1,1,9,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(10,1,1,10,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(11,1,1,11,'optional',-1); +INSERT INTO templatefield(id, version, template_id, 
studyfield_id, fieldinputlevelstring, displayorder) VALUES(12,1,1,12,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(13,1,1,13,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(14,1,1,14,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(15,1,1,15,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(16,1,1,16,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(17,1,1,17,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(18,1,1,18,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(19,1,1,19,'recommended',-1); + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(20,1,1,20,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(21,1,1,21,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(22,1,1,22,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(23,1,1,23,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(24,1,1,24,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(25,1,1,25,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, 
displayorder) VALUES(26,1,1,26,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(27,1,1,27,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(28,1,1,28,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(29,1,1,29,'optional',-1); + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(30,1,1,30,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(31,1,1,31,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(32,1,1,32,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(33,1,1,33,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(34,1,1,34,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(35,1,1,35,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(36,1,1,36,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(37,1,1,37,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(38,1,1,38,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(39,1,1,39,'recommended',-1); + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) 
VALUES(40,1,1,40,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(41,1,1,41,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(42,1,1,42,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(43,1,1,43,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(44,1,1,44,'recommended',-1); + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(45,1,1,45,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(46,1,1,46,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(47,1,1,47,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(48,1,1,48,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(49,1,1,49,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(50,1,1,50,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(51,1,1,51,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(52,1,1,52,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(53,1,1,53,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(54,1,1,54,'optional',-1); +INSERT INTO 
templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(55,1,1,55,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(56,1,1,56,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(57,1,1,57,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(58,1,1,58,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(59,1,1,59,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(60,1,1,60,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(61,1,1,61,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(62,1,1,62,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(63,1,1,63,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(64,1,1,64,'optional',-1); + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(65,1,1,65,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(66,1,1,66,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(67,1,1,67,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(68,1,1,68,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, 
fieldinputlevelstring, displayorder) VALUES(69,1,1,69,'optional',-1); + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(70,1,1,70,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(71,1,1,71,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(72,1,1,72,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(73,1,1,73,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(74,1,1,74,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(75,1,1,75,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(76,1,1,76,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(77,1,1,77,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(78,1,1,78,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(79,1,1,79,'recommended',-1); + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(80,1,1,80,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(81,1,1,81,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(82,1,1,82,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) 
VALUES(83,1,1,83,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(84,1,1,84,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(85,1,1,85,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(86,1,1,86,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(87,1,1,87,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(88,1,1,88,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(89,1,1,89,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(90,1,1,90,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(91,1,1,91,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(92,1,1,92,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(93,1,1,93,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(94,1,1,94,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(95,1,1,95,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(96,1,1,96,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(97,1,1,97,'optional',-1); +INSERT INTO templatefield(id, 
version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(98,1,1,98,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(99,1,1,99,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(100,1,1,100,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(101,1,1,101,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(102,1,1,102,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(103,1,1,103,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(104,1,1,104,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(105,1,1,105,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(106,1,1,106,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(107,1,1,107,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(108,1,1,108,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(109,1,1,109,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(110,1,1,110,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(111,1,1,111,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, 
fieldinputlevelstring, displayorder) VALUES(112,1,1,112,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(113,1,1,113,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(114,1,1,114,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(115,1,1,115,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(116,1,1,116,'optional',-1); + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('templatefield', 'id'), 150, false); + + +ALTER TABLE templatefield ENABLE TRIGGER ALL; + + + +-- +-- TOC entry 1814 (class 0 OID 113843) +-- Dependencies: 1276 +-- Data for Name: templatefilecategory; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +ALTER TABLE templatefilecategory DISABLE TRIGGER ALL; + +INSERT INTO templatefilecategory(id, template_id, name, displayorder) VALUES(1,1,'Documentation',1); +INSERT INTO templatefilecategory(id, template_id, name, displayorder) VALUES(2,1,'Data Files',2); + +ALTER TABLE templatefilecategory ENABLE TRIGGER ALL; + +-- +-- TOC entry 1855 (class 0 OID 0) +-- Dependencies: 1275 +-- Name: templatefilecategory_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('templatefilecategory', 'id'), 5, false); + + +-- Completed on 2006-09-19 16:05:06 Eastern Standard Time + +-- +-- PostgreSQL database dump complete +-- + + +-- Sequence: studyid_seq + +-- DROP SEQUENCE studyid_seq; + +CREATE SEQUENCE studyid_seq + INCREMENT 1 + MINVALUE 1 + MAXVALUE 9223372036854775807 + START 10000 + CACHE 1; +ALTER TABLE studyid_seq OWNER TO "postgres"; + +-- Sequence: filesystemname_seq + +-- DROP SEQUENCE filesystemname_seq; + +CREATE SEQUENCE filesystemname_seq + INCREMENT 1 + 
MINVALUE 1 + MAXVALUE 9223372036854775807 + START 2 + CACHE 1; +ALTER TABLE filesystemname_seq OWNER TO "postgres"; + + +INSERT INTO DataFileFormatType (id, value, name, mimeType) VALUES (1, 'D02', 'Splus', 'text/plain'); +INSERT INTO DataFileFormatType (id, value, name, mimeType) VALUES (2, 'D03', 'Stata', 'application/x-stata'); +INSERT INTO DataFileFormatType (id, value, name, mimeType) VALUES (3, 'D04', 'R', 'application/x-rlang-transport'); + +INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (0, 'ddi', 'DDI', null); +INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (1, 'oai_etdms', 'MIF', 'mif2ddi.xsl'); +INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (2, 'oai_dc', 'DC', 'oai_dc2ddi.xsl'); +INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (3, 'oai_fgdc', 'FGDC', 'fgdc2ddi.xsl'); +INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (4, 'dcmi_terms', 'DCMI_terms', 'dcmi_terms2ddi.xsl'); + +create index datavariable_id_index on datavariable (id); +create index summarystatistic_id_index on summarystatistic (id); +create index summarystatistic_datavariable_id_index on summarystatistic (datavariable_id); +create index variablecategory_id_index on variablecategory (id); +create index variablecategory_datavariable_id_index on variablecategory (datavariable_id); +create index variablerange_id_index on variablerange (id); +create index study_id_index on study(id); +create index study_owner_id_index on study(owner_id); +create index weightedvarrelationship_id_index on weightedvarrelationship (weighted_variable_id,variable_id); +create index studyfile_id_index on studyfile(id); +create index datavariable_datatable_id_index on datavariable(datatable_id); +create index variablerange_datavariable_id_index on variablerange (datavariable_id); +create index metadata_id_index on metadata(id); +create index 
studyabstract_metadata_id_index on studyabstract(metadata_id); +create index studyauthor_metadata_id_index on studyauthor(metadata_id); +create index studydistributor_metadata_id_index on studydistributor(metadata_id); +create index studygeobounding_metadata_id_index on studygeobounding(metadata_id); +create index studygrant_metadata_id_index on studygrant(metadata_id); +create index studykeyword_metadata_id_index on studykeyword(metadata_id); +create index studynote_metadata_id_index on studynote(metadata_id); +create index studyotherid_metadata_id_index on studyotherid(metadata_id); +create index studyotherref_metadata_id_index on studyotherref(metadata_id); +create index studyproducer_metadata_id_index on studyproducer(metadata_id); +create index studyrelmaterial_metadata_id_index on studyrelmaterial(metadata_id); +create index studyrelpublication_metadata_id_index on studyrelpublication(metadata_id); +create index studyrelstudy_metadata_id_index on studyrelstudy(metadata_id); +create index studysoftware_metadata_id_index on studysoftware(metadata_id); +create index studytopicclass_metadata_id_index on studytopicclass(metadata_id); +create index template_metadata_id_index on template(metadata_id); +create index studyfileactivity_id_index on studyfileactivity(id); +create index studyfileactivity_studyfile_id_index on studyfileactivity(studyfile_id); +create index studyfileactivity_study_id_index on studyfileactivity(study_id); + + + + + +INSERT INTO vdcnetworkstats (id,vdcnetwork_id,downloadcount,studycount,filecount) values (0,0,0,0,0); + + insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 1, 'cc by', 'CC Attribution (cc by)', 'http://creativecommons.org/licenses/by/3.0/', 'http://creativecommons.org/licenses/by/3.0/rdf', 'http://i.creativecommons.org/l/by/3.0/88x31.png' ); +-- removed until we support cc0 +--insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 2, 'cc0','CC Zero 
(cc0)','http://creativecommons.org/publicdomain/zero/1.0/','http://creativecommons.org/publicdomain/zero/1.0/rdf','http://i.creativecommons.org/l/zero/1.0/88x31.png'); +insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 3, 'cc by-sa','CC Attribution Share Alike (cc by-sa)','http://creativecommons.org/licenses/by-sa/3.0/', 'http://creativecommons.org/licenses/by-sa/3.0/rdf', 'http://i.creativecommons.org/l/by-sa/3.0/88x31.png' ); +insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 4, 'cc by-nd','CC Attribution No Derivatives (cc by-nd)','http://creativecommons.org/licenses/by-nd/3.0/', 'http://creativecommons.org/licenses/by-nd/3.0/rdf', 'http://i.creativecommons.org/l/by-nd/3.0/88x31.png' ); +insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 5, 'cc by-nc','CC Attribution Non-Commercial (cc by-nc)','http://creativecommons.org/licenses/by-nc/3.0/', 'http://creativecommons.org/licenses/by-nc/3.0/rdf', 'http://i.creativecommons.org/l/by-nc/3.0/88x31.png' ); +insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 6, 'cc by-nc-sa','CC Attribution Non-Commercial Share Alike (cc by-nc-sa)','http://creativecommons.org/licenses/by-nc-sa/3.0/', 'http://creativecommons.org/licenses/by-nc-sa/3.0/rdf', 'http://i.creativecommons.org/l/by-nc-sa/3.0/88x31.png' ); +insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 7, 'cc by-nc-nd','CC Attribution Non-Commercial No Derivatives (cc by-nc-nd)','http://creativecommons.org/licenses/by-nc-nd/3.0/', 'http://creativecommons.org/licenses/by-nc-nd/3.0/rdf', 'http://i.creativecommons.org/l/by-nc-nd/3.0/88x31.png' ); + +INSERT INTO metadataformattype (id, name, mimetype, namespace, formatschema, partialexcludesupported, partialselectsupported) VALUES (1, 'ddi', 'application/xml', 'http://www.icpsr.umich.edu/DDI', 'http://www.icpsr.umich.edu/DDI/Version2-0.xsd', true, true); 
+INSERT INTO metadataformattype (id, name, mimetype, namespace, formatschema, partialexcludesupported, partialselectsupported) VALUES (2, 'oai_dc', 'application/xml', 'http://www.openarchives.org/OAI/2.0/oai_dc/', 'http://www.openarchives.org/OAI/2.0/oai_dc.xsd', false, false); +INSERT INTO metadataformattype (id, name, mimetype, namespace, formatschema, partialexcludesupported, partialselectsupported) VALUES (3, 'marc', 'application/octet-stream', 'http://www.loc.gov/marc/', 'MARC 21', false, false); + +/*create network guest book*/ + +INSERT INTO guestbookquestionnaire(enabled,firstnamerequired, lastnamerequired, emailrequired, institutionrequired, positionrequired, vdc_id) VALUES (true, true, true, true, false, false, null); diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/referenceData.sql.TEMPLATE --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/referenceData.sql.TEMPLATE Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,1223 @@ +-- +-- PostgreSQL database dump +-- + +-- Started on 2006-09-19 16:05:05 Eastern Standard Time + +SET client_encoding = 'UTF8'; +SET check_function_bodies = false; +SET client_min_messages = warning; + +SET search_path = public, pg_catalog; + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('metadata', 'id'), 10, false); + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('template', 'id'), 10, false); + + +-- +-- TOC entry 1840 (class 0 OID 0) +-- Dependencies: 1304 +-- Name: datatable_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER% +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('pagedef', 'id'), 500, false); + + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('datatable', 'id'), 1, false); + + +-- +-- TOC entry 1841 (class 0 OID 0) +-- Dependencies: 1291 +-- Name: datavariable_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER% +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('datavariable', 
'id'), 1, false); + + +-- +-- TOC entry 1842 (class 0 OID 0) +-- Dependencies: 1297 +-- Name: fieldinputlevel_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER% +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('fieldinputlevel', 'id'), 10, false); + + + + + +-- +-- TOC entry 1844 (class 0 OID 0) +-- Dependencies: 1287 +-- Name: logindomain_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER% +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('logindomain', 'id'), 1, false); + + + + +-- +-- TOC entry 1846 (class 0 OID 0) +-- Dependencies: 1312 +-- Name: role_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER% +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('"role"', 'id'), 10, false); + + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('"networkrole"', 'id'), 10, false); + +-- +-- TOC entry 1848 (class 0 OID 0) +-- Dependencies: 1272 +-- Name: studyfield_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER% +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('studyfield', 'id'), 150, true); + + + + + +-- +-- TOC entry 1851 (class 0 OID 0) +-- Dependencies: 1270 +-- Name: studyfile_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER% +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('studyfile', 'id'), 1, false); + + + + + +-- +-- TOC entry 1856 (class 0 OID 0) +-- Dependencies: 1302 +-- Name: usergroup_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER% +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('usergroup', 'id'), 1, false); + + + +-- +-- TOC entry 1859 (class 0 OID 0) +-- Dependencies: 1299 +-- Name: vdcgroup_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER% +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('vdcgroup', 'id'), 1, false); + + +-- +-- TOC entry 1860 (class 0 OID 0) +-- Dependencies: 1289 +-- Name: vdcnetwork_id_seq; 
Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER% +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('vdcnetwork', 'id'), 1, false); + + +-- +-- TOC entry 1861 (class 0 OID 0) +-- Dependencies: 1294 +-- Name: vdcuser_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER% +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('vdcuser', 'id'), 10, false); + + + +-- +-- TOC entry 1813 (class 0 OID 113837) +-- Dependencies: 1274 +-- Data for Name: coll_adv_search_fields; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER% +-- + +ALTER TABLE coll_adv_search_fields DISABLE TRIGGER ALL; + + + +ALTER TABLE coll_adv_search_fields ENABLE TRIGGER ALL; + +-- +-- TOC entry 1818 (class 0 OID 113863) +-- Dependencies: 1281 +-- Data for Name: coll_any_search_fields; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER% +-- + +ALTER TABLE coll_any_search_fields DISABLE TRIGGER ALL; + + + +ALTER TABLE coll_any_search_fields ENABLE TRIGGER ALL; + +-- +-- TOC entry 1804 (class 0 OID 113774) +-- Dependencies: 1259 +-- Data for Name: coll_search_result_fields; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER% +-- + +ALTER TABLE coll_search_result_fields DISABLE TRIGGER ALL; + + + +ALTER TABLE coll_search_result_fields ENABLE TRIGGER ALL; + + + + + +ALTER TABLE datatable ENABLE TRIGGER ALL; + +-- +-- TOC entry 1825 (class 0 OID 113902) +-- Dependencies: 1292 +-- Data for Name: datavariable; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER% +-- + +ALTER TABLE datavariable DISABLE TRIGGER ALL; + + + +ALTER TABLE datavariable ENABLE TRIGGER ALL; + +-- +-- TOC entry 1829 (class 0 OID 113927) +-- Dependencies: 1298 +-- Data for Name: fieldinputlevel; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER% +-- + +ALTER TABLE fieldinputlevel DISABLE TRIGGER ALL; + +INSERT INTO fieldinputlevel (id, name ) VALUES (1, 'required'); +INSERT INTO fieldinputlevel (id, name ) VALUES(2, 'recommended'); +INSERT INTO fieldinputlevel 
(id, name ) VALUES(3, 'optional'); + + + +ALTER TABLE fieldinputlevel ENABLE TRIGGER ALL; + + + +-- +-- TOC entry 1823 (class 0 OID 113888) +-- Dependencies: 1288 +-- Data for Name: logindomain; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER% +-- + +ALTER TABLE logindomain DISABLE TRIGGER ALL; + + + +ALTER TABLE logindomain ENABLE TRIGGER ALL; + + + +-- +-- TOC entry 1838 (class 0 OID 113987) +-- Dependencies: 1313 +-- Data for Name: role; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER% +-- + +ALTER TABLE "role" DISABLE TRIGGER ALL; +INSERT into role(id, name) VALUES (1, 'contributor'); +INSERT into role(id, name) VALUES (2, 'curator'); +INSERT into role(id, name) VALUES (3, 'admin'); +INSERT into role(id,name) VALUES (4, 'privileged viewer'); +ALTER TABLE "role" ENABLE TRIGGER ALL; + +ALTER TABLE "networkrole" DISABLE TRIGGER ALL; +INSERT into networkrole(id, name) VALUES (1, 'Creator'); +INSERT into networkrole(id, name) VALUES (2, 'Admin'); +ALTER TABLE "networkrole" ENABLE TRIGGER ALL; + +ALTER TABLE pagedef DISABLE TRIGGER ALL; + + +-- Pages that don't require role authorization +INSERT INTO pagedef (name, path, role_id, networkrole_id ) VALUES ( 'StudyPage', '/study/StudyPage.xhtml', null,null ); +INSERT INTO pagedef (name, path, role_id, networkrole_id ) VALUES ( 'SubsettingPage', '/subsetting/SubsettingPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ExploreDataPage','/viz/ExploreDataPage.xhtml',null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ErrorPage', '/ErrorPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'HomePage', '/HomePage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'UnauthorizedPage', '/login/UnauthorizedPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'StudyLockedPage', '/login/StudyLockedPage.xhtml', 
null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'LogoutPage', '/login/LogoutPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AddAccountPage', '/login/AddAccountPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditAccountPage', '/login/EditAccountPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AccountOptionsPage', '/login/AccountOptionsPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AccountPage', '/login/AccountPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'LoginPage', '/login/LoginPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ForgotPasswordPage', '/login/ForgotPasswordPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ContributorRequestPage', '/login/ContributorRequestPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ContributorRequestInfoPage', '/login/ContributorRequestInfoPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'CreatorRequestPage','/login/CreatorRequestPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'CreatorRequestInfoPage','/login/CreatorRequestInfoPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'TermsOfUsePage','/login/TermsOfUsePage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AccountTermsOfUsePage','/login/AccountTermsOfUsePage.xhtml', null,null ); +INSERT INTO pagedef (name, path, role_id, networkrole_id ) VALUES ( 'StudyVersionDifferencesPage', '/study/StudyVersionDifferencesPage.xhtml', null,null ); +INSERT INTO pagedef ( name, path, role_id, 
networkrole_id ) VALUES ( 'OptionsPage','/admin/OptionsPage.xhtml',null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageStudiesPage','/study/ManageStudiesPage.xhtml',null,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManifestPage', '/ManifestPage.xhtml', null,null ); + +-- Pages that require VDC Role authorization: +-- Contributor Role +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditStudyPage','/study/EditStudyPage.xhtml',1,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditStudyFilesPage','/study/EditStudyFilesPage.xhtml',1,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AddFilesPage','/study/AddFilesPage.xhtml',1,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'SetUpDataExplorationPage','/study/SetUpDataExplorationPage.xhtml',1,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'DeleteStudyPage','/study/DeleteStudyPage.xhtml',1,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'MyDataversePage','/networkAdmin/MyDataversePage.xhtml',null,null ); + +-- Curator Role +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditCollectionPage','/collection/EditCollectionPage.xhtml',2,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageCollectionsPage','/collection/ManageCollectionsPage.xhtml',2,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'StudyPermissionsPage','/study/StudyPermissionsPage.xhtml',2,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'DeaccessionStudyPage', '/study/DeaccessionStudyPage.xhtml', 2, null ); + +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageTemplatesPage', '/admin/ManageTemplatesPage.xhtml', 2,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 
'TemplateFormPage','/study/TemplateFormPage.xhtml',2,2 ); + +-- Admin Role +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditSitePage', '/site/EditSitePage.xhtml', 3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditBannerFooterPage','/admin/EditBannerFooterPage.xhtml',3,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditContactUsPage','/admin/EditContactUsPage.xhtml',3,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditHomePanelsPage','/admin/EditHomePanelsPage.xhtml',3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditStudyCommentsPage', '/admin/EditStudyCommentsPage.xhtml', 3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditUserTermsPage','/admin/EditUseTermsPage.xhtml',3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditDepositUseTermsPage','/admin/EditDepositUseTermsPage.xhtml',3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'PrivilegedUsersPage','/admin/PrivilegedUsersPage.xhtml',3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'SearchFieldsPage','/admin/SearchFieldsPage.xhtml',3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'PromotionalLinkSearchBoxPage','/admin/PromotionalLinkSearchBoxPage.xhtml',3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditLockssConfigPage','/admin/EditLockssConfigPage.xhtml',3,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditGuestbookQuestionnairePage', '/admin/EditGuestbookQuestionnairePage.xhtml', 3,null ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'GuestBookResponseDataPage', '/admin/GuestBookResponseDataPage.xhtml', 3,2 ); +-- Pages that require Network Role authorization +-- Creator Role +INSERT INTO pagedef ( name, path, role_id, 
networkrole_id ) VALUES ( 'AddSitePage', '/site/AddSitePage.xhtml', null,1 ); +-- Admin Role +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'NetworkOptionsPage', '/networkAdmin/NetworkOptionsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'HarvestSitesPage', '/site/HarvestSitesPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AddClassificationsPage', '/networkAdmin/AddClassificationsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageClassificationsPage', '/networkAdmin/ManageClassificationsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageControlledVocabularyPage', '/admin/ManageControlledVocabularyPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'CommentReviewPage', '/networkAdmin/CommentReviewPage.xhtml', null, 2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageDataversesPage', '/networkAdmin/ManageDataversesPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditHarvestSitePage', '/site/EditHarvestSitePage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkNamePage', '/networkAdmin/EditNetworkNamePage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'NetworkPrivilegedUsersPage', '/networkAdmin/NetworkPrivilegedUsersPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'AllUsersPage', '/networkAdmin/AllUsersPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkAnnouncementsPage', '/networkAdmin/EditNetworkAnnouncementsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkBannerFooterPage', 
'/networkAdmin/EditNetworkBannerFooterPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditExportSchedulePage', '/networkAdmin/EditExportSchedulePage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditOAISetPage', '/networkAdmin/EditOAISetPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkDownloadUseTermsPage', '/networkAdmin/EditNetworkDownloadUseTermsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditNetworkDepositUseTermsPage', '/networkAdmin/EditNetworkDepositUseTermsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditAccountUseTermsPage', '/networkAdmin/EditAccountUseTermsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditUserGroupPage', '/networkAdmin/EditUserGroupPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'UserGroupsPage', '/networkAdmin/UserGroupsPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ImportStudyPage', '/networkAdmin/ImportStudyPage.xhtml', null,2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'UtilitiesPage', '/networkAdmin/UtilitiesPage.xhtml', null, 2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'EditSubnetworkPage', '/networkAdmin/EditSubnetworkPage.xhtml', null, 2 ); +INSERT INTO pagedef ( name, path, role_id, networkrole_id ) VALUES ( 'ManageSubnetworksPage', '/networkAdmin/ManageSubnetworksPage.xhtml', null, 2 ); + +ALTER TABLE pagedef ENABLE TRIGGER ALL; + + +-- +-- TOC entry 1821 (class 0 OID 113878) +-- Dependencies: 1285 +-- Data for Name: search_result_fields; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER% +-- + +ALTER TABLE search_result_fields DISABLE TRIGGER ALL; + + + +ALTER TABLE 
search_result_fields ENABLE TRIGGER ALL; + + +-- +-- TOC entry 1826 (class 0 OID 113907) +-- Dependencies: 1293 +-- Data for Name: study_studyfield; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER% +-- + +ALTER TABLE study_studyfield DISABLE TRIGGER ALL; + + + +ALTER TABLE study_studyfield ENABLE TRIGGER ALL; + +-- +-- TOC entry 1817 (class 0 OID 113859) +-- Dependencies: 1280 +-- Data for Name: study_usergroup; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER% +-- + +ALTER TABLE study_usergroup DISABLE TRIGGER ALL; + + + +ALTER TABLE study_usergroup ENABLE TRIGGER ALL; + +-- +-- TOC entry 1812 (class 0 OID 113829) +-- Dependencies: 1273 +-- Data for Name: studyfield; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER% +-- + +ALTER TABLE studyfield DISABLE TRIGGER ALL; + +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (1, 'Title', 'Title', 'title', TRUE, TRUE, TRUE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (2, 'Study ID', 'Study ID', 'studyId', TRUE, TRUE, TRUE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (3, 'Author', 'Author', 'author', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (4, 'Author Affiliation', 'Author Affiliation', 'authorAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (5, 'Producer', 'Producer', 'producer', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield 
(id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (6, 'Producer URL', 'Producer URL', 'producerURL', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (7, 'URL to Producer Logo', 'URL to Producer Logo', 'producerLogo', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (8, 'Producer Name Abbreviation', 'Producer Name Abbreviation', 'producerAbbreviation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (9, 'Production Date', 'Production Date', 'productionDate', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (10, 'Software', 'Software', 'software', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (11, 'Software Version', 'Software Version', 'softwareVersion', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (12, 'Funding Agency', 'Funding Agency', 'fundingAgency', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (13, 'Grant Number', 'Grant Number', 'grantNumber', FALSE, FALSE, FALSE, FALSE, TRUE ); 
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (14, 'Grant Number Agency', 'Grant Number Agency', 'grantNumberAgency', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (15, '', '', 'distributor', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (16, '', '', 'distributorURL', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (17, '', '', 'distributorLogo', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (18, '', '', 'distributionDate', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (19, '', '', 'distributorContact', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (20, '', '', 'distributorContactAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (21, '', '', 'distributorContactEmail', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, 
allowControlledVocabulary) VALUES (22, '', '', 'depositor', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (23, '', '', 'dateOfDeposit', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (24, '', '', 'series', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (25, '', '', 'seriesInformation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (26, '', '', 'studyVersion', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (27, '', '', 'keyword', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (28, '', '', 'keywordVocab', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (29, '', '', 'keywordVocabURI', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (30, '', '', 'topicClassification', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, 
allowControlledVocabulary) VALUES (31, '', '', 'topicClassVocab', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (32, '', '', 'topicClassVocabURI', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (33, '', '', 'description', FALSE, TRUE, TRUE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (34, '', '', 'descriptionDate', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (35, '', '', 'timePeriodCoveredStart', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (36, '', '', 'timePeriodCoveredEnd', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (37, '', '', 'dateOfCollectionStart', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (38, '', '', 'dateOfCollectionEnd', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (39, '', '', 'country', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, 
searchResultField, customField, allowControlledVocabulary) VALUES (40, '', '', 'geographicCoverage', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (41, '', '', 'geographicUnit', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (42, '', '', 'unitOfAnalysis', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (43, '', '', 'universe', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (44, '', '', 'kindOfData', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (45, '', '', 'timeMethod', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (46, '', '', 'dataCollector', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (47, '', '', 'frequencyOfDataCollection', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (48, '', '', 'samplingProcedure', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, 
name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (49, '', '', 'deviationsFromSampleDesign', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (50, '', '', 'collectionMode', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (51, '', '', 'researchInstrument', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (52, '', '', 'dataSources', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (53, '', '', 'originOfSources', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (54, '', '', 'characteristicOfSources', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (55, '', '', 'accessToSources', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (56, '', '', 'dataCollectionSituation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (57, '', '', 'actionsToMinimizeLoss', FALSE, FALSE, FALSE, FALSE, TRUE ); 
+INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (58, '', '', 'controlOperations', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (59, '', '', 'weighting', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (60, '', '', 'cleaningOperations', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (61, '', '', 'studyLevelErrorNotes', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (62, '', '', 'responseRate', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (63, '', '', 'samplingErrorEstimates', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (64, '', '', 'otherDataAppraisal', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (65, '', '', 'placeOfAccess', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (66, '', '', 'originalArchive', FALSE, 
FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (67, '', '', 'availabilityStatus', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (68, '', '', 'collectionSize', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (69, '', '', 'studyCompletion', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (70, '', '', 'confidentialityDeclaration', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (71, '', '', 'specialPermissions', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (72, '', '', 'restrictions', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (73, '', '', 'contact', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (74, '', '', 'citationRequirements', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (75, '', '', 
'depositorRequirements', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (76, '', '', 'conditions', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (77, '', '', 'disclaimer', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (78, '', '', 'relatedMaterial', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (79, '', '', 'publication', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (80, '', '', 'relatedStudies', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (81, '', '', 'otherReferences', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (82, '', '', 'notesText', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (83, '', '', 'note', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (84, '', '', 
'notesInformationSubject', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (85, '', '', 'otherId', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (86, '', '', 'otherIdAgency', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (87, '', '', 'productionPlace', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (88, '', '', 'numberOfFiles', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (89, '', '', 'publicationReplicationData', FALSE, TRUE, FALSE, FALSE, FALSE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (90, '', '', 'subTitle', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (91, '', '', 'versionDate', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (92, '', '', 'geographicBoundingBox', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) 
VALUES (93, '', '', 'eastLongitude', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (94, '', '', 'northLatitude', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (95, '', '', 'southLatitude', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (96, '', '', 'producerAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (97, '', '', 'distributorAffiliation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (98, '', '', 'distributorAbbreviation', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (99, 'Author', 'Author', 'authorName', TRUE, TRUE, TRUE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (100, '', '', 'producerName', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (101, '', '', 'distributorName', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, 
customField, allowControlledVocabulary) VALUES (102, '', '', 'distributorContactName', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (103, '', '', 'descriptionText', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (104, '', '', 'keywordValue', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (105, '', '', 'topicClassValue', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (106, '', '', 'otherIdValue', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (107, '', '', 'softwareName', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (108, '', '', 'grantNumberValue', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (109, '', '', 'seriesName', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (110, '', '', 'studyVersionValue', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, 
searchResultField, customField, allowControlledVocabulary) VALUES (111, '', '', 'westLongitude', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (112, '', '', 'noteInformationType', FALSE, FALSE, FALSE, FALSE, TRUE ); + +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (113, '', '', 'publicationCitation', FALSE, TRUE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (114, '', '', 'publicationIDType', FALSE, FALSE, FALSE, FALSE, FALSE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (115, '', '', 'publicationIDNumber', FALSE, FALSE, FALSE, FALSE, TRUE ); +INSERT INTO studyfield (id,title,description, name,basicSearchField,advancedSearchField, searchResultField, customField, allowControlledVocabulary) VALUES (116, '', '', 'publicationURL', FALSE, FALSE, FALSE, FALSE, TRUE ); + +--set the parent child relationship +update studyfield set parentstudyfield_id = 3 where id = 99; +update studyfield set parentstudyfield_id = 3 where id = 4; + +update studyfield set parentstudyfield_id = 5 where id = 100; +update studyfield set parentstudyfield_id = 5 where id = 6; +update studyfield set parentstudyfield_id = 5 where id = 7; +update studyfield set parentstudyfield_id = 5 where id = 8; +update studyfield set parentstudyfield_id = 5 where id = 96; + +update studyfield set parentstudyfield_id = 15 where id = 101; +update studyfield set parentstudyfield_id = 15 where id = 16; +update studyfield set parentstudyfield_id = 15 where id = 17; +update studyfield set parentstudyfield_id = 15 where id = 97; 
+update studyfield set parentstudyfield_id = 15 where id = 98; + +update studyfield set parentstudyfield_id = 19 where id = 102; +update studyfield set parentstudyfield_id = 19 where id = 20; +update studyfield set parentstudyfield_id = 19 where id = 21; + +update studyfield set parentstudyfield_id = 33 where id = 103; +update studyfield set parentstudyfield_id = 33 where id = 34; + +update studyfield set parentstudyfield_id = 27 where id = 104; +update studyfield set parentstudyfield_id = 27 where id = 28; +update studyfield set parentstudyfield_id = 27 where id = 29; + +update studyfield set parentstudyfield_id = 30 where id = 105; +update studyfield set parentstudyfield_id = 30 where id = 31; +update studyfield set parentstudyfield_id = 30 where id = 32; + +update studyfield set parentstudyfield_id = 85 where id = 106; +update studyfield set parentstudyfield_id = 85 where id = 86; + +update studyfield set parentstudyfield_id = 10 where id = 107; +update studyfield set parentstudyfield_id = 10 where id = 11; + +update studyfield set parentstudyfield_id = 13 where id = 108; +update studyfield set parentstudyfield_id = 13 where id = 14; + +update studyfield set parentstudyfield_id = 24 where id = 109; +update studyfield set parentstudyfield_id = 24 where id = 25; + +update studyfield set parentstudyfield_id = 26 where id = 110; +update studyfield set parentstudyfield_id = 26 where id = 91; + +update studyfield set parentstudyfield_id = 92 where id = 111; +update studyfield set parentstudyfield_id = 92 where id = 93; +update studyfield set parentstudyfield_id = 92 where id = 94; +update studyfield set parentstudyfield_id = 92 where id = 95; + +update studyfield set parentstudyfield_id = 83 where id = 112; +update studyfield set parentstudyfield_id = 83 where id = 82; +update studyfield set parentstudyfield_id = 83 where id = 84; + +update studyfield set parentstudyfield_id = 79 where id = 113; +update studyfield set parentstudyfield_id = 79 where id = 114; +update 
studyfield set parentstudyfield_id = 79 where id = 115; +update studyfield set parentstudyfield_id = 79 where id = 116; +update studyfield set parentstudyfield_id = 79 where id = 89; + +update studyfield set displayorder = 0 where name = 'authorName'; +update studyfield set displayorder = 2 where name = 'authorAffiliation'; +update studyfield set displayorder = 2 where name = 'producerAbbreviation'; +update studyfield set displayorder = 1 where name = 'producerName'; +update studyfield set displayorder = 3 where name = 'producerAffiliation'; +update studyfield set displayorder = 4 where name = 'producerURL'; +update studyfield set displayorder = 5 where name = 'producerLogo'; +update studyfield set displayorder = 2 where name = 'softwareVersion'; +update studyfield set displayorder = 1 where name = 'softwareName'; +update studyfield set displayorder = 1 where name = 'grantNumberValue'; +update studyfield set displayorder = 2 where name = 'grantNumberAgency'; +update studyfield set displayorder = 1 where name = 'distributorName'; +update studyfield set displayorder = 4 where name = 'distributorURL'; +update studyfield set displayorder = 5 where name = 'distributorLogo'; +update studyfield set displayorder = 3 where name = 'distributorAffiliation'; +update studyfield set displayorder = 2 where name = 'distributorAbbreviation'; +update studyfield set displayorder = 1 where name = 'distributorContactName'; +update studyfield set displayorder = 2 where name = 'distributorContactAffiliation'; +update studyfield set displayorder = 3 where name = 'distributorContactEmail'; +update studyfield set displayorder = 2 where name = 'seriesInformation'; +update studyfield set displayorder = 1 where name = 'seriesName'; +update studyfield set displayorder = 1 where name = 'studyVersionValue'; +update studyfield set displayorder = 2 where name = 'versionDate'; +update studyfield set displayorder = 1 where name = 'keywordValue'; +update studyfield set displayorder = 3 where name = 
'keywordVocabURI';
+update studyfield set displayorder = 2 where name = 'keywordVocab';
+update studyfield set displayorder = 1 where name = 'topicClassValue';
+update studyfield set displayorder = 2 where name = 'topicClassVocab';
+update studyfield set displayorder = 3 where name = 'topicClassVocabURI';
+update studyfield set displayorder = 1 where name = 'descriptionText';
+update studyfield set displayorder = 2 where name = 'descriptionDate';
+update studyfield set displayorder = 1 where name = 'publicationCitation';
+update studyfield set displayorder = 2 where name = 'publicationIDNumber';
+update studyfield set displayorder = 3 where name = 'publicationURL';
+update studyfield set displayorder = 3 where name = 'notesText';
+update studyfield set displayorder = 1 where name = 'noteInformationType';
+update studyfield set displayorder = 2 where name = 'notesInformationSubject';
+update studyfield set displayorder = 2 where name = 'otherIdAgency';
+update studyfield set displayorder = 1 where name = 'otherIdValue';
+
+-- Assign the UI widget type for each studyfield row. Consolidated from one
+-- UPDATE per id into one set-based UPDATE per fieldtype value; the id sets
+-- below are exactly the ids of the original per-row statements.
+update studyfield set fieldtype = 'date'
+    where id in (9, 18, 23, 34, 35, 36, 37, 38, 91);
+update studyfield set fieldtype = 'email'
+    where id = 21;
+update studyfield set fieldtype = 'textBox'
+    where id in (4, 8, 11, 12, 13, 14, 19, 20, 22, 24, 25, 26, 27, 28,
+                 30, 31, 33, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
+                 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+                 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77,
+                 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 92, 96,
+                 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108,
+                 109, 110, 112, 113, 115);
+update studyfield set fieldtype = 'url'
+    where id in (6, 7, 16, 17, 29, 32, 116);
+
+ALTER TABLE studyfield ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1811 (class 0 OID 113819)
+-- Dependencies: 1271
+-- Data for Name: studyfile; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+-- No seed rows for studyfile; the DISABLE/ENABLE pair is pg_dump boilerplate.
+ALTER TABLE studyfile DISABLE TRIGGER ALL;
+
+ALTER TABLE studyfile ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1810 (class 0 OID 113813)
+-- Dependencies: 1269
+-- Data for Name: studyfile_usergroup; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE studyfile_usergroup DISABLE TRIGGER ALL;
+
+ALTER TABLE studyfile_usergroup ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1835 (class 0 OID 113964)
+-- Dependencies: 1307
+-- Data for Name: summary_fields; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE summary_fields DISABLE TRIGGER ALL;
+
+ALTER TABLE summary_fields ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1809 (class 0 OID 113808)
+-- Dependencies: 1268
+-- Data for Name: template; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE "template" DISABLE TRIGGER ALL;
+
+ALTER TABLE "template" ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1807 (class 0 OID 113797)
+-- Dependencies: 1265
+-- Data for Name: templatefield; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE templatefield DISABLE TRIGGER ALL;
+
+ALTER TABLE templatefield ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1832 (class 0 OID 113945)
+-- Dependencies: 1303
+-- Data for Name: usergroup; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE usergroup DISABLE TRIGGER ALL;
+
+ALTER TABLE usergroup ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1831 (class 0 OID 113939)
+-- Dependencies: 1301
+-- Data for Name: vdc_adv_search_fields; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdc_adv_search_fields DISABLE TRIGGER ALL;
+
+ALTER TABLE vdc_adv_search_fields ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1801 (class 0 OID 113756)
+-- Dependencies: 1255
+-- Data for Name: vdc_any_search_fields; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdc_any_search_fields DISABLE TRIGGER ALL;
+
+ALTER TABLE vdc_any_search_fields ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1808 (class 0 OID 113802)
+-- Dependencies: 1266
+-- Data for Name: vdc_usergroup; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdc_usergroup DISABLE TRIGGER ALL;
+
+ALTER TABLE vdc_usergroup ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1830 (class 0 OID 113934)
+-- Dependencies: 1300
+-- Data for Name: vdcgroup; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdcgroup DISABLE TRIGGER ALL;
+
+ALTER TABLE vdcgroup ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1828 (class 0 OID 113921)
+-- Dependencies: 1296
+-- Data for Name: vdcgrouprelationship; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdcgrouprelationship DISABLE TRIGGER ALL;
+
+ALTER TABLE vdcgrouprelationship ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1827 (class 0 OID 113913)
+-- Dependencies: 1295
+-- Data for Name: vdcuser; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdcuser DISABLE TRIGGER ALL;
+
+ALTER TABLE vdcuser ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1834 (class 0 OID 113960)
+-- Dependencies: 1306
+-- Data for Name: vdcuser_usergroup; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdcuser_usergroup DISABLE TRIGGER ALL;
+
+ALTER TABLE vdcuser_usergroup ENABLE TRIGGER ALL;
+
+-- Lookup/reference rows for variable metadata.
+ALTER TABLE variableintervaltype DISABLE TRIGGER ALL;
+
+INSERT INTO variableintervaltype (id, name ) VALUES (1, 'discrete');
+INSERT INTO variableintervaltype (id, name ) VALUES (2, 'continuous');
+INSERT INTO variableintervaltype (id, name ) VALUES (3, 'nominal');
+INSERT INTO variableintervaltype (id, name ) VALUES (4, 'dichotomous');
+
+ALTER TABLE variableintervaltype ENABLE TRIGGER ALL;
+
+ALTER TABLE variableformattype DISABLE TRIGGER ALL;
+
+INSERT INTO variableformattype (id, name ) VALUES (1, 'numeric');
+INSERT INTO variableformattype (id, name ) VALUES (2, 'character');
+
+ALTER TABLE variableformattype ENABLE TRIGGER ALL;
+
+ALTER TABLE variablerangetype DISABLE TRIGGER ALL;
+
+INSERT INTO variablerangetype (id, name ) VALUES (1, 'min');
+INSERT INTO variablerangetype (id, name ) VALUES (2, 'max');
+INSERT INTO variablerangetype (id, name ) VALUES (3, 'min exclusive');
+INSERT INTO variablerangetype (id, name ) VALUES (4, 'max exclusive');
+INSERT INTO variablerangetype (id, name ) VALUES (5, 'point');
+
+ALTER TABLE variablerangetype ENABLE TRIGGER ALL;
+
+ALTER TABLE summarystatistictype DISABLE TRIGGER ALL;
+
+INSERT INTO summarystatistictype (id, name ) VALUES (1, 'mean');
+INSERT INTO summarystatistictype (id, name ) VALUES (2, 'medn');
+INSERT INTO summarystatistictype (id, name ) VALUES (3, 'mode');
+INSERT INTO summarystatistictype (id, name ) VALUES (4, 'min');
+INSERT INTO summarystatistictype (id, name ) VALUES (5, 'max');
+INSERT INTO summarystatistictype (id, name ) VALUES (6, 'stdev');
+INSERT INTO summarystatistictype (id, name ) VALUES (7, 'vald');
+INSERT INTO summarystatistictype (id, name ) VALUES (8, 'invd');
+
+-- BUG FIX: the original re-enabled triggers on variablerangetype here
+-- (copy-paste of the previous section), which left summarystatistictype
+-- with its triggers permanently disabled.
+ALTER TABLE summarystatistictype ENABLE TRIGGER ALL;
+
+-- Bootstrap network administrator account (networkRole_id 2 = admin; the
+-- encryptedpassword value is the seeded default hash).
+ALTER TABLE vdcuser DISABLE TRIGGER ALL;
+
+insert into vdcuser(id, version, email, firstname, lastname, username, encryptedpassword, networkRole_id,active, agreedtermsofuse ) VALUES ( 1, 1, 'dataverse@lists.hmdc.harvard.edu','Network','Admin', 'networkAdmin' ,'tf0bLmzOFx5JrBhe2EIraS5GBnI=' ,2,true, true);
+
+ALTER TABLE vdcuser ENABLE TRIGGER ALL;
+
+--
+-- Default metadata and template records. (The original comment here was a
+-- duplicated "template" TOC header copied from above.)
+--
+
+ALTER TABLE "metadata" DISABLE TRIGGER ALL;
+-- Default metadata - contains no metadata values
+INSERT INTO metadata( id, version ) VALUES ( 1, 1);
+
+ALTER TABLE "metadata" ENABLE TRIGGER ALL;
+
+ALTER TABLE "template" DISABLE TRIGGER ALL;
+
+INSERT INTO template( id, version, vdcnetwork_id, name,metadata_id,enabled) VALUES (1, 1, 0, 'Dataverse Network Default Template',1,true);
+
+ALTER TABLE "template" ENABLE TRIGGER ALL;
+
+--
+-- TOC entry 1824 (class 0 OID 113895)
+-- Dependencies: 1290
+-- Data for Name: vdcnetwork; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER%
+--
+
+ALTER TABLE vdcnetwork DISABLE TRIGGER ALL;
+
+INSERT INTO vdcnetwork (id, version, name, networkpageheader, networkpagefooter, announcements, displayannouncements, aboutthisdataversenetwork, contactemail, systememail, defaultvdcheader, defaultvdcfooter, defaultvdcabouttext, defaultvdcannouncements, displayvdcannouncements, displayvdcrecentstudies,
defaulttemplate_id, allowcreaterequest, defaultnetworkadmin_id,protocol,authority,handleregistration,termsofuseenabled, deposittermsofuseenabled, downloadtermsofuseenabled, defaultdisplaynumber, exportperiod, exporthourofday) VALUES (0, 1, '[Your]', ' ', ' ', 'A description of your Dataverse Network or announcements may be added here. Use Network Options to edit or remove this text.', TRUE, 'This About page is not used anymore in the DVN application.', 'dataverse@lists.hmdc.harvard.edu','dataverse@lists.hmdc.harvard.edu', ' ', ' ', 'This About page is not used anymore in the DVN application.', '', TRUE, TRUE, 1, FALSE,1,'hdl','TEST',false,false,false,false,16,'daily',3); + +update vdcnetwork set defaultvdcheader=' +
      +
      '; + +update vdcnetwork set defaultvdcfooter='
      '; + + + +update vdcnetwork set requireDVDescription = false, + requireDVaffiliation = false, + requireDVclassification = false, + requireDVstudiesforrelease = false; + +ALTER TABLE vdcnetwork ENABLE TRIGGER ALL; + + +-- +-- TOC entry 1807 (class 0 OID 113797) +-- Dependencies: 1265 +-- Data for Name: templatefield; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER% +-- + +ALTER TABLE templatefield DISABLE TRIGGER ALL; + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(1,1,1,1,'required',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(2,1,1,2,'required',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(3,1,1,3,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(4,1,1,4,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(5,1,1,5,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(6,1,1,6,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(7,1,1,7,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(8,1,1,8,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(9,1,1,9,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(10,1,1,10,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(11,1,1,11,'optional',-1); +INSERT INTO templatefield(id, version, template_id, 
studyfield_id, fieldinputlevelstring, displayorder) VALUES(12,1,1,12,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(13,1,1,13,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(14,1,1,14,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(15,1,1,15,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(16,1,1,16,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(17,1,1,17,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(18,1,1,18,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(19,1,1,19,'recommended',-1); + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(20,1,1,20,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(21,1,1,21,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(22,1,1,22,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(23,1,1,23,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(24,1,1,24,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(25,1,1,25,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, 
displayorder) VALUES(26,1,1,26,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(27,1,1,27,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(28,1,1,28,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(29,1,1,29,'optional',-1); + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(30,1,1,30,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(31,1,1,31,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(32,1,1,32,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(33,1,1,33,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(34,1,1,34,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(35,1,1,35,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(36,1,1,36,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(37,1,1,37,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(38,1,1,38,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(39,1,1,39,'recommended',-1); + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) 
VALUES(40,1,1,40,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(41,1,1,41,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(42,1,1,42,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(43,1,1,43,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(44,1,1,44,'recommended',-1); + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(45,1,1,45,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(46,1,1,46,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(47,1,1,47,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(48,1,1,48,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(49,1,1,49,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(50,1,1,50,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(51,1,1,51,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(52,1,1,52,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(53,1,1,53,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(54,1,1,54,'optional',-1); +INSERT INTO 
templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(55,1,1,55,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(56,1,1,56,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(57,1,1,57,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(58,1,1,58,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(59,1,1,59,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(60,1,1,60,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(61,1,1,61,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(62,1,1,62,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(63,1,1,63,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(64,1,1,64,'optional',-1); + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(65,1,1,65,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(66,1,1,66,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(67,1,1,67,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(68,1,1,68,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, 
fieldinputlevelstring, displayorder) VALUES(69,1,1,69,'optional',-1); + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(70,1,1,70,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(71,1,1,71,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(72,1,1,72,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(73,1,1,73,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(74,1,1,74,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(75,1,1,75,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(76,1,1,76,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(77,1,1,77,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(78,1,1,78,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(79,1,1,79,'recommended',-1); + +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(80,1,1,80,'recommended',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(81,1,1,81,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(82,1,1,82,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) 
VALUES(83,1,1,83,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(84,1,1,84,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(85,1,1,85,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(86,1,1,86,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(87,1,1,87,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(88,1,1,88,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(89,1,1,89,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(90,1,1,90,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(91,1,1,91,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(92,1,1,92,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(93,1,1,93,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(94,1,1,94,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(95,1,1,95,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(96,1,1,96,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(97,1,1,97,'optional',-1); +INSERT INTO templatefield(id, 
version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(98,1,1,98,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(99,1,1,99,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(100,1,1,100,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(101,1,1,101,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(102,1,1,102,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(103,1,1,103,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(104,1,1,104,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(105,1,1,105,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(106,1,1,106,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(107,1,1,107,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(108,1,1,108,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(109,1,1,109,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(110,1,1,110,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(111,1,1,111,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, 
fieldinputlevelstring, displayorder) VALUES(112,1,1,112,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(113,1,1,113,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(114,1,1,114,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(115,1,1,115,'optional',-1); +INSERT INTO templatefield(id, version, template_id, studyfield_id, fieldinputlevelstring, displayorder) VALUES(116,1,1,116,'optional',-1); + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('templatefield', 'id'), 150, false); + + +ALTER TABLE templatefield ENABLE TRIGGER ALL; + + + +-- +-- TOC entry 1814 (class 0 OID 113843) +-- Dependencies: 1276 +-- Data for Name: templatefilecategory; Type: TABLE DATA; Schema: public; Owner: %POSTGRES_USER% +-- + +ALTER TABLE templatefilecategory DISABLE TRIGGER ALL; + +INSERT INTO templatefilecategory(id, template_id, name, displayorder) VALUES(1,1,'Documentation',1); +INSERT INTO templatefilecategory(id, template_id, name, displayorder) VALUES(2,1,'Data Files',2); + +ALTER TABLE templatefilecategory ENABLE TRIGGER ALL; + +-- +-- TOC entry 1855 (class 0 OID 0) +-- Dependencies: 1275 +-- Name: templatefilecategory_id_seq; Type: SEQUENCE SET; Schema: public; Owner: %POSTGRES_USER% +-- + +SELECT pg_catalog.setval(pg_catalog.pg_get_serial_sequence('templatefilecategory', 'id'), 5, false); + + +-- Completed on 2006-09-19 16:05:06 Eastern Standard Time + +-- +-- PostgreSQL database dump complete +-- + + +-- Sequence: studyid_seq + +-- DROP SEQUENCE studyid_seq; + +CREATE SEQUENCE studyid_seq + INCREMENT 1 + MINVALUE 1 + MAXVALUE 9223372036854775807 + START 10000 + CACHE 1; +ALTER TABLE studyid_seq OWNER TO "%POSTGRES_USER%"; + +-- Sequence: filesystemname_seq + +-- DROP SEQUENCE filesystemname_seq; + +CREATE SEQUENCE filesystemname_seq + 
INCREMENT 1 + MINVALUE 1 + MAXVALUE 9223372036854775807 + START 2 + CACHE 1; +ALTER TABLE filesystemname_seq OWNER TO "%POSTGRES_USER%"; + + +INSERT INTO DataFileFormatType (id, value, name, mimeType) VALUES (1, 'D02', 'Splus', 'text/plain'); +INSERT INTO DataFileFormatType (id, value, name, mimeType) VALUES (2, 'D03', 'Stata', 'application/x-stata'); +INSERT INTO DataFileFormatType (id, value, name, mimeType) VALUES (3, 'D04', 'R', 'application/x-rlang-transport'); + +INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (0, 'ddi', 'DDI', null); +INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (1, 'oai_etdms', 'MIF', 'mif2ddi.xsl'); +INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (2, 'oai_dc', 'DC', 'oai_dc2ddi.xsl'); +INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (3, 'oai_fgdc', 'FGDC', 'fgdc2ddi.xsl'); +INSERT INTO harvestformattype (id, metadataprefix, name, stylesheetfilename) VALUES (4, 'dcmi_terms', 'DCMI_terms', 'dcmi_terms2ddi.xsl'); + +create index datavariable_id_index on datavariable (id); +create index summarystatistic_id_index on summarystatistic (id); +create index summarystatistic_datavariable_id_index on summarystatistic (datavariable_id); +create index variablecategory_id_index on variablecategory (id); +create index variablecategory_datavariable_id_index on variablecategory (datavariable_id); +create index variablerange_id_index on variablerange (id); +create index study_id_index on study(id); +create index study_owner_id_index on study(owner_id); +create index weightedvarrelationship_id_index on weightedvarrelationship (weighted_variable_id,variable_id); +create index studyfile_id_index on studyfile(id); +create index datavariable_datatable_id_index on datavariable(datatable_id); +create index variablerange_datavariable_id_index on variablerange (datavariable_id); +create index metadata_id_index on 
metadata(id); +create index studyabstract_metadata_id_index on studyabstract(metadata_id); +create index studyauthor_metadata_id_index on studyauthor(metadata_id); +create index studydistributor_metadata_id_index on studydistributor(metadata_id); +create index studygeobounding_metadata_id_index on studygeobounding(metadata_id); +create index studygrant_metadata_id_index on studygrant(metadata_id); +create index studykeyword_metadata_id_index on studykeyword(metadata_id); +create index studynote_metadata_id_index on studynote(metadata_id); +create index studyotherid_metadata_id_index on studyotherid(metadata_id); +create index studyotherref_metadata_id_index on studyotherref(metadata_id); +create index studyproducer_metadata_id_index on studyproducer(metadata_id); +create index studyrelmaterial_metadata_id_index on studyrelmaterial(metadata_id); +create index studyrelpublication_metadata_id_index on studyrelpublication(metadata_id); +create index studyrelstudy_metadata_id_index on studyrelstudy(metadata_id); +create index studysoftware_metadata_id_index on studysoftware(metadata_id); +create index studytopicclass_metadata_id_index on studytopicclass(metadata_id); +create index template_metadata_id_index on template(metadata_id); +create index studyfileactivity_id_index on studyfileactivity(id); +create index studyfileactivity_studyfile_id_index on studyfileactivity(studyfile_id); +create index studyfileactivity_study_id_index on studyfileactivity(study_id); + + + + + +INSERT INTO vdcnetworkstats (id,vdcnetwork_id,downloadcount,studycount,filecount) values (0,0,0,0,0); + + insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 1, 'cc by', 'CC Attribution (cc by)', 'http://creativecommons.org/licenses/by/3.0/', 'http://creativecommons.org/licenses/by/3.0/rdf', 'http://i.creativecommons.org/l/by/3.0/88x31.png' ); +-- removed until we support cc0 +--insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 2, 
'cc0','CC Zero (cc0)','http://creativecommons.org/publicdomain/zero/1.0/','http://creativecommons.org/publicdomain/zero/1.0/rdf','http://i.creativecommons.org/l/zero/1.0/88x31.png'); +insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 3, 'cc by-sa','CC Attribution Share Alike (cc by-sa)','http://creativecommons.org/licenses/by-sa/3.0/', 'http://creativecommons.org/licenses/by-sa/3.0/rdf', 'http://i.creativecommons.org/l/by-sa/3.0/88x31.png' ); +insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 4, 'cc by-nd','CC Attribution No Derivatives (cc by-nd)','http://creativecommons.org/licenses/by-nd/3.0/', 'http://creativecommons.org/licenses/by-nd/3.0/rdf', 'http://i.creativecommons.org/l/by-nd/3.0/88x31.png' ); +insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 5, 'cc by-nc','CC Attribution Non-Commercial (cc by-nc)','http://creativecommons.org/licenses/by-nc/3.0/', 'http://creativecommons.org/licenses/by-nc/3.0/rdf', 'http://i.creativecommons.org/l/by-nc/3.0/88x31.png' ); +insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 6, 'cc by-nc-sa','CC Attribution Non-Commercial Share Alike (cc by-nc-sa)','http://creativecommons.org/licenses/by-nc-sa/3.0/', 'http://creativecommons.org/licenses/by-nc-sa/3.0/rdf', 'http://i.creativecommons.org/l/by-nc-sa/3.0/88x31.png' ); +insert into licensetype (id, shortname, name, licenseurl, rdfurl, imageurl) values ( 7, 'cc by-nc-nd','CC Attribution Non-Commercial No Derivatives (cc by-nc-nd)','http://creativecommons.org/licenses/by-nc-nd/3.0/', 'http://creativecommons.org/licenses/by-nc-nd/3.0/rdf', 'http://i.creativecommons.org/l/by-nc-nd/3.0/88x31.png' ); + +INSERT INTO metadataformattype (id, name, mimetype, namespace, formatschema, partialexcludesupported, partialselectsupported) VALUES (1, 'ddi', 'application/xml', 'http://www.icpsr.umich.edu/DDI', 'http://www.icpsr.umich.edu/DDI/Version2-0.xsd',
true, true); +INSERT INTO metadataformattype (id, name, mimetype, namespace, formatschema, partialexcludesupported, partialselectsupported) VALUES (2, 'oai_dc', 'application/xml', 'http://www.openarchives.org/OAI/2.0/oai_dc/', 'http://www.openarchives.org/OAI/2.0/oai_dc.xsd', false, false); +INSERT INTO metadataformattype (id, name, mimetype, namespace, formatschema, partialexcludesupported, partialselectsupported) VALUES (3, 'marc', 'application/octet-stream', 'http://www.loc.gov/marc/', 'MARC 21', false, false); + +/*create network guest book*/ + +INSERT INTO guestbookquestionnaire(enabled,firstnamerequired, lastnamerequired, emailrequired, institutionrequired, positionrequired, vdc_id) VALUES (true, true, true, true, false, false, null); diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/robots.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/DVN-web/installer/dvninstall/robots.txt Wed May 13 11:50:21 2015 +0200 @@ -0,0 +1,22 @@ +User-agent: * +Disallow: / +User-agent: Googlebot +Disallow: / +#Crawl-delay: 120 +#Disallow: /dvn/faces/javax.faces.resource +#Disallow: /dvn/OAIHandler +#Disallow: /dvn/faces/ContactUsPage.xhtml +#Disallow: /dvn/dv/*/faces/ContactUsPage.xhtml +#Disallow: /dvn/faces/study/TermsOfUsePage.xhtml +#Disallow: /dvn/faces/subsetting/SubsettingPage.xhtml +#Disallow: /dvn/dv/*/faces/subsetting/SubsettingPage.xhtml +#Disallow: /dvn/FileDownload/ +#Disallow: /FileDownload/ +#Disallow: /dvn/dv/*/FileDownload/ +#Disallow: /dvn/resources/ +#Disallow: /dvn/api/ +# + + +# Created initially using: http://www.mcanerin.com/EN/search-engine/robots-txt.asp +# Verified using: http://tool.motoricerca.info/robots-checker.phtml diff -r dd9adfc73390 -r 1b2188262ae9 DVN-web/installer/dvninstall/web-core.jar Binary file DVN-web/installer/dvninstall/web-core.jar has changed