Mercurial > hg > LGDataverses
comparison src/main/java/edu/harvard/iq/dataverse/DatasetField.java @ 10:a50cf11e5178
Rewrite LGDataverse completely upgrading to dataverse4.0
| author | Zoe Hong <zhong@mpiwg-berlin.mpg.de> |
|---|---|
| date | Tue, 08 Sep 2015 17:00:21 +0200 |
| parents | |
| children |
comparison
equal
deleted
inserted
replaced
| 9:5926d6419569 | 10:a50cf11e5178 |
|---|---|
| 1 /* | |
| 2 * To change this license header, choose License Headers in Project Properties. | |
| 3 * To change this template file, choose Tools | Templates | |
| 4 * and open the template in the editor. | |
| 5 */ | |
| 6 package edu.harvard.iq.dataverse; | |
| 7 | |
| 8 /** | |
| 9 * | |
| 10 * @author skraffmiller | |
| 11 */ | |
| 12 import java.io.Serializable; | |
| 13 import java.util.ArrayList; | |
| 14 import java.util.Arrays; | |
| 15 import java.util.Collections; | |
| 16 import java.util.Comparator; | |
| 17 import java.util.HashMap; | |
| 18 import java.util.Iterator; | |
| 19 import java.util.LinkedList; | |
| 20 import java.util.List; | |
| 21 import java.util.Map; | |
| 22 import javax.persistence.CascadeType; | |
| 23 import javax.persistence.Entity; | |
| 24 import javax.persistence.GeneratedValue; | |
| 25 import javax.persistence.GenerationType; | |
| 26 import javax.persistence.Id; | |
| 27 import javax.persistence.Index; | |
| 28 import javax.persistence.JoinColumn; | |
| 29 import javax.persistence.JoinTable; | |
| 30 import javax.persistence.ManyToMany; | |
| 31 import javax.persistence.ManyToOne; | |
| 32 import javax.persistence.OneToMany; | |
| 33 import javax.persistence.OrderBy; | |
| 34 import javax.persistence.Table; | |
| 35 import javax.persistence.Transient; | |
| 36 import org.apache.commons.lang.StringUtils; | |
| 37 | |
| 38 @Entity | |
| 39 @ValidateDatasetFieldType | |
| 40 @Table(indexes = {@Index(columnList="datasetfieldtype_id"),@Index(columnList="datasetversion_id"), | |
| 41 @Index(columnList="parentdatasetfieldcompoundvalue_id"),@Index(columnList="template_id")}) | |
| 42 public class DatasetField implements Serializable { | |
| 43 private static final long serialVersionUID = 1L; | |
| 44 | |
| 45 public static final String NA_VALUE = "N/A"; | |
| 46 | |
| 47 /** | |
| 48 * Orders dataset fields by their display order. | |
| 49 */ | |
| 50 public static final Comparator<DatasetField> DisplayOrder = new Comparator<DatasetField>() { | |
| 51 @Override | |
| 52 public int compare(DatasetField o1, DatasetField o2) { | |
| 53 return Integer.compare( o1.getDatasetFieldType().getDisplayOrder(), | |
| 54 o2.getDatasetFieldType().getDisplayOrder() ); | |
| 55 }}; | |
| 56 | |
| 57 public static DatasetField createNewEmptyDatasetField(DatasetFieldType dsfType, Object dsv) { | |
| 58 | |
| 59 DatasetField dsfv = createNewEmptyDatasetField(dsfType); | |
| 60 //TODO - a better way to handle this? | |
| 61 if (dsv.getClass().getName().equals("edu.harvard.iq.dataverse.DatasetVersion")){ | |
| 62 dsfv.setDatasetVersion((DatasetVersion)dsv); | |
| 63 } else { | |
| 64 dsfv.setTemplate((Template)dsv); | |
| 65 } | |
| 66 | |
| 67 return dsfv; | |
| 68 } | |
| 69 | |
| 70 // originally this was an overloaded method, but we renamed it to get around an issue with Bean Validation | |
| 71 // (that looked t overloaded methods, when it meant to look at overriden methods | |
| 72 public static DatasetField createNewEmptyChildDatasetField(DatasetFieldType dsfType, DatasetFieldCompoundValue compoundValue) { | |
| 73 DatasetField dsfv = createNewEmptyDatasetField(dsfType); | |
| 74 dsfv.setParentDatasetFieldCompoundValue(compoundValue); | |
| 75 return dsfv; | |
| 76 } | |
| 77 | |
| 78 private static DatasetField createNewEmptyDatasetField(DatasetFieldType dsfType) { | |
| 79 DatasetField dsfv = new DatasetField(); | |
| 80 dsfv.setDatasetFieldType(dsfType); | |
| 81 | |
| 82 if (dsfType.isPrimitive()) { | |
| 83 if (!dsfType.isControlledVocabulary()) { | |
| 84 dsfv.getDatasetFieldValues().add(new DatasetFieldValue(dsfv)); | |
| 85 } | |
| 86 } else { // compound field | |
| 87 dsfv.getDatasetFieldCompoundValues().add(DatasetFieldCompoundValue.createNewEmptyDatasetFieldCompoundValue(dsfv)); | |
| 88 } | |
| 89 | |
| 90 return dsfv; | |
| 91 | |
| 92 } | |
| 93 | |
| 94 /** | |
| 95 * Groups a list of fields by the block they belong to. | |
| 96 * | |
| 97 * @param fields well, duh. | |
| 98 * @return a map, mapping each block to the fields that belong to it. | |
| 99 */ | |
| 100 public static Map<MetadataBlock, List<DatasetField>> groupByBlock(List<DatasetField> fields) { | |
| 101 Map<MetadataBlock, List<DatasetField>> retVal = new HashMap<>(); | |
| 102 for (DatasetField f : fields) { | |
| 103 MetadataBlock metadataBlock = f.getDatasetFieldType().getMetadataBlock(); | |
| 104 List<DatasetField> lst = retVal.get(metadataBlock); | |
| 105 if (lst == null) { | |
| 106 retVal.put(metadataBlock, new LinkedList<>(Collections.singleton(f))); | |
| 107 } else { | |
| 108 lst.add(f); | |
| 109 } | |
| 110 } | |
| 111 return retVal; | |
| 112 } | |
| 113 | |
    // Surrogate primary key, generated by the database.
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    // The type (metadata definition) of this field; required (non-nullable FK).
    @ManyToOne
    @JoinColumn(nullable = false)
    private DatasetFieldType datasetFieldType;

    public DatasetFieldType getDatasetFieldType() {
        return datasetFieldType;
    }

    public void setDatasetFieldType(DatasetFieldType datasetField) {
        this.datasetFieldType = datasetField;
    }
| 137 | |
    // Exactly one of datasetVersion, template, or parentDatasetFieldCompoundValue
    // is expected to be set; it identifies this field's owner (see getDataverse()).
    @ManyToOne
    private DatasetVersion datasetVersion;

    public DatasetVersion getDatasetVersion() {
        return datasetVersion;
    }

    public void setDatasetVersion(DatasetVersion datasetVersion) {
        this.datasetVersion = datasetVersion;
    }

    // Set when this field belongs to a template rather than a dataset version.
    @ManyToOne
    private Template template;

    public Template getTemplate() {
        return template;
    }

    public void setTemplate(Template template) {
        this.template = template;
    }

    // Set when this field is a child inside a compound value.
    @ManyToOne(cascade = CascadeType.MERGE)
    private DatasetFieldCompoundValue parentDatasetFieldCompoundValue;

    public DatasetFieldCompoundValue getParentDatasetFieldCompoundValue() {
        return parentDatasetFieldCompoundValue;
    }

    public void setParentDatasetFieldCompoundValue(DatasetFieldCompoundValue parentDatasetFieldCompoundValue) {
        this.parentDatasetFieldCompoundValue = parentDatasetFieldCompoundValue;
    }
| 170 | |
| 171 @OneToMany(mappedBy = "parentDatasetField", orphanRemoval = true, cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) | |
| 172 @OrderBy("displayOrder ASC") | |
| 173 private List<DatasetFieldCompoundValue> datasetFieldCompoundValues = new ArrayList(); | |
| 174 | |
| 175 public List<DatasetFieldCompoundValue> getDatasetFieldCompoundValues() { | |
| 176 return datasetFieldCompoundValues; | |
| 177 } | |
| 178 | |
| 179 public void setDatasetFieldCompoundValues(List<DatasetFieldCompoundValue> datasetFieldCompoundValues) { | |
| 180 this.datasetFieldCompoundValues = datasetFieldCompoundValues; | |
| 181 } | |
| 182 | |
| 183 @OneToMany(mappedBy = "datasetField", orphanRemoval = true, cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) | |
| 184 @OrderBy("displayOrder ASC") | |
| 185 private List<DatasetFieldValue> datasetFieldValues = new ArrayList(); | |
| 186 | |
| 187 public List<DatasetFieldValue> getDatasetFieldValues() { | |
| 188 return this.datasetFieldValues; | |
| 189 } | |
| 190 | |
| 191 public void setDatasetFieldValues(List<DatasetFieldValue> datasetFieldValues) { | |
| 192 this.datasetFieldValues = datasetFieldValues; | |
| 193 } | |
| 194 | |
| 195 @ManyToMany(cascade = {CascadeType.MERGE}) | |
| 196 @JoinTable(indexes = {@Index(columnList="datasetfield_id"),@Index(columnList="controlledvocabularyvalues_id")}) | |
| 197 private List<ControlledVocabularyValue> controlledVocabularyValues = new ArrayList(); | |
| 198 | |
| 199 public List<ControlledVocabularyValue> getControlledVocabularyValues() { | |
| 200 return controlledVocabularyValues; | |
| 201 } | |
| 202 | |
| 203 public void setControlledVocabularyValues(List<ControlledVocabularyValue> controlledVocabularyValues) { | |
| 204 this.controlledVocabularyValues = controlledVocabularyValues; | |
| 205 } | |
| 206 | |
| 207 // HELPER METHODS | |
| 208 public DatasetFieldValue getSingleValue() { | |
| 209 if (!datasetFieldValues.isEmpty()) { | |
| 210 return datasetFieldValues.get(0); | |
| 211 } else { | |
| 212 return new DatasetFieldValue(this); | |
| 213 } | |
| 214 } | |
| 215 | |
| 216 public void setSingleValue(String value) { | |
| 217 if (datasetFieldValues.isEmpty()) { | |
| 218 datasetFieldValues.add(new DatasetFieldValue(this)); | |
| 219 } | |
| 220 datasetFieldValues.get(0).setValue(value); | |
| 221 } | |
| 222 | |
| 223 public ControlledVocabularyValue getSingleControlledVocabularyValue() { | |
| 224 if (!controlledVocabularyValues.isEmpty()) { | |
| 225 return controlledVocabularyValues.get(0); | |
| 226 } else { | |
| 227 return null; | |
| 228 } | |
| 229 } | |
| 230 | |
| 231 public void setSingleControlledVocabularyValue(ControlledVocabularyValue cvv) { | |
| 232 if (!controlledVocabularyValues.isEmpty()) { | |
| 233 controlledVocabularyValues.set(0, cvv); | |
| 234 } else { | |
| 235 controlledVocabularyValues.add(cvv); | |
| 236 } | |
| 237 } | |
| 238 | |
| 239 public String getValue() { | |
| 240 if (!datasetFieldValues.isEmpty()) { | |
| 241 return datasetFieldValues.get(0).getValue(); | |
| 242 } else if (controlledVocabularyValues != null && !controlledVocabularyValues.isEmpty()) { | |
| 243 if (controlledVocabularyValues.get(0) != null){ | |
| 244 return controlledVocabularyValues.get(0).getStrValue(); | |
| 245 } | |
| 246 } | |
| 247 return null; | |
| 248 } | |
| 249 | |
| 250 public String getDisplayValue() { | |
| 251 String returnString = ""; | |
| 252 for (String value : getValues()) { | |
| 253 if(value == null) value=""; | |
| 254 returnString += (returnString.equals("") ? "" : "; ") + value.trim(); | |
| 255 } | |
| 256 return returnString; | |
| 257 } | |
| 258 | |
| 259 public String getCompoundDisplayValue() { | |
| 260 String returnString = ""; | |
| 261 for (DatasetFieldCompoundValue dscv : datasetFieldCompoundValues) { | |
| 262 for (DatasetField dsf : dscv.getChildDatasetFields()) { | |
| 263 for (String value : dsf.getValues()) { | |
| 264 if (!(value == null)) { | |
| 265 returnString += (returnString.equals("") ? "" : "; ") + value.trim(); | |
| 266 } | |
| 267 } | |
| 268 } | |
| 269 } | |
| 270 return returnString; | |
| 271 } | |
| 272 | |
| 273 public List<String> getValues() { | |
| 274 List returnList = new ArrayList(); | |
| 275 if (!datasetFieldValues.isEmpty()) { | |
| 276 for (DatasetFieldValue dsfv : datasetFieldValues) { | |
| 277 returnList.add(dsfv.getValue()); | |
| 278 } | |
| 279 } else { | |
| 280 for (ControlledVocabularyValue cvv : controlledVocabularyValues) { | |
| 281 if (cvv != null && cvv.getStrValue() != null) { | |
| 282 returnList.add(cvv.getStrValue()); | |
| 283 } | |
| 284 } | |
| 285 } | |
| 286 return returnList; | |
| 287 } | |
| 288 | |
| 289 public List<String> getValuesWithoutNaValues() { | |
| 290 List<String> returnList = getValues(); | |
| 291 returnList.removeAll(Arrays.asList(NA_VALUE)); | |
| 292 return returnList; | |
| 293 } | |
| 294 | |
| 295 public boolean isEmpty() { | |
| 296 return isEmpty(false); | |
| 297 } | |
| 298 | |
| 299 public boolean isEmptyForDisplay() { | |
| 300 return isEmpty(true); | |
| 301 } | |
| 302 | |
| 303 | |
| 304 private boolean isEmpty(boolean forDisplay) { | |
| 305 if (datasetFieldType.isPrimitive()) { // primitive | |
| 306 for (String value : getValues()) { | |
| 307 if (!StringUtils.isBlank(value) && !(forDisplay && DatasetField.NA_VALUE.equals(value))) { | |
| 308 return false; | |
| 309 } | |
| 310 } | |
| 311 } else { // compound | |
| 312 for (DatasetFieldCompoundValue cv : datasetFieldCompoundValues) { | |
| 313 for (DatasetField subField : cv.getChildDatasetFields()) { | |
| 314 if (!subField.isEmpty(forDisplay)) { | |
| 315 return false; | |
| 316 } | |
| 317 } | |
| 318 } | |
| 319 } | |
| 320 | |
| 321 return true; | |
| 322 } | |
| 323 | |
    // Validation error message for this field; transient — set during edit-time
    // validation only, never persisted.
    @Transient
    private String validationMessage;

    public String getValidationMessage() {
        return validationMessage;
    }

    public void setValidationMessage(String validationMessage) {
        this.validationMessage = validationMessage;
    }
| 334 | |
    // Cached result of isRequired(); computed lazily on first call, not persisted.
    @Transient
    private Boolean required;

    /**
     * Whether this field must be filled in, taking into account both the field
     * type's own flags and any per-dataverse "input level" overrides configured
     * on the effective metadata-block-root dataverse. The result is cached for
     * the lifetime of this (transient) entity instance.
     */
    public boolean isRequired() {
        if (required == null) {
            required = false;
            if (this.datasetFieldType.isPrimitive() && this.datasetFieldType.isRequired()) {
                required = true;
            }

            if (this.datasetFieldType.isHasRequiredChildren()) {
                required = true;
            }

            // Walk up the dataverse hierarchy to the effective metadata block root;
            // the hierarchy root counts as a block root regardless of its flag.
            Dataverse dv = getDataverse();
            while (!dv.isMetadataBlockRoot()) {
                if (dv.getOwner() == null) {
                    break; // we are at the root; which by definition is metadata block root, regardless of the value
                }
                dv = dv.getOwner();
            }

            List<DataverseFieldTypeInputLevel> dftilListFirst = dv.getDataverseFieldTypeInputLevels();
            // Childless (non-compound) type: a matching input-level entry
            // overrides whatever was derived above.
            if (!getDatasetFieldType().isHasChildren()) {
                for (DataverseFieldTypeInputLevel dsftil : dftilListFirst) {
                    if (dsftil.getDatasetFieldType().equals(this.datasetFieldType)) {
                        required = dsftil.isRequired();
                    }
                }
            }

            // Compound type: the parent becomes required if any of its child
            // types is marked required at the input level.
            if (getDatasetFieldType().isHasChildren() && (!dftilListFirst.isEmpty())) {
                for (DatasetFieldType child : getDatasetFieldType().getChildDatasetFieldTypes()) {
                    for (DataverseFieldTypeInputLevel dftilTest : dftilListFirst) {
                        if (child.equals(dftilTest.getDatasetFieldType())) {
                            if (dftilTest.isRequired()) {
                                required = true;
                            }
                        }
                    }
                }
            }

        }
        // logger.fine("at return " + this.datasetFieldType.getDisplayName() + " " + required);
        return required;
    }
| 382 | |
| 383 public Dataverse getDataverse() { | |
| 384 if (datasetVersion != null) { | |
| 385 return datasetVersion.getDataset().getOwner(); | |
| 386 } else if (parentDatasetFieldCompoundValue != null) { | |
| 387 return parentDatasetFieldCompoundValue.getParentDatasetField().getDataverse(); | |
| 388 } else if (template != null) { | |
| 389 return template.getDataverse(); | |
| 390 } else { | |
| 391 throw new IllegalStateException("DatasetField is in an illegal state: no dataset version, compound value, or template is set as its parent."); | |
| 392 } | |
| 393 } | |
| 394 | |
| 395 | |
    // UI flag: whether this field should be shown for the current dataverse's
    // input levels; transient, not persisted.
    @Transient
    private boolean include;

    public void setInclude(boolean include){
        this.include = include;
    }

    public boolean isInclude(){
        return this.include;
    }
| 406 | |
| 407 @Override | |
| 408 public int hashCode() { | |
| 409 int hash = 0; | |
| 410 hash += (id != null ? id.hashCode() : 0); | |
| 411 return hash; | |
| 412 } | |
| 413 | |
| 414 @Override | |
| 415 public boolean equals(Object object) { | |
| 416 // TODO: Warning - this method won't work in the case the id fields are not set | |
| 417 if (!(object instanceof DatasetField)) { | |
| 418 return false; | |
| 419 } | |
| 420 DatasetField other = (DatasetField) object; | |
| 421 if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) { | |
| 422 return false; | |
| 423 } | |
| 424 return true; | |
| 425 } | |
| 426 | |
| 427 @Override | |
| 428 public String toString() { | |
| 429 return "edu.harvard.iq.dataverse.DatasetField[ id=" + id + " ]"; | |
| 430 } | |
| 431 | |
| 432 public DatasetField copy(Object version) { | |
| 433 return copy(version, null); | |
| 434 } | |
| 435 | |
| 436 // originally this was an overloaded method, but we renamed it to get around an issue with Bean Validation | |
| 437 // (that looked t overloaded methods, when it meant to look at overriden methods | |
| 438 public DatasetField copyChild(DatasetFieldCompoundValue parent) { | |
| 439 return copy(null, parent); | |
| 440 } | |
| 441 | |
| 442 private DatasetField copy(Object version, DatasetFieldCompoundValue parent) { | |
| 443 DatasetField dsf = new DatasetField(); | |
| 444 dsf.setDatasetFieldType(datasetFieldType); | |
| 445 | |
| 446 if (version != null) { | |
| 447 if (version.getClass().getName().equals("edu.harvard.iq.dataverse.DatasetVersion")) { | |
| 448 dsf.setDatasetVersion((DatasetVersion) version); | |
| 449 } else { | |
| 450 dsf.setTemplate((Template) version); | |
| 451 } | |
| 452 } | |
| 453 | |
| 454 dsf.setParentDatasetFieldCompoundValue(parent); | |
| 455 dsf.setControlledVocabularyValues(controlledVocabularyValues); | |
| 456 | |
| 457 for (DatasetFieldValue dsfv : datasetFieldValues) { | |
| 458 dsf.getDatasetFieldValues().add(dsfv.copy(dsf)); | |
| 459 } | |
| 460 | |
| 461 for (DatasetFieldCompoundValue compoundValue : datasetFieldCompoundValues) { | |
| 462 dsf.getDatasetFieldCompoundValues().add(compoundValue.copy(dsf)); | |
| 463 } | |
| 464 | |
| 465 return dsf; | |
| 466 } | |
| 467 | |
    /**
     * Prunes blank content from this field, recursing into compound values:
     * blank text values are removed, child fields that become blank are
     * removed, and compound values left with no children are removed.
     *
     * @return true when, after pruning, this field has no values left and
     *         should itself be removed by the caller
     */
    public boolean removeBlankDatasetFieldValues() {
        if (this.getDatasetFieldType().isPrimitive()) {
            if (!this.getDatasetFieldType().isControlledVocabulary()) {
                // Iterator.remove() is the safe way to delete while iterating.
                Iterator<DatasetFieldValue> dsfvIt = this.getDatasetFieldValues().iterator();
                while (dsfvIt.hasNext()) {
                    DatasetFieldValue dsfv = dsfvIt.next();
                    if (StringUtils.isBlank(dsfv.getValue())) {
                        dsfvIt.remove();
                    }
                }
                if (this.getDatasetFieldValues().isEmpty()) {
                    return true;
                }
            } else { // controlled vocab
                // No selections at all means the field is blank.
                if (this.getControlledVocabularyValues().isEmpty()) {
                    return true;
                }
            }
        } else if (this.getDatasetFieldType().isCompound()) {
            Iterator<DatasetFieldCompoundValue> cvIt = this.getDatasetFieldCompoundValues().iterator();
            while (cvIt.hasNext()) {
                DatasetFieldCompoundValue cv = cvIt.next();
                // Recurse into children; drop any child that reports itself blank.
                Iterator<DatasetField> dsfIt = cv.getChildDatasetFields().iterator();
                while (dsfIt.hasNext()) {
                    if (dsfIt.next().removeBlankDatasetFieldValues()) {
                        dsfIt.remove();
                    }
                }
                // A compound value with no remaining children is itself blank.
                if (cv.getChildDatasetFields().isEmpty()) {
                    cvIt.remove();
                }
            }
            if (this.getDatasetFieldCompoundValues().isEmpty()) {
                return true;
            }
        }
        return false;
    }
| 506 | |
| 507 public void setValueDisplayOrder() { | |
| 508 if (this.getDatasetFieldType().isPrimitive() && !this.getDatasetFieldType().isControlledVocabulary()) { | |
| 509 for (int i = 0; i < datasetFieldValues.size(); i++) { | |
| 510 datasetFieldValues.get(i).setDisplayOrder(i); | |
| 511 } | |
| 512 | |
| 513 } else if (this.getDatasetFieldType().isCompound()) { | |
| 514 for (int i = 0; i < datasetFieldCompoundValues.size(); i++) { | |
| 515 DatasetFieldCompoundValue compoundValue = datasetFieldCompoundValues.get(i); | |
| 516 compoundValue.setDisplayOrder(i); | |
| 517 for (DatasetField dsf : compoundValue.getChildDatasetFields()) { | |
| 518 dsf.setValueDisplayOrder(); | |
| 519 } | |
| 520 } | |
| 521 } | |
| 522 } | |
| 523 | |
    /** Inserts a new empty text value at the given list position. */
    public void addDatasetFieldValue(int index) {
        datasetFieldValues.add(index, new DatasetFieldValue(this));
    }

    /** Removes the text value at the given list position. */
    public void removeDatasetFieldValue(int index) {
        datasetFieldValues.remove(index);
    }

    /** Inserts a new empty compound value at the given list position. */
    public void addDatasetFieldCompoundValue(int index) {
        datasetFieldCompoundValues.add(index, DatasetFieldCompoundValue.createNewEmptyDatasetFieldCompoundValue(this));
    }

    /** Removes the compound value at the given list position. */
    public void removeDatasetFieldCompoundValue(int index) {
        datasetFieldCompoundValues.remove(index);
    }
| 539 | |
| 540 | |
| 541 /** | |
| 542 * If this is a FieldType.TEXT or FieldType.TEXTBOX, then run it through the markup checker | |
| 543 * | |
| 544 * @return | |
| 545 */ | |
| 546 public boolean needsTextCleaning(){ | |
| 547 | |
| 548 | |
| 549 if (this.getDatasetFieldType() == null || this.getDatasetFieldType().getFieldType() == null){ | |
| 550 return false; | |
| 551 } | |
| 552 | |
| 553 if (this.datasetFieldType.getFieldType().equals(DatasetFieldType.FieldType.TEXT)){ | |
| 554 return true; | |
| 555 } else if (this.datasetFieldType.getFieldType().equals(DatasetFieldType.FieldType.TEXTBOX)){ | |
| 556 return true; | |
| 557 } | |
| 558 | |
| 559 return false; | |
| 560 | |
| 561 } // end: needsTextCleaning | |
| 562 | |
| 563 } |
