--- /dev/null
+listener.module.name=botgarden
\ No newline at end of file
--- /dev/null
+<project name="org.collectionspace.services.3rdparty.nuxeo.listener.botgarden" default="package" basedir=".">
+  <description>
+  CollectionSpace Nuxeo listener component type
+  </description>
+  <!-- Set global properties for this build -->
+  <property name="services.trunk" value="../../../.."/>
+  <!-- Environment should be declared before reading build.properties -->
+  <property environment="env" />
+  <!-- Read the shared, trunk-wide build properties -->
+  <property file="${services.trunk}/build.properties" />
+  <!-- Set local properties for this build -->
+  <property file="build.properties" />
+  <property name="mvn.opts" value="-V" />
+  <property name="src" location="src"/>
+  <property name="build" location="build"/>
+  <property name="dist" location="dist"/>
+
+  <!-- The listener.module.name variable is set in a local properties file -->
+  <property name="jar.name"
+    value="org.collectionspace.services.listener.${listener.module.name}-${cspace.release}.jar"/>
+
+  <!-- Wildcard pattern matching any release of this listener's jar; used by undeploy -->
+  <property name="jar.all"
+    value="org.collectionspace.services.listener.${listener.module.name}-*.jar"/>
+
+  <!-- Maven is invoked differently per OS family; the package/install targets
+       below delegate to the matching -unix or -windows variant. -->
+  <condition property="osfamily-unix">
+    <os family="unix" />
+  </condition>
+  <condition property="osfamily-windows">
+    <os family="windows" />
+  </condition>
+
+  <target name="init" >
+    <!-- Create the time stamp -->
+    <tstamp/>
+    <!-- Create the build directory structure used by compile -->
+    <mkdir dir="${build}"/>
+  </target>
+
+  <target name="package" depends="package-unix,package-windows"
+    description="Package CollectionSpace Services" />
+  <target name="package-unix" if="osfamily-unix">
+    <exec executable="mvn" failonerror="true">
+      <arg value="package" />
+      <arg value="-Dmaven.test.skip=true" />
+      <arg value="-f" />
+      <arg value="${basedir}/pom.xml" />
+      <arg value="-N" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+  <target name="package-windows" if="osfamily-windows">
+    <exec executable="cmd" failonerror="true">
+      <arg value="/c" />
+      <arg value="mvn.bat" />
+      <arg value="package" />
+      <arg value="-Dmaven.test.skip=true" />
+      <arg value="-f" />
+      <arg value="${basedir}/pom.xml" />
+      <arg value="-N" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+
+  <target name="install" depends="install-unix,install-windows"
+    description="Install" />
+  <target name="install-unix" if="osfamily-unix">
+    <exec executable="mvn" failonerror="true">
+      <arg value="install" />
+      <arg value="-Dmaven.test.skip=true" />
+      <arg value="-f" />
+      <arg value="${basedir}/pom.xml" />
+      <arg value="-N" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+  <target name="install-windows" if="osfamily-windows">
+    <exec executable="cmd" failonerror="true">
+      <arg value="/c" />
+      <arg value="mvn.bat" />
+      <arg value="install" />
+      <arg value="-Dmaven.test.skip=true" />
+      <arg value="-f" />
+      <arg value="${basedir}/pom.xml" />
+      <arg value="-N" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+
+  <!-- Copies the built jar into the Nuxeo plugins directory of the JEE server. -->
+  <target name="deploy" depends="install"
+    description="deploy listener in ${jee.server.nuxeo}">
+    <copy file="${basedir}/target/${jar.name}"
+      todir="${jee.deploy.nuxeo.plugins}"/>
+  </target>
+
+  <!-- Removes any deployed release of this listener's jar (see jar.all pattern). -->
+  <target name="undeploy"
+    description="undeploy listener from ${jee.server.nuxeo}">
+    <delete>
+      <fileset dir="${jee.deploy.nuxeo.plugins}">
+        <include name="${jar.all}"/>
+      </fileset>
+    </delete>
+  </target>
+
+</project>
--- /dev/null
+<?xml version="1.0"?>
+<project
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>org.collectionspace.services.3rdparty.nuxeo.listener</artifactId>
+    <groupId>org.collectionspace.services</groupId>
+    <version>4.5-SNAPSHOT</version>
+  </parent>
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+  <artifactId>org.collectionspace.services.listener.botgarden</artifactId>
+  <name>org.collectionspace.services.listener.botgarden</name>
+  <url>http://maven.apache.org</url>
+
+  <dependencies>
+    <!-- Nuxeo platform dependencies; versions are inherited properties
+         (presumably defined in the parent POM - TODO confirm). -->
+    <dependency>
+      <groupId>org.nuxeo.ecm.core</groupId>
+      <artifactId>nuxeo-core-storage-sql</artifactId>
+      <version>${nuxeo.core.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.nuxeo.ecm.platform</groupId>
+      <artifactId>nuxeo-platform-filemanager-api</artifactId>
+      <version>${nuxeo.platform.version}</version>
+    </dependency>
+    <!-- CollectionSpace service modules whose constants/batch jobs the listeners use. -->
+    <dependency>
+      <groupId>org.collectionspace.services</groupId>
+      <artifactId>org.collectionspace.services.collectionobject.service</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.collectionspace.services</groupId>
+      <artifactId>org.collectionspace.services.movement.service</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.collectionspace.services</groupId>
+      <artifactId>org.collectionspace.services.batch.service</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <!-- Filter resources so ${...} Maven properties are substituted at build time. -->
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <archive>
+            <!-- No surrounding whitespace: this value is used as a file path,
+                 and stray spaces can break manifest file resolution. -->
+            <manifestFile>src/main/resources/META-INF/MANIFEST.MF</manifestFile>
+            <manifestEntries>
+              <Bundle-Version>${eclipseVersion}</Bundle-Version>
+              <Bundle-ManifestVersion>2</Bundle-ManifestVersion>
+            </manifestEntries>
+          </archive>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>
--- /dev/null
+package org.collectionspace.services.listener.botgarden;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.collectionspace.services.client.workflow.WorkflowClient;
+import org.collectionspace.services.movement.nuxeo.MovementConstants;
+import org.collectionspace.services.nuxeo.listener.AbstractCSEventListenerImpl;
+import org.nuxeo.ecm.core.api.DocumentModel;
+import org.nuxeo.ecm.core.api.DocumentRef;
+import org.nuxeo.ecm.core.api.VersioningOption;
+import org.nuxeo.ecm.core.event.Event;
+import org.nuxeo.ecm.core.event.EventContext;
+import org.nuxeo.ecm.core.event.impl.DocumentEventContext;
+
+/**
+ * Event listener that creates a minor version (check-in) of a live Movement
+ * document when a document event is handled for it, then checks the document
+ * back out so it remains modifiable. Versioning can be suppressed by setting
+ * {@link #SKIP_PROPERTY} to {@code true} on the event context (see
+ * UpdateLocationListener, which uses this to avoid double-versioning).
+ */
+public class CreateVersionListener extends AbstractCSEventListenerImpl {
+    /** Event-context property; when true, versioning is skipped for this event. */
+    public static final String SKIP_PROPERTY = "CreateVersionListener.SKIP";
+
+    final Log logger = LogFactory.getLog(CreateVersionListener.class);
+
+    /**
+     * Versions the event's source document if it is a live (non-version,
+     * non-proxy, non-deleted) Movement record.
+     *
+     * @param event the Nuxeo event being handled
+     */
+    public void handleEvent(Event event) {
+        EventContext ec = event.getContext();
+
+        if (ec instanceof DocumentEventContext) {
+            DocumentEventContext context = (DocumentEventContext) ec;
+
+            // Boolean.TRUE.equals() instead of a cast-and-unbox: the original
+            // ((Boolean) ec.getProperty(...)) would throw an NPE if the property
+            // were present but null, and a ClassCastException on a non-Boolean.
+            if (Boolean.TRUE.equals(ec.getProperty(SKIP_PROPERTY))) {
+                logger.debug("Skipping create version");
+            }
+            else {
+                DocumentModel doc = context.getSourceDocument();
+
+                logger.debug("docType=" + doc.getType());
+
+                if (doc.getType().startsWith(MovementConstants.NUXEO_DOCTYPE) &&
+                        !doc.isVersion() &&
+                        !doc.isProxy() &&
+                        !doc.getCurrentLifeCycleState().equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+                    // Version the document
+                    DocumentRef versionRef = doc.checkIn(VersioningOption.MINOR, null);
+                    DocumentModel versionDoc = context.getCoreSession().getDocument(versionRef);
+
+                    logger.debug("created version: id=" + versionDoc.getId() + " csid=" + versionDoc.getName());
+
+                    // Check out the document, so it can be modified
+                    doc.checkOut();
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
--- /dev/null
+package org.collectionspace.services.listener.botgarden;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.collectionspace.services.client.workflow.WorkflowClient;
+import org.collectionspace.services.movement.nuxeo.MovementBotGardenConstants;
+import org.collectionspace.services.movement.nuxeo.MovementConstants;
+import org.collectionspace.services.nuxeo.listener.AbstractCSEventListenerImpl;
+import org.nuxeo.ecm.core.api.CoreSession;
+import org.nuxeo.ecm.core.api.DocumentModel;
+import org.nuxeo.ecm.core.event.Event;
+import org.nuxeo.ecm.core.event.EventContext;
+import org.nuxeo.ecm.core.event.impl.DocumentEventContext;
+
+public class DeleteDeadLocationListener extends AbstractCSEventListenerImpl {
+ final Log logger = LogFactory.getLog(DeleteDeadLocationListener.class);
+
+ /*
+ * Delete dead locations.
+ */
+ public void handleEvent(Event event) {
+ EventContext ec = event.getContext();
+
+ if (ec instanceof DocumentEventContext) {
+ DocumentEventContext context = (DocumentEventContext) ec;
+ DocumentModel doc = context.getSourceDocument();
+
+ if (doc.getType().startsWith(MovementConstants.NUXEO_DOCTYPE) &&
+ !doc.isVersion() &&
+ !doc.isProxy() &&
+ !doc.getCurrentLifeCycleState().equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+ String actionCode = (String) doc.getProperty(MovementBotGardenConstants.ACTION_CODE_SCHEMA_NAME,
+ MovementBotGardenConstants.ACTION_CODE_FIELD_NAME);
+
+ logger.debug("actionCode=" + actionCode);
+
+ if (actionCode != null && actionCode.equals(MovementBotGardenConstants.DEAD_ACTION_CODE)) {
+ CoreSession session = context.getCoreSession();
+
+ if (session.getAllowedStateTransitions(doc.getRef()).contains(WorkflowClient.WORKFLOWTRANSITION_DELETE)) {
+ session.followTransition(doc.getRef(), WorkflowClient.WORKFLOWTRANSITION_DELETE);
+ }
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.collectionspace.services.listener.botgarden;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.collectionspace.services.batch.nuxeo.UpdateAccessCodeBatchJob;
+import org.collectionspace.services.client.workflow.WorkflowClient;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectBotGardenConstants;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectConstants;
+import org.collectionspace.services.common.ResourceMap;
+import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.common.relation.nuxeo.RelationConstants;
+import org.collectionspace.services.nuxeo.listener.AbstractCSEventListenerImpl;
+import org.collectionspace.services.taxonomy.nuxeo.TaxonBotGardenConstants;
+import org.collectionspace.services.taxonomy.nuxeo.TaxonConstants;
+import org.jboss.resteasy.spi.ResteasyProviderFactory;
+import org.nuxeo.ecm.core.api.DocumentModel;
+import org.nuxeo.ecm.core.api.event.CoreEventConstants;
+import org.nuxeo.ecm.core.api.event.DocumentEventTypes;
+import org.nuxeo.ecm.core.event.Event;
+import org.nuxeo.ecm.core.event.EventContext;
+import org.nuxeo.ecm.core.event.impl.DocumentEventContext;
+
+/**
+ * A listener that updates the access code on taxon records when collectionobjects
+ * or taxon records are created or modified.
+ *
+ * @see org.collectionspace.services.batch.nuxeo.UpdateAccessCodeBatchJob
+ * @author ray
+ *
+ */
+/**
+ * A listener that updates the access code on taxon records when collectionobjects
+ * or taxon records are created or modified, or when taxon hierarchy (broader)
+ * relations are created or removed.
+ *
+ * @see org.collectionspace.services.batch.nuxeo.UpdateAccessCodeBatchJob
+ * @author ray
+ */
+public class UpdateAccessCodeListener extends AbstractCSEventListenerImpl {
+    final Log logger = LogFactory.getLog(UpdateAccessCodeListener.class);
+
+    // Event-context property keys, used to stash values during the "before"
+    // phase of an event (e.g. beforeDocumentModification / aboutToRemove) so
+    // they can be read back in the corresponding "after" phase.
+    public static final String PREVIOUS_DEAD_FLAG_PROPERTY_NAME = "UpdateAccessCodeListener.previousDeadFlag";
+    public static final String PREVIOUS_TAXON_NAMES_PROPERTY_NAME = "UpdateAccessCodeListener.previousTaxonNames";
+    public static final String PREVIOUS_ACCESS_CODE_PROPERTY_NAME = "UpdateAccessCodeListener.previousAccessCode";
+    public static final String DELETED_RELATION_PARENT_CSID_PROPERTY_NAME = "UpdateAccessCodeListener.deletedRelationParentCsid";
+
+    // TAXON_FIELD_NAME is a slash-separated path (presumably list/group/field --
+    // TODO confirm against CollectionObjectBotGardenConstants); elements 0 and 2
+    // are the names used with the document property API below.
+    private static final String[] TAXON_PATH_ELEMENTS = CollectionObjectBotGardenConstants.TAXON_FIELD_NAME.split("/");
+    private static final String TAXONOMIC_IDENT_GROUP_LIST_FIELD_NAME = TAXON_PATH_ELEMENTS[0];
+    private static final String TAXON_FIELD_NAME = TAXON_PATH_ELEMENTS[2];
+
+    public void handleEvent(Event event) {
+        EventContext ec = event.getContext();
+
+        if (ec instanceof DocumentEventContext) {
+            DocumentEventContext context = (DocumentEventContext) ec;
+            DocumentModel doc = context.getSourceDocument();
+
+            logger.debug("docType=" + doc.getType());
+
+            if (doc.getType().startsWith(CollectionObjectConstants.NUXEO_DOCTYPE) &&
+                    !doc.isVersion() &&
+                    !doc.isProxy() &&
+                    !doc.getCurrentLifeCycleState().equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+
+                if (event.getName().equals(DocumentEventTypes.BEFORE_DOC_UPDATE)) {
+                    // Stash the previous dead flag and taxonomic ident values, so they can be retrieved in the documentModified handler.
+
+                    DocumentModel previousDoc = (DocumentModel) context.getProperty(CoreEventConstants.PREVIOUS_DOCUMENT_MODEL);
+
+                    String previousDeadFlag = (String) previousDoc.getProperty(CollectionObjectBotGardenConstants.DEAD_FLAG_SCHEMA_NAME,
+                            CollectionObjectBotGardenConstants.DEAD_FLAG_FIELD_NAME);
+                    context.setProperty(PREVIOUS_DEAD_FLAG_PROPERTY_NAME, previousDeadFlag);
+
+                    List<String> previousTaxonNames = getTaxonNames(previousDoc);
+                    context.setProperty(PREVIOUS_TAXON_NAMES_PROPERTY_NAME, previousTaxonNames.toArray(new String[previousTaxonNames.size()]));
+                }
+                else {
+                    boolean deadFlagChanged = false;
+                    Set<String> deletedTaxonNames = null;
+                    Set<String> addedTaxonNames = null;
+
+                    if (event.getName().equals(DocumentEventTypes.DOCUMENT_UPDATED)) {
+                        // As an optimization, check if the dead flag of the collectionobject has
+                        // changed, or if the taxonomic identification has changed. If so, we need to
+                        // update the access codes of referenced taxon records.
+
+                        String previousDeadFlag = (String) context.getProperty(PREVIOUS_DEAD_FLAG_PROPERTY_NAME);
+                        String currentDeadFlag = (String) doc.getProperty(CollectionObjectBotGardenConstants.DEAD_FLAG_SCHEMA_NAME,
+                                CollectionObjectBotGardenConstants.DEAD_FLAG_FIELD_NAME);
+
+                        // Normalize nulls to "" so the comparison below is null-safe.
+                        if (previousDeadFlag == null) {
+                            previousDeadFlag = "";
+                        }
+
+                        if (currentDeadFlag == null) {
+                            currentDeadFlag = "";
+                        }
+
+                        if (previousDeadFlag.equals(currentDeadFlag)) {
+                            logger.debug("dead flag not changed: previousDeadFlag=" + previousDeadFlag + " currentDeadFlag=" + currentDeadFlag);
+                        }
+                        else {
+                            logger.debug("dead flag changed: previousDeadFlag=" + previousDeadFlag + " currentDeadFlag=" + currentDeadFlag);
+                            deadFlagChanged = true;
+                        }
+
+                        List<String> previousTaxonNames = Arrays.asList((String[]) context.getProperty(PREVIOUS_TAXON_NAMES_PROPERTY_NAME));
+                        List<String> currentTaxonNames = getTaxonNames(doc);
+
+                        deletedTaxonNames = findDeletedTaxonNames(previousTaxonNames, currentTaxonNames);
+                        logger.debug("found deleted taxon names: " + StringUtils.join(deletedTaxonNames, ", "));
+
+                        addedTaxonNames = findAddedTaxonNames(previousTaxonNames, currentTaxonNames);
+                        logger.debug("found added taxon names: " + StringUtils.join(addedTaxonNames, ", "));
+                    }
+                    else if (event.getName().equals(DocumentEventTypes.DOCUMENT_CREATED)) {
+                        // A new collectionobject has no previous state; treat it as changed so
+                        // all of its referenced taxon records get recalculated.
+                        deadFlagChanged = true;
+                    }
+
+                    UpdateAccessCodeBatchJob updater = createUpdater();
+
+                    if (deadFlagChanged) {
+                        String collectionObjectCsid = doc.getName();
+
+                        try {
+                            // Pass false for the second parameter to updateReferencedAccessCodes, so that it doesn't
+                            // propagate changes up the taxon hierarchy. Propagation is taken care of by this
+                            // event handler: As taxon records are modified, this handler executes, and updates the
+                            // parent.
+
+                            InvocationResults results = updater.updateReferencedAccessCodes(collectionObjectCsid, false);
+
+                            logger.debug("updateReferencedAccessCodes complete: numAffected=" + results.getNumAffected() + " userNote=" + results.getUserNote());
+                        }
+                        catch (Exception e) {
+                            logger.error(e.getMessage(), e);
+                        }
+                    }
+                    else {
+                        // If the dead flag didn't change, we still need to recalculate the access codes of
+                        // any taxonomic idents that were added.
+
+                        if (addedTaxonNames != null) {
+                            for (String addedTaxonName : addedTaxonNames) {
+                                logger.debug("updating added taxon: " + addedTaxonName);
+
+                                try {
+                                    InvocationResults results = updater.updateAccessCode(addedTaxonName, false);
+
+                                    logger.debug("updateAccessCode complete: numAffected=" + results.getNumAffected() + " userNote=" + results.getUserNote());
+                                }
+                                catch (Exception e) {
+                                    logger.error(e.getMessage(), e);
+                                }
+                            }
+                        }
+                    }
+
+                    if (deletedTaxonNames != null) {
+                        // If any taxonomic idents were removed from the collectionobject, they need to have their
+                        // access codes recalculated.
+
+                        for (String deletedTaxonName : deletedTaxonNames) {
+                            logger.debug("updating deleted taxon: " + deletedTaxonName);
+
+                            try {
+                                InvocationResults results = updater.updateAccessCode(deletedTaxonName, false);
+
+                                logger.debug("updateAccessCode complete: numAffected=" + results.getNumAffected() + " userNote=" + results.getUserNote());
+                            }
+                            catch (Exception e) {
+                                logger.error(e.getMessage(), e);
+                            }
+                        }
+                    }
+                }
+            }
+            else if (doc.getType().startsWith(TaxonConstants.NUXEO_DOCTYPE) &&
+                    !doc.isVersion() &&
+                    !doc.isProxy() &&
+                    !doc.getCurrentLifeCycleState().equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+
+                if (event.getName().equals(DocumentEventTypes.BEFORE_DOC_UPDATE)) {
+                    // Stash the previous access code value, so it can be retrieved in the documentModified handler.
+
+                    DocumentModel previousDoc = (DocumentModel) context.getProperty(CoreEventConstants.PREVIOUS_DOCUMENT_MODEL);
+                    String previousAccessCode = (String) previousDoc.getProperty(TaxonBotGardenConstants.ACCESS_CODE_SCHEMA_NAME, TaxonBotGardenConstants.ACCESS_CODE_FIELD_NAME);
+
+                    context.setProperty(PREVIOUS_ACCESS_CODE_PROPERTY_NAME, previousAccessCode);
+                }
+                else {
+                    boolean updateRequired = false;
+
+                    if (event.getName().equals(DocumentEventTypes.DOCUMENT_UPDATED)) {
+                        // As an optimization, check if the access code of the taxon has
+                        // changed. We only need to update the access code of the parent taxon
+                        // record if it has.
+
+                        String previousAccessCode = (String) context.getProperty(PREVIOUS_ACCESS_CODE_PROPERTY_NAME);
+                        String currentAccessCode = (String) doc.getProperty(TaxonBotGardenConstants.ACCESS_CODE_SCHEMA_NAME, TaxonBotGardenConstants.ACCESS_CODE_FIELD_NAME);
+
+                        if (previousAccessCode == null) {
+                            previousAccessCode = "";
+                        }
+
+                        if (currentAccessCode == null) {
+                            currentAccessCode = "";
+                        }
+
+                        if (previousAccessCode.equals(currentAccessCode)) {
+                            logger.debug("update not required: previousAccessCode=" + previousAccessCode + " currentAccessCode=" + currentAccessCode);
+                        }
+                        else {
+                            logger.debug("update required: previousAccessCode=" + previousAccessCode + " currentAccessCode=" + currentAccessCode);
+                            updateRequired = true;
+                        }
+                    }
+                    else if (event.getName().equals(DocumentEventTypes.DOCUMENT_CREATED)) {
+                        updateRequired = true;
+                    }
+
+                    if (updateRequired) {
+                        String taxonCsid = doc.getName();
+
+                        try {
+                            // Pass false for the second parameter to updateParentAccessCode, so that it doesn't
+                            // propagate changes up the taxon hierarchy. Propagation is taken care of by this
+                            // event handler: As taxon records are modified, this handler executes, and updates the
+                            // parent.
+
+                            InvocationResults results = createUpdater().updateParentAccessCode(taxonCsid, false);
+
+                            logger.debug("updateParentAccessCode complete: numAffected=" + results.getNumAffected() + " userNote=" + results.getUserNote());
+                        }
+                        catch (Exception e) {
+                            logger.error(e.getMessage(), e);
+                        }
+                    }
+                }
+            }
+            else if (doc.getType().equals(RelationConstants.NUXEO_DOCTYPE) &&
+                    !doc.isVersion() &&
+                    !doc.isProxy()) {
+
+                if (event.getName().equals(DocumentEventTypes.DOCUMENT_CREATED)) {
+                    if (isTaxonBroaderRelation(doc)) {
+                        String parentTaxonCsid = (String) doc.getProperty(RelationConstants.OBJECT_CSID_SCHEMA_NAME, RelationConstants.OBJECT_CSID_FIELD_NAME);
+                        logger.debug("child added, updating parent taxon: parentTaxonCsid=" + parentTaxonCsid);
+
+                        try {
+                            InvocationResults results = createUpdater().updateAccessCode(parentTaxonCsid, false);
+
+                            logger.debug("updateAccessCode complete: numAffected=" + results.getNumAffected() + " userNote=" + results.getUserNote());
+                        }
+                        catch (Exception e) {
+                            logger.error(e.getMessage(), e);
+                        }
+                    }
+                }
+                else if (event.getName().equals(DocumentEventTypes.ABOUT_TO_REMOVE)) {
+                    if (isTaxonBroaderRelation(doc)) {
+                        String parentTaxonCsid = (String) doc.getProperty(RelationConstants.OBJECT_CSID_SCHEMA_NAME, RelationConstants.OBJECT_CSID_FIELD_NAME);
+
+                        // Stash the parent taxon csid, so it can be retrieved in the documentRemoved handler.
+                        logger.debug("about to delete taxon hierarchy relation: parentTaxonCsid=" + parentTaxonCsid);
+                        context.setProperty(DELETED_RELATION_PARENT_CSID_PROPERTY_NAME, parentTaxonCsid);
+                    }
+                }
+                else if (event.getName().equals(DocumentEventTypes.DOCUMENT_REMOVED)) {
+                    String parentTaxonCsid = (String) context.getProperty(DELETED_RELATION_PARENT_CSID_PROPERTY_NAME);
+
+                    if (StringUtils.isNotEmpty(parentTaxonCsid)) {
+                        logger.debug("child removed, updating parent taxon: parentTaxonCsid=" + parentTaxonCsid);
+
+                        try {
+                            InvocationResults results = createUpdater().updateAccessCode(parentTaxonCsid, false);
+
+                            logger.debug("updateAccessCode complete: numAffected=" + results.getNumAffected() + " userNote=" + results.getUserNote());
+                        }
+                        catch (Exception e) {
+                            logger.error(e.getMessage(), e);
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * Returns true if the given relation document is a broader (hierarchy)
+     * relation between two taxon records. Also logs the relation's doctypes
+     * and type at debug level, as the inline code previously did.
+     */
+    private boolean isTaxonBroaderRelation(DocumentModel doc) {
+        String subjectDocType = (String) doc.getProperty(RelationConstants.SUBJECT_DOCTYPE_SCHEMA_NAME, RelationConstants.SUBJECT_DOCTYPE_FIELD_NAME);
+        String objectDocType = (String) doc.getProperty(RelationConstants.OBJECT_DOCTYPE_SCHEMA_NAME, RelationConstants.OBJECT_DOCTYPE_FIELD_NAME);
+        String relationType = (String) doc.getProperty(RelationConstants.TYPE_SCHEMA_NAME, RelationConstants.TYPE_FIELD_NAME);
+
+        logger.debug("subjectDocType=" + subjectDocType + " objectDocType=" + objectDocType + " relationType=" + relationType);
+
+        // Constant-first equals() guards against null property values.
+        return TaxonConstants.NUXEO_DOCTYPE.equals(subjectDocType) &&
+                TaxonConstants.NUXEO_DOCTYPE.equals(objectDocType) &&
+                RelationConstants.BROADER_TYPE.equals(relationType);
+    }
+
+    /**
+     * Extracts the non-empty taxon names from the document's taxonomic
+     * identification group list. Returns an empty list if the list is absent.
+     */
+    @SuppressWarnings("unchecked")
+    private List<String> getTaxonNames(DocumentModel doc) {
+        List<Map<String, Object>> taxonomicIdentGroupList = (List<Map<String, Object>>) doc.getProperty(CollectionObjectBotGardenConstants.TAXON_SCHEMA_NAME,
+                TAXONOMIC_IDENT_GROUP_LIST_FIELD_NAME);
+        List<String> taxonNames = new ArrayList<String>();
+
+        // Guard against a missing group list (the original would NPE here).
+        if (taxonomicIdentGroupList != null) {
+            for (Map<String, Object> taxonomicIdentGroup : taxonomicIdentGroupList) {
+                String taxonName = (String) taxonomicIdentGroup.get(TAXON_FIELD_NAME);
+
+                if (StringUtils.isNotEmpty(taxonName)) {
+                    taxonNames.add(taxonName);
+                }
+            }
+        }
+
+        return taxonNames;
+    }
+
+    /** Returns the names present in the previous list but not in the current one. */
+    private Set<String> findDeletedTaxonNames(List<String> previousTaxonNames, List<String> currentTaxonNames) {
+        Set<String> currentTaxonNameSet = new HashSet<String>(currentTaxonNames);
+        Set<String> deletedTaxonNameSet = new HashSet<String>();
+
+        for (String previousTaxonName : previousTaxonNames) {
+            if (!currentTaxonNameSet.contains(previousTaxonName)) {
+                deletedTaxonNameSet.add(previousTaxonName);
+            }
+        }
+
+        return deletedTaxonNameSet;
+    }
+
+    /** Returns the names present in the current list but not in the previous one. */
+    private Set<String> findAddedTaxonNames(List<String> previousTaxonNames, List<String> currentTaxonNames) {
+        Set<String> previousTaxonNameSet = new HashSet<String>(previousTaxonNames);
+        Set<String> addedTaxonNameSet = new HashSet<String>();
+
+        for (String currentTaxonName : currentTaxonNames) {
+            if (!previousTaxonNameSet.contains(currentTaxonName)) {
+                addedTaxonNameSet.add(currentTaxonName);
+            }
+        }
+
+        return addedTaxonNameSet;
+    }
+
+    /** Creates a batch-job instance wired with the request's ResourceMap. */
+    private UpdateAccessCodeBatchJob createUpdater() {
+        ResourceMap resourceMap = ResteasyProviderFactory.getContextData(ResourceMap.class);
+
+        UpdateAccessCodeBatchJob updater = new UpdateAccessCodeBatchJob();
+        updater.setResourceMap(resourceMap);
+
+        return updater;
+    }
+}
\ No newline at end of file
--- /dev/null
+package org.collectionspace.services.listener.botgarden;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.collectionspace.services.batch.nuxeo.UpdateDeadFlagBatchJob;
+import org.collectionspace.services.client.workflow.WorkflowClient;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectConstants;
+import org.collectionspace.services.common.ResourceMap;
+import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.common.relation.nuxeo.RelationConstants;
+import org.collectionspace.services.movement.nuxeo.MovementBotGardenConstants;
+import org.collectionspace.services.movement.nuxeo.MovementConstants;
+import org.collectionspace.services.nuxeo.listener.AbstractCSEventListenerImpl;
+import org.jboss.resteasy.spi.ResteasyProviderFactory;
+import org.nuxeo.ecm.core.api.DocumentModel;
+import org.nuxeo.ecm.core.api.event.DocumentEventTypes;
+import org.nuxeo.ecm.core.event.Event;
+import org.nuxeo.ecm.core.event.EventContext;
+import org.nuxeo.ecm.core.event.impl.DocumentEventContext;
+
+/**
+ * Event listener that keeps the dead flag (and dead date) on collectionobjects
+ * in sync with related Movement records, by delegating to
+ * {@link org.collectionspace.services.batch.nuxeo.UpdateDeadFlagBatchJob}.
+ */
+public class UpdateDeadFlagListener extends AbstractCSEventListenerImpl {
+    final Log logger = LogFactory.getLog(UpdateDeadFlagListener.class);
+
+    /*
+     * Set the dead flag and dead date on collectionobjects related to a new or modified movement record.
+     */
+    public void handleEvent(Event event) {
+        EventContext ec = event.getContext();
+
+        if (ec instanceof DocumentEventContext) {
+            DocumentEventContext context = (DocumentEventContext) ec;
+            DocumentModel doc = context.getSourceDocument();
+
+            logger.debug("docType=" + doc.getType());
+
+            if (event.getName().equals(DocumentEventTypes.DOCUMENT_CREATED)) {
+                /*
+                 * Handle the case where a new movement is created with action code revive, and then related
+                 * to a collectionobject. The movement won't have any relations at the time it's created,
+                 * so we need to capture the creation of the relation.
+                 */
+                if (doc.getType().equals(RelationConstants.NUXEO_DOCTYPE) &&
+                        !doc.isVersion() &&
+                        !doc.isProxy()) {
+                    String subjectDocType = (String) doc.getProperty(RelationConstants.SUBJECT_DOCTYPE_SCHEMA_NAME, RelationConstants.SUBJECT_DOCTYPE_FIELD_NAME);
+                    String objectDocType = (String) doc.getProperty(RelationConstants.OBJECT_DOCTYPE_SCHEMA_NAME, RelationConstants.OBJECT_DOCTYPE_FIELD_NAME);
+
+                    logger.debug("subjectDocType=" + subjectDocType + " objectDocType=" + objectDocType);
+
+                    // Constant-first equals() guards against null property values.
+                    if (MovementConstants.NUXEO_DOCTYPE.equals(subjectDocType) && CollectionObjectConstants.NUXEO_DOCTYPE.equals(objectDocType)) {
+                        String movementCsid = (String) doc.getProperty(RelationConstants.SUBJECT_CSID_SCHEMA_NAME, RelationConstants.SUBJECT_CSID_FIELD_NAME);
+                        String collectionObjectCsid = (String) doc.getProperty(RelationConstants.OBJECT_CSID_SCHEMA_NAME, RelationConstants.OBJECT_CSID_FIELD_NAME);
+
+                        try {
+                            InvocationResults results = createUpdater().updateDeadFlag(collectionObjectCsid, movementCsid);
+
+                            logger.debug("updateDeadFlag complete: numAffected=" + results.getNumAffected() + " userNote=" + results.getUserNote());
+                        } catch (Exception e) {
+                            logger.error(e.getMessage(), e);
+                        }
+                    }
+                }
+            }
+            else {
+                /*
+                 * Handle document modification. If the modified document was a movement record, and
+                 * its action code is dead or revived, update the dead flag. We don't actually have to
+                 * check the action code here, since it will be checked inside UpdateDeadFlagBatchJob.updateRelatedDeadFlags,
+                 * but it is an optimization.
+                 */
+                if (doc.getType().startsWith(MovementConstants.NUXEO_DOCTYPE) &&
+                        !doc.isVersion() &&
+                        !doc.isProxy() &&
+                        !doc.getCurrentLifeCycleState().equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+                    String actionCode = (String) doc.getProperty(MovementBotGardenConstants.ACTION_CODE_SCHEMA_NAME, MovementBotGardenConstants.ACTION_CODE_FIELD_NAME);
+
+                    logger.debug("actionCode=" + actionCode);
+
+                    if (actionCode != null && (actionCode.equals(MovementBotGardenConstants.DEAD_ACTION_CODE) || actionCode.equals(MovementBotGardenConstants.REVIVED_ACTION_CODE))) {
+                        String movementCsid = doc.getName();
+
+                        try {
+                            InvocationResults results = createUpdater().updateRelatedDeadFlags(movementCsid);
+
+                            logger.debug("updateRelatedDeadFlags complete: numAffected=" + results.getNumAffected() + " userNote=" + results.getUserNote());
+                        } catch (Exception e) {
+                            logger.error(e.getMessage(), e);
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    /** Creates a batch-job instance wired with the request's ResourceMap. */
+    private UpdateDeadFlagBatchJob createUpdater() {
+        ResourceMap resourceMap = ResteasyProviderFactory.getContextData(ResourceMap.class);
+
+        UpdateDeadFlagBatchJob updater = new UpdateDeadFlagBatchJob();
+        updater.setResourceMap(resourceMap);
+
+        return updater;
+    }
+}
\ No newline at end of file
--- /dev/null
+package org.collectionspace.services.listener.botgarden;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.collectionspace.services.client.workflow.WorkflowClient;
+import org.collectionspace.services.movement.nuxeo.MovementBotGardenConstants;
+import org.collectionspace.services.movement.nuxeo.MovementConstants;
+import org.collectionspace.services.nuxeo.listener.AbstractCSEventListenerImpl;
+import org.nuxeo.ecm.core.api.DocumentModel;
+import org.nuxeo.ecm.core.api.event.CoreEventConstants;
+import org.nuxeo.ecm.core.api.event.DocumentEventTypes;
+import org.nuxeo.ecm.core.event.Event;
+import org.nuxeo.ecm.core.event.EventContext;
+import org.nuxeo.ecm.core.event.impl.DocumentEventContext;
+
+public class UpdateLocationListener extends AbstractCSEventListenerImpl {
+    final Log logger = LogFactory.getLog(UpdateLocationListener.class);
+
+    /*
+     * Set the currentLocation and previousLocation fields in a Current Location record
+     * to appropriate values.
+     *
+     * <ul>
+     * <li>If the plant is dead, set currentLocation to none</li>
+     * <li>Set the previousLocation field to the previous value of the currentLocation field</li>
+     * </ul>
+     */
+    public void handleEvent(Event event) {
+        EventContext ec = event.getContext();
+
+        if (ec instanceof DocumentEventContext) {
+            DocumentEventContext context = (DocumentEventContext) ec;
+            DocumentModel doc = context.getSourceDocument();
+
+            // Only live Movement documents apply: not an archived version, not a
+            // proxy, and not already in the deleted workflow state.
+            if (doc.getType().startsWith(MovementConstants.NUXEO_DOCTYPE) &&
+                    !doc.isVersion() &&
+                    !doc.isProxy() &&
+                    !doc.getCurrentLifeCycleState().equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+                String actionCode = (String) doc.getProperty(MovementBotGardenConstants.ACTION_CODE_SCHEMA_NAME,
+                        MovementBotGardenConstants.ACTION_CODE_FIELD_NAME);
+
+                logger.debug("actionCode=" + actionCode);
+
+                if (event.getName().equals(DocumentEventTypes.DOCUMENT_CREATED)) {
+                    /*
+                     * Special case for a document that is created with an action code of dead.
+                     * In this case, we'll set the currentLocation to none, and the previousLocation to
+                     * the current value of currentLocation, since there isn't a previous value. To do
+                     * this, we can simply save the document, which will cause the beforeDocumentModification
+                     * event to fire, taking us into the other branch of this code, with the current document
+                     * becoming the previous document.
+                     */
+                    if (actionCode != null && actionCode.equals(MovementBotGardenConstants.DEAD_ACTION_CODE)) {
+                        context.getCoreSession().saveDocument(doc);
+
+                        /*
+                         * The saveDocument call will have caused the document to be versioned via documentModified,
+                         * so we can skip the versioning that would normally happen on documentCreated.
+                         */
+                        ec.setProperty(CreateVersionListener.SKIP_PROPERTY, true);
+                    }
+                }
+                else {
+                    // NOTE(review): this branch presumably runs on beforeDocumentModification,
+                    // since it reads PREVIOUS_DOCUMENT_MODEL and mutates doc without saving --
+                    // confirm against the listener registration in the extension-point XML.
+
+                    // A dead plant's current location is forced to the "none" placeholder.
+                    if (actionCode != null && actionCode.equals(MovementBotGardenConstants.DEAD_ACTION_CODE)) {
+                        doc.setProperty(MovementConstants.CURRENT_LOCATION_SCHEMA_NAME, MovementConstants.CURRENT_LOCATION_FIELD_NAME, MovementConstants.NONE_LOCATION);
+                    }
+
+                    DocumentModel previousDoc = (DocumentModel) context.getProperty(CoreEventConstants.PREVIOUS_DOCUMENT_MODEL);
+                    String previousLocation = (String) previousDoc.getProperty(MovementConstants.CURRENT_LOCATION_SCHEMA_NAME, MovementConstants.CURRENT_LOCATION_FIELD_NAME);
+
+                    logger.debug("previousLocation=" + previousLocation);
+
+                    // Carry the old currentLocation value into previousLocation before the update is persisted.
+                    doc.setProperty(MovementConstants.PREVIOUS_LOCATION_SCHEMA_NAME, MovementConstants.PREVIOUS_LOCATION_FIELD_NAME, previousLocation);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
--- /dev/null
+package org.collectionspace.services.listener.botgarden;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.collectionspace.services.batch.nuxeo.UpdateRareFlagBatchJob;
+import org.collectionspace.services.client.PoxPayloadIn;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.client.workflow.WorkflowClient;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectBotGardenConstants;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectConstants;
+import org.collectionspace.services.common.ResourceMap;
+import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.nuxeo.listener.AbstractCSEventListenerImpl;
+import org.collectionspace.services.taxonomy.nuxeo.TaxonBotGardenConstants;
+import org.collectionspace.services.taxonomy.nuxeo.TaxonConstants;
+import org.jboss.resteasy.spi.ResteasyProviderFactory;
+import org.nuxeo.ecm.core.api.DocumentModel;
+import org.nuxeo.ecm.core.api.event.CoreEventConstants;
+import org.nuxeo.ecm.core.api.event.DocumentEventTypes;
+import org.nuxeo.ecm.core.event.Event;
+import org.nuxeo.ecm.core.event.EventContext;
+import org.nuxeo.ecm.core.event.impl.DocumentEventContext;
+
+/**
+ * A listener that updates the rare flag on collectionobjects when collectionobjects
+ * are created or modified, and when taxon records are modified.
+ *
+ * @see org.collectionspace.services.batch.nuxeo.UpdateRareFlagBatchJob
+ * @author ray
+ *
+ */
+public class UpdateRareFlagListener extends AbstractCSEventListenerImpl {
+ final Log logger = LogFactory.getLog(UpdateRareFlagListener.class);
+
+ public static final String PREVIOUS_TAXON_PROPERTY_NAME = "UpdateRareFlagListener.previousTaxon";
+ public static final String PREVIOUS_HAS_RARE_CONSERVATION_CATEGORY_PROPERTY_NAME = "UpdateRareFlagListener.previousHasRareConservationCategory";
+
+ private static final String[] CONSERVATION_CATEGORY_PATH_ELEMENTS = TaxonBotGardenConstants.CONSERVATION_CATEGORY_FIELD_NAME.split("/");
+ private static final String PLANT_ATTRIBUTES_GROUP_LIST_FIELD_NAME = CONSERVATION_CATEGORY_PATH_ELEMENTS[0];
+ private static final String CONSERVATION_CATEGORY_FIELD_NAME = CONSERVATION_CATEGORY_PATH_ELEMENTS[2];
+
+ public void handleEvent(Event event) {
+ EventContext ec = event.getContext();
+
+ if (ec instanceof DocumentEventContext) {
+ DocumentEventContext context = (DocumentEventContext) ec;
+ DocumentModel doc = context.getSourceDocument();
+
+ logger.debug("docType=" + doc.getType());
+
+ if (doc.getType().startsWith(CollectionObjectConstants.NUXEO_DOCTYPE) &&
+ !doc.isVersion() &&
+ !doc.isProxy() &&
+ !doc.getCurrentLifeCycleState().equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+
+ if (event.getName().equals(DocumentEventTypes.BEFORE_DOC_UPDATE)) {
+ // Stash the previous primary taxonomic ident, so it can be retrieved in the documentModified handler.
+
+ DocumentModel previousDoc = (DocumentModel) context.getProperty(CoreEventConstants.PREVIOUS_DOCUMENT_MODEL);
+ String previousTaxon = (String) previousDoc.getProperty(CollectionObjectBotGardenConstants.TAXON_SCHEMA_NAME,
+ CollectionObjectBotGardenConstants.PRIMARY_TAXON_FIELD_NAME);
+
+ context.setProperty(PREVIOUS_TAXON_PROPERTY_NAME, previousTaxon);
+ }
+ else {
+ boolean updateRequired = false;
+
+ if (event.getName().equals(DocumentEventTypes.DOCUMENT_UPDATED)) {
+ // A collectionobject was modified. As an optimization, check if the primary taxonomic determination
+ // of the collectionobject has changed. We only need to update the rare flag if it has.
+
+ String previousTaxon = (String) context.getProperty(PREVIOUS_TAXON_PROPERTY_NAME);
+ String currentTaxon = (String) doc.getProperty(CollectionObjectBotGardenConstants.TAXON_SCHEMA_NAME,
+ CollectionObjectBotGardenConstants.PRIMARY_TAXON_FIELD_NAME);
+
+ if (previousTaxon == null) {
+ previousTaxon = "";
+ }
+
+ if (currentTaxon == null) {
+ currentTaxon = "";
+ }
+
+ if (previousTaxon.equals(currentTaxon)) {
+ logger.debug("update not required: previousTaxon=" + previousTaxon + " currentTaxon=" + currentTaxon);
+ }
+ else {
+ logger.debug("update required: previousTaxon=" + previousTaxon + " currentTaxon=" + currentTaxon);
+ updateRequired = true;
+ }
+ }
+ else if (event.getName().equals(DocumentEventTypes.DOCUMENT_CREATED)) {
+ // A collectionobject was created. Always update the rare flag.
+
+ updateRequired = true;
+ }
+
+ if (updateRequired) {
+ String collectionObjectCsid = doc.getName();
+
+ try {
+ InvocationResults results = createUpdater().updateRareFlag(collectionObjectCsid);
+
+ logger.debug("updateRareFlag complete: numAffected=" + results.getNumAffected() + " userNote=" + results.getUserNote());
+ } catch (Exception e) {
+ logger.error(e.getMessage(), e);
+ }
+ }
+ }
+ }
+ else if (doc.getType().startsWith(TaxonConstants.NUXEO_DOCTYPE) &&
+ !doc.isVersion() &&
+ !doc.isProxy() &&
+ !doc.getCurrentLifeCycleState().equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+
+ if (event.getName().equals(DocumentEventTypes.BEFORE_DOC_UPDATE)) {
+ // Stash whether there was previously a non-empty conservation category, so it can be retrieved in the documentModified handler.
+
+ DocumentModel previousDoc = (DocumentModel) context.getProperty(CoreEventConstants.PREVIOUS_DOCUMENT_MODEL);
+ boolean previousHasRareConservationCategory = hasRareConservationCategory(previousDoc);
+
+ context.setProperty(PREVIOUS_HAS_RARE_CONSERVATION_CATEGORY_PROPERTY_NAME, new Boolean(previousHasRareConservationCategory));
+ }
+ else {
+ boolean updateRequired = false;
+
+ if (event.getName().equals(DocumentEventTypes.DOCUMENT_UPDATED)) {
+ // A taxon record was modified. As an optimization, check if there is now a rare
+ // conservation category when there wasn't before, or vice versa. We only need to update
+ // the rare flags of referencing collectionobjects if there was a change.
+
+ boolean previousHasRareConservationCategory = (Boolean) context.getProperty(PREVIOUS_HAS_RARE_CONSERVATION_CATEGORY_PROPERTY_NAME);
+ boolean currentHasRareConservationCategory = hasRareConservationCategory(doc);
+
+ if (previousHasRareConservationCategory == currentHasRareConservationCategory) {
+ logger.debug("update not required: previousHasRareConservationCategory=" + previousHasRareConservationCategory +
+ " currentHasRareConservationCategory=" + currentHasRareConservationCategory);
+ }
+ else {
+ logger.debug("update required: previousHasRareConservationCategory=" + previousHasRareConservationCategory +
+ " currentHasRareConservationCategory=" + currentHasRareConservationCategory);
+ updateRequired = true;
+ }
+ }
+
+ if (updateRequired) {
+ String taxonCsid = doc.getName();
+
+ try {
+ InvocationResults results = createUpdater().updateReferencingRareFlags(taxonCsid);
+
+ logger.debug("updateReferencingRareFlags complete: numAffected=" + results.getNumAffected() + " userNote=" + results.getUserNote());
+ } catch (Exception e) {
+ logger.error(e.getMessage(), e);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private boolean hasRareConservationCategory(DocumentModel doc) {
+ List<Map<String, Object>> plantAttributesGroupList = (List<Map<String, Object>>) doc.getProperty(TaxonBotGardenConstants.CONSERVATION_CATEGORY_SCHEMA_NAME,
+ PLANT_ATTRIBUTES_GROUP_LIST_FIELD_NAME);
+ boolean hasRareConservationCategory = false;
+
+ // UCBG-369: Changing this so that it only checks the primary conservation category.
+
+ if (plantAttributesGroupList.size() > 0) {
+ Map<String, Object> plantAttributesGroup = plantAttributesGroupList.get(0);
+ String conservationCategory = (String) plantAttributesGroup.get(CONSERVATION_CATEGORY_FIELD_NAME);
+
+ if (UpdateRareFlagBatchJob.isRare(conservationCategory)) {
+ hasRareConservationCategory = true;
+ }
+ }
+
+// for (Map<String, Object> plantAttributesGroup : plantAttributesGroupList) {
+// String conservationCategory = (String) plantAttributesGroup.get(CONSERVATION_CATEGORY_FIELD_NAME);
+//
+// if (UpdateRareFlagBatchJob.isRare(conservationCategory)) {
+// hasRareConservationCategory = true;
+// break;
+// }
+// }
+
+ return hasRareConservationCategory;
+ }
+
+ private UpdateRareFlagBatchJob createUpdater() {
+ ResourceMap<PoxPayloadIn, PoxPayloadOut> resourceMap = ResteasyProviderFactory.getContextData(ResourceMap.class);
+
+ UpdateRareFlagBatchJob updater = new UpdateRareFlagBatchJob();
+ updater.setResourceMap(resourceMap);
+
+ return updater;
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.collectionspace.services.listener.botgarden;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.jboss.resteasy.spi.ResteasyProviderFactory;
+
+import org.collectionspace.services.batch.nuxeo.FormatVoucherNameBatchJob;
+import org.collectionspace.services.client.PoxPayloadIn;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.client.workflow.WorkflowClient;
+import org.collectionspace.services.common.ResourceMap;
+import org.collectionspace.services.loanout.nuxeo.LoanoutBotGardenConstants;
+import org.collectionspace.services.loanout.nuxeo.LoanoutConstants;
+import org.collectionspace.services.nuxeo.listener.AbstractCSEventListenerImpl;
+import org.nuxeo.ecm.core.api.DocumentModel;
+import org.nuxeo.ecm.core.api.event.CoreEventConstants;
+import org.nuxeo.ecm.core.api.event.DocumentEventTypes;
+import org.nuxeo.ecm.core.event.Event;
+import org.nuxeo.ecm.core.event.EventContext;
+import org.nuxeo.ecm.core.event.impl.DocumentEventContext;
+
+public class UpdateStyledNameListener extends AbstractCSEventListenerImpl {
+ public static final String RUN_AFTER_MODIFIED_PROPERTY = "UpdateStyledNameListener.RUN_AFTER_MODIFIED";
+
+ final Log logger = LogFactory.getLog(UpdateStyledNameListener.class);
+
+ public void handleEvent(Event event) {
+ EventContext ec = event.getContext();
+
+ if (ec instanceof DocumentEventContext) {
+ DocumentEventContext context = (DocumentEventContext) ec;
+ DocumentModel doc = context.getSourceDocument();
+
+ logger.debug("docType=" + doc.getType());
+
+ if (doc.getType().startsWith(LoanoutConstants.NUXEO_DOCTYPE) &&
+ !doc.isVersion() &&
+ !doc.isProxy() &&
+ !doc.getCurrentLifeCycleState().equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+
+ if (event.getName().equals(DocumentEventTypes.BEFORE_DOC_UPDATE)) {
+ DocumentModel previousDoc = (DocumentModel) context.getProperty(CoreEventConstants.PREVIOUS_DOCUMENT_MODEL);
+
+ String previousLabelRequested = (String) previousDoc.getProperty(LoanoutBotGardenConstants.LABEL_REQUESTED_SCHEMA_NAME,
+ LoanoutBotGardenConstants.LABEL_REQUESTED_FIELD_NAME);
+ String labelRequested = (String) doc.getProperty(LoanoutBotGardenConstants.LABEL_REQUESTED_SCHEMA_NAME,
+ LoanoutBotGardenConstants.LABEL_REQUESTED_FIELD_NAME);
+
+ logger.debug("previousLabelRequested=" + previousLabelRequested + " labelRequested=" + labelRequested);
+
+ if ((previousLabelRequested == null || previousLabelRequested.equals(LoanoutBotGardenConstants.LABEL_REQUESTED_NO_VALUE)) &&
+ labelRequested.equals(LoanoutBotGardenConstants.LABEL_REQUESTED_YES_VALUE)) {
+ // The label request is changing from no to yes, so we should update the styled name.
+ ec.setProperty(RUN_AFTER_MODIFIED_PROPERTY, true);
+ }
+ }
+ else {
+ boolean doUpdate = false;
+
+ if (event.getName().equals(DocumentEventTypes.DOCUMENT_CREATED)) {
+ String labelRequested = (String) doc.getProperty(LoanoutBotGardenConstants.LABEL_REQUESTED_SCHEMA_NAME,
+ LoanoutBotGardenConstants.LABEL_REQUESTED_FIELD_NAME);
+
+ doUpdate = (labelRequested != null && labelRequested.equals(LoanoutBotGardenConstants.LABEL_REQUESTED_YES_VALUE));
+ } else {
+ doUpdate = ec.hasProperty(RUN_AFTER_MODIFIED_PROPERTY) && ((Boolean) ec.getProperty(RUN_AFTER_MODIFIED_PROPERTY));
+ }
+
+ if (doUpdate) {
+ logger.debug("Updating styled name");
+
+ String voucherCsid = doc.getName();
+
+ try {
+ createFormatter().formatVoucherName(voucherCsid);
+ } catch (Exception e) {
+ logger.error(e.getMessage(), e);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private FormatVoucherNameBatchJob createFormatter() {
+ ResourceMap<PoxPayloadIn, PoxPayloadOut> resourceMap = ResteasyProviderFactory.getContextData(ResourceMap.class);
+
+ FormatVoucherNameBatchJob formatter = new FormatVoucherNameBatchJob();
+ formatter.setResourceMap(resourceMap);
+
+ return formatter;
+ }
+}
\ No newline at end of file
--- /dev/null
+Manifest-Version: 1.0
+Bundle-ManifestVersion: 1
+Bundle-Name: org.collectionspace.services.listener.botgarden
+Bundle-SymbolicName: org.collectionspace.services.listener.botgarden;singleton:=true
+Bundle-Version: 1.0.0
+Bundle-Localization: plugin
+Bundle-Vendor: Nuxeo
+Require-Bundle: org.nuxeo.runtime,
+ org.nuxeo.ecm.core.api,
+ org.nuxeo.ecm.core,
+ org.nuxeo.ecm.webapp.core
+Provide-Package: org.collectionspace.services.listener.botgarden
+Nuxeo-Component: OSGI-INF/core-types-contrib.xml,
+ OSGI-INF/default-life-cycle-contrib.xml,
+ OSGI-INF/ecm-types-contrib.xml,
+ OSGI-INF/layouts-contrib.xml
--- /dev/null
+<?xml version="1.0"?>
+<component name="org.collectionspace.services.listener.botgarden.core.types">
+ <!-- This page intentionally left blank -->
+</component>
--- /dev/null
+<?xml version="1.0"?>
+<component name="org.collectionspace.services.listener.botgarden.LifeCycle">
+ <!-- This page intentionally left blank -->
+</component>
--- /dev/null
+<?xml version="1.0"?>
+<fragment>
+
+ <extension target="application#MODULE">
+ <module>
+ <java>${bundle.fileName}</java>
+ </module>
+ </extension>
+
+</fragment>
--- /dev/null
+<?xml version="1.0"?>
+<component name="org.collectionspace.services.listener.botgarden.ecm.types">
+ <extension target="org.nuxeo.ecm.core.event.EventServiceComponent" point="listener">
+ <listener name="updatelocationlistener" async="false" postCommit="false"
+ class="org.collectionspace.services.listener.botgarden.UpdateLocationListener" priority="100">
+ <event>documentCreated</event>
+ <event>beforeDocumentModification</event>
+ </listener>
+ </extension>
+
+ <extension target="org.nuxeo.ecm.core.event.EventServiceComponent" point="listener">
+ <listener name="updatedeadflaglistener" async="false" postCommit="false"
+ class="org.collectionspace.services.listener.botgarden.UpdateDeadFlagListener" priority="200">
+ <event>documentCreated</event>
+ <event>documentModified</event>
+ </listener>
+ </extension>
+
+ <extension target="org.nuxeo.ecm.core.event.EventServiceComponent" point="listener">
+ <listener name="updateformatteddisplaynamelistener" async="false" postCommit="false"
+ class="org.collectionspace.services.listener.naturalhistory.UpdateFormattedDisplayNameListener" priority="200">
+ <event>beforeDocumentModification</event>
+ <event>documentCreated</event>
+ </listener>
+ </extension>
+
+ <extension target="org.nuxeo.ecm.core.event.EventServiceComponent" point="listener">
+ <listener name="updaterareflaglistener" async="false" postCommit="false"
+ class="org.collectionspace.services.listener.botgarden.UpdateRareFlagListener" priority="200">
+ <event>beforeDocumentModification</event>
+ <event>documentModified</event>
+ <event>documentCreated</event>
+ </listener>
+ </extension>
+
+ <!-- UpdateAccessCodeListener must run after UpdateDeadFlagListener, since it depends on the dead flag being set. -->
+
+ <extension target="org.nuxeo.ecm.core.event.EventServiceComponent" point="listener">
+ <listener name="updateaccesscodelistener" async="false" postCommit="false"
+ class="org.collectionspace.services.listener.botgarden.UpdateAccessCodeListener" priority="300">
+ <event>beforeDocumentModification</event>
+ <event>documentModified</event>
+ <event>documentCreated</event>
+ <event>aboutToRemove</event>
+ <event>documentRemoved</event>
+ </listener>
+ </extension>
+
+ <!--
+ We should be able to use the versioning support that's now built into cspace, but need to make
+ sure it doesn't create versions on import, and we possibly need to refactor UpdateLocationListener
+ so that two versions don't get created in the case of a document that is created with an action code of dead.
+ -->
+ <extension target="org.nuxeo.ecm.core.event.EventServiceComponent" point="listener">
+ <listener name="createversionlistener" async="false" postCommit="false"
+ class="org.collectionspace.services.listener.botgarden.CreateVersionListener" priority="999">
+ <event>documentCreated</event>
+ <event>documentModified</event>
+ </listener>
+ </extension>
+
+ <!--
+ Deleting dead locations in a Nuxeo event handler causes problems in the app and ui layers,
+ since they're not expecting a save of a record to cause the record to be deleted. Instead,
+ deletion will be invoked from the ui layer.
+ -->
+ <!-- <extension target="org.nuxeo.ecm.core.event.EventServiceComponent" point="listener">
+ <listener name="deletedeadlocationlistener" async="false" postCommit="false"
+ class="org.collectionspace.services.listener.botgarden.DeleteDeadLocationListener" priority="150">
+ <event>documentModified</event>
+ </listener>
+ </extension> -->
+ <!-- <extension target="org.nuxeo.ecm.core.event.EventServiceComponent" point="listener">
+ <listener name="updatestylednamelistener" async="false" postCommit="true"
+ class="org.collectionspace.services.listener.botgarden.UpdateStyledNameListener" priority="200">
+ <event>documentCreated</event>
+ <event>beforeDocumentModification</event>
+ <event>documentModified</event>
+ </listener>
+ </extension> -->
+</component>
+
+
--- /dev/null
+<?xml version="1.0"?>
+<component name="org.collectionspace.services.listener.botgarden.layouts.webapp">
+ <!-- This page intentionally left blank -->
+</component>
<ant antfile="updateobjectlocationonmove/build.xml" target="deploy" inheritall="false"/>
<ant antfile="updaterelationsondelete/build.xml" target="deploy" inheritall="false"/>
<ant antfile="updateimagederivatives/build.xml" target="deploy" inheritall="false"/>
+ <ant antfile="naturalhistory/build.xml" target="deploy" inheritall="false"/>
+ <ant antfile="botgarden/build.xml" target="deploy" inheritall="false"/>
</target>
<target name="undeploy"
<ant antfile="updateobjectlocationonmove/build.xml" target="undeploy" inheritall="false"/>
<ant antfile="updaterelationsondelete/build.xml" target="undeploy" inheritall="false"/>
<ant antfile="updateimagederivatives/build.xml" target="undeploy" inheritall="false"/>
+ <ant antfile="naturalhistory/build.xml" target="undeploy" inheritall="false"/>
+ <ant antfile="botgarden/build.xml" target="undeploy" inheritall="false"/>
</target>
<target name="dist"
<ant antfile="updateobjectlocationonmove/build.xml" target="dist" inheritall="false"/>
<ant antfile="updaterelationsondelete/build.xml" target="dist" inheritall="false"/>
<ant antfile="updateimagederivatives/build.xml" target="dist" inheritall="false"/>
+ <ant antfile="naturalhistory/build.xml" target="dist" inheritall="false"/>
+ <ant antfile="botgarden/build.xml" target="dist" inheritall="false"/>
</target>
</project>
--- /dev/null
+listener.module.name=naturalhistory
\ No newline at end of file
--- /dev/null
+<project name="org.collectionspace.services.3rdparty.nuxeo.listener.naturalhistory" default="package" basedir=".">
+ <description>
+ CollectionSpace Nuxeo listener component type
+ </description>
+ <!-- Set global properties for this build -->
+ <property name="services.trunk" value="../../../.."/>
+ <!-- Environment should be declared before reading build.properties -->
+ <property environment="env" />
+ <!-- Set global properties for this build -->
+ <property file="${services.trunk}/build.properties" />
+ <!-- Set local properties for this build -->
+ <property file="build.properties" />
+ <property name="mvn.opts" value="-V" />
+ <property name="src" location="src"/>
+ <property name="build" location="build"/>
+ <property name="dist" location="dist"/>
+
+ <!-- The listener.module.name variable is set in a local properties file -->
+ <property name="jar.name"
+ value="org.collectionspace.services.listener.${listener.module.name}-${cspace.release}.jar"/>
+
+ <property name="jar.all"
+ value="org.collectionspace.services.listener.${listener.module.name}-*.jar"/>
+
+ <condition property="osfamily-unix">
+ <os family="unix" />
+ </condition>
+ <condition property="osfamily-windows">
+ <os family="windows" />
+ </condition>
+
+ <target name="init" >
+ <!-- Create the time stamp -->
+ <tstamp/>
+ <!-- Create the build directory structure used by compile -->
+ <mkdir dir="${build}"/>
+ </target>
+
+ <target name="package" depends="package-unix,package-windows"
+ description="Package CollectionSpace Services" />
+ <target name="package-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="package-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="install" depends="install-unix,install-windows"
+ description="Install" />
+ <target name="install-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="install-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="deploy" depends="install"
+ description="deploy listener in ${jee.server.nuxeo}">
+ <copy file="${basedir}/target/${jar.name}"
+ todir="${jee.deploy.nuxeo.plugins}"/>
+ </target>
+
+ <target name="undeploy"
+ description="undeploy listener from ${jee.server.nuxeo}">
+ <delete>
+ <fileset dir="${jee.deploy.nuxeo.plugins}">
+ <include name="${jar.all}"/>
+ </fileset>
+ </delete>
+ </target>
+
+</project>
--- /dev/null
+<?xml version="1.0"?>
+<project
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+ xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <artifactId>org.collectionspace.services.3rdparty.nuxeo.listener</artifactId>
+ <groupId>org.collectionspace.services</groupId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ </properties>
+ <artifactId>org.collectionspace.services.listener.naturalhistory</artifactId>
+ <name>org.collectionspace.services.listener.naturalhistory</name>
+ <url>http://maven.apache.org</url>
+
+
+ <dependencies>
+ <dependency>
+ <groupId>org.nuxeo.ecm.core</groupId>
+ <artifactId>nuxeo-core-storage-sql</artifactId>
+ <version>${nuxeo.core.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.nuxeo.ecm.platform</groupId>
+ <artifactId>nuxeo-platform-filemanager-api</artifactId>
+ <version>${nuxeo.platform.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.taxonomy.service</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ </dependencies>
+
+
+ <build>
+ <resources>
+ <resource>
+ <directory>src/main/resources</directory>
+ <filtering>true</filtering>
+ </resource>
+ </resources>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <configuration>
+ <archive>
+ <manifestFile>src/main/resources/META-INF/MANIFEST.MF</manifestFile>
+ <manifestEntries>
+ <Bundle-Version>${eclipseVersion}</Bundle-Version>
+ <Bundle-ManifestVersion>2</Bundle-ManifestVersion>
+ </manifestEntries>
+ </archive>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
--- /dev/null
+package org.collectionspace.services.listener.ucb;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.collectionspace.services.client.workflow.WorkflowClient;
+import org.collectionspace.services.common.api.RefName;
+import org.collectionspace.services.common.api.TaxonFormatter;
+import org.collectionspace.services.nuxeo.listener.AbstractCSEventListenerImpl;
+import org.collectionspace.services.taxonomy.nuxeo.TaxonBotGardenConstants;
+import org.collectionspace.services.taxonomy.nuxeo.TaxonConstants;
+import org.nuxeo.ecm.core.api.DocumentModel;
+import org.nuxeo.ecm.core.api.event.CoreEventConstants;
+import org.nuxeo.ecm.core.api.event.DocumentEventTypes;
+import org.nuxeo.ecm.core.event.Event;
+import org.nuxeo.ecm.core.event.EventContext;
+import org.nuxeo.ecm.core.event.impl.DocumentEventContext;
+
+public class UpdateFormattedDisplayNameListener extends AbstractCSEventListenerImpl {
+ public static final String RUN_AFTER_MODIFIED_PROPERTY = "UpdateFormattedDisplayNameListener.RUN_AFTER_MODIFIED";
+
+ private static final String[] DISPLAY_NAME_PATH_ELEMENTS = TaxonConstants.DISPLAY_NAME_FIELD_NAME.split("/");
+ private static final String TERM_GROUP_LIST_FIELD_NAME = DISPLAY_NAME_PATH_ELEMENTS[0];
+ private static final String DISPLAY_NAME_FIELD_NAME = DISPLAY_NAME_PATH_ELEMENTS[2];
+
+ private static final String[] FORMATTED_DISPLAY_NAME_PATH_ELEMENTS = TaxonConstants.FORMATTED_DISPLAY_NAME_FIELD_NAME.split("/");
+ private static final String FORMATTED_DISPLAY_NAME_FIELD_NAME = FORMATTED_DISPLAY_NAME_PATH_ELEMENTS[2];
+
+ final Log logger = LogFactory.getLog(UpdateFormattedDisplayNameListener.class);
+
+ @Override
+ public void handleEvent(Event event) {
+ EventContext ec = event.getContext();
+
+ if (ec instanceof DocumentEventContext) {
+ DocumentEventContext context = (DocumentEventContext) ec;
+ DocumentModel doc = context.getSourceDocument();
+
+ logger.debug("docType=" + doc.getType());
+
+ if (doc.getType().startsWith(TaxonConstants.NUXEO_DOCTYPE) &&
+ !doc.isVersion() &&
+ !doc.isProxy() &&
+ !doc.getCurrentLifeCycleState().equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+
+ String refName = (String) doc.getProperty(TaxonConstants.REFNAME_SCHEMA_NAME, TaxonConstants.REFNAME_FIELD_NAME);
+ RefName.AuthorityItem item = RefName.AuthorityItem.parse(refName);
+ String parentShortId = item.getParentShortIdentifier();
+
+ logger.debug("parentShortId=" + parentShortId);
+
+ if (!parentShortId.equals(TaxonBotGardenConstants.COMMON_VOCABULARY_SHORTID)) {
+ if (event.getName().equals(DocumentEventTypes.DOCUMENT_CREATED)) {
+ // Save the document, to get the BEFORE_DOC_UPDATE branch to run.
+ doc.getCoreSession().saveDocument(doc);
+ }
+ else if (event.getName().equals(DocumentEventTypes.BEFORE_DOC_UPDATE)) {
+ DocumentModel previousDoc = (DocumentModel) context.getProperty(CoreEventConstants.PREVIOUS_DOCUMENT_MODEL);
+
+ updateFormattedDisplayNames(doc, previousDoc);
+ }
+ }
+ }
+ }
+ }
+
+ private void updateFormattedDisplayNames(DocumentModel doc, DocumentModel previousDoc) {
+ //Set<String> previousDisplayNames = getDisplayNames(previousDoc);
+ TaxonFormatter formatter = new TaxonFormatter();
+ List<Map<String, Object>> termGroupList = (List<Map<String, Object>>) doc.getProperty(TaxonConstants.DISPLAY_NAME_SCHEMA_NAME, TERM_GROUP_LIST_FIELD_NAME);
+
+ for (Map<String, Object> termGroup : termGroupList) {
+ String displayName = (String) termGroup.get(DISPLAY_NAME_FIELD_NAME);
+ String formattedDisplayName = (String) termGroup.get(FORMATTED_DISPLAY_NAME_FIELD_NAME);
+
+ if (StringUtils.isBlank(formattedDisplayName)) {
+ formattedDisplayName = "";
+
+ if (StringUtils.isNotBlank(displayName)) {
+ formattedDisplayName = formatter.format(displayName);
+ }
+
+ termGroup.put(FORMATTED_DISPLAY_NAME_FIELD_NAME, formattedDisplayName);
+ }
+ }
+
+ Map<String, Object> updateMap = new HashMap<String, Object>();
+ updateMap.put(TERM_GROUP_LIST_FIELD_NAME, termGroupList);
+
+ doc.setProperties(TaxonConstants.DISPLAY_NAME_SCHEMA_NAME, updateMap);
+ }
+
+ /*
+ private Set<String> getDisplayNames(DocumentModel doc) throws ClientException {
+ Set<String> displayNames = new HashSet<String>();
+ List<Map<String, Object>> termGroupList = (List<Map<String, Object>>) doc.getProperty(TaxonConstants.DISPLAY_NAME_SCHEMA_NAME, TERM_GROUP_LIST_FIELD_NAME);
+
+ for (Map<String, Object> termGroup : termGroupList) {
+ String displayName = (String) termGroup.get(DISPLAY_NAME_FIELD_NAME);
+
+ if (displayName != null) {
+ displayNames.add(displayName);
+ }
+ }
+
+ return displayNames;
+ }
+ */
+}
\ No newline at end of file
--- /dev/null
+Manifest-Version: 1.0
+Bundle-ManifestVersion: 1
+Bundle-Name: org.collectionspace.services.listener.naturalhistory
+Bundle-SymbolicName: org.collectionspace.services.listener.naturalhistory;singleton:=true
+Bundle-Version: 1.0.0
+Bundle-Localization: plugin
+Bundle-Vendor: Nuxeo
+Require-Bundle: org.nuxeo.runtime,
+ org.nuxeo.ecm.core.api,
+ org.nuxeo.ecm.core,
+ org.nuxeo.ecm.webapp.core
+Provide-Package: org.collectionspace.services.listener.naturalhistory
+Nuxeo-Component: OSGI-INF/core-types-contrib.xml,
+ OSGI-INF/default-life-cycle-contrib.xml,
+ OSGI-INF/ecm-types-contrib.xml,
+ OSGI-INF/layouts-contrib.xml
--- /dev/null
+<?xml version="1.0"?>
+<component name="org.collectionspace.services.listener.naturalhistory.core.types">
+ <!-- This page intentionally left blank -->
+</component>
--- /dev/null
+<?xml version="1.0"?>
+<component name="org.collectionspace.services.listener.naturalhistory.LifeCycle">
+ <!-- This page intentionally left blank -->
+</component>
--- /dev/null
+<?xml version="1.0"?>
+<fragment>
+
+ <extension target="application#MODULE">
+ <module>
+ <java>${bundle.fileName}</java>
+ </module>
+ </extension>
+
+</fragment>
--- /dev/null
+<?xml version="1.0"?>
+<component name="org.collectionspace.services.listener.naturalhistory.ecm.types">
+</component>
+
+
--- /dev/null
+<?xml version="1.0"?>
+<component name="org.collectionspace.services.listener.naturalhistory.layouts.webapp">
+ <!-- This page intentionally left blank -->
+</component>
<module>updateobjectlocationonmove</module>
<module>updaterelationsondelete</module>
<module>updateimagederivatives</module>
+ <module>naturalhistory</module>
+ <module>botgarden</module>
</modules>
<dependencies>
import org.collectionspace.services.common.api.CommonAPI;
import org.collectionspace.services.nuxeo.client.java.CoreSessionInterface;
import org.collectionspace.services.nuxeo.client.java.CoreSessionWrapper;
+import org.collectionspace.services.nuxeo.listener.AbstractCSEventListenerImpl;
import org.collectionspace.services.nuxeo.util.NuxeoUtils;
import org.nuxeo.ecm.core.api.Blob;
-import org.nuxeo.ecm.core.api.ClientException;
import org.nuxeo.ecm.core.api.DocumentModel;
import org.nuxeo.ecm.core.api.blobholder.BlobHolder;
import org.nuxeo.ecm.core.api.blobholder.DocumentBlobHolder;
//import org.nuxeo.ecm.core.api.event.DocumentEventTypes;
import org.nuxeo.ecm.core.event.Event;
import org.nuxeo.ecm.core.event.EventContext;
-import org.nuxeo.ecm.core.event.EventListener;
import org.nuxeo.ecm.core.event.impl.DocumentEventContext;
//import org.nuxeo.ecm.platform.picture.api.ImagingDocumentConstants;
import org.nuxeo.ecm.platform.picture.api.ImagingDocumentConstants;
-public class UpdateImageDerivatives implements EventListener {
+public class UpdateImageDerivatives extends AbstractCSEventListenerImpl {
// All Nuxeo sessions that get passed around to CollectionSpace code need to
// be wrapped inside of a CoreSessionWrapper. For example:
private final static Log logger = LogFactory.getLog(UpdateImageDerivatives.class);
@Override
- public void handleEvent(Event event) throws ClientException {
+ public void handleEvent(Event event) {
if (logger.isTraceEnabled()) {
logger.trace(String.format("Entering handleEvent in '%s'...", getClass().getName()));
}
private void purgeOriginalImage(DocumentModel docModel, CoreSessionInterface nuxeoSession) {
//
// Empty the document model's "content" property -this does not delete the actual file/blob it
- // just disassociates the blob content (aka, the orginal image) from the document.
+ // just disassociates the blob content (aka, the original image) from the document.
//
docModel.setPropertyValue("file:content", (Serializable) null);
import org.collectionspace.services.client.workflow.WorkflowClient;
import org.collectionspace.services.nuxeo.client.java.CoreSessionInterface;
import org.collectionspace.services.nuxeo.client.java.CoreSessionWrapper;
-import org.nuxeo.ecm.core.api.ClientException;
+import org.collectionspace.services.nuxeo.listener.AbstractCSEventListenerImpl;
import org.nuxeo.ecm.core.api.DocumentModel;
import org.nuxeo.ecm.core.api.DocumentModelList;
import org.nuxeo.ecm.core.api.impl.LifeCycleFilter;
import org.nuxeo.ecm.core.event.EventListener;
import org.nuxeo.ecm.core.event.impl.DocumentEventContext;
-public class UpdateRelationsOnDelete implements EventListener {
+public class UpdateRelationsOnDelete extends AbstractCSEventListenerImpl {
// FIXME: We might experiment here with using log4j instead of Apache Commons Logging;
// am using the latter to follow Ray's pattern for now
final static String RELATIONS_COMMON_OBJECT_CSID_FIELD = "relations_common:objectCsid";
@Override
- public void handleEvent(Event event) throws ClientException {
+ public void handleEvent(Event event) {
logger.trace("In handleEvent in UpdateRelationsOnDelete ...");
EventContext eventContext = event.getContext();
DocumentModelList matchingDocuments;
try {
matchingDocuments = session.query(queryString.toString(), workflowStateFilter);
- } catch (ClientException ce) {
+ } catch (Exception ce) {
logger.warn("Error attempting to retrieve relation records where "
+ "record of type '" + docModel.getType() + "' with CSID " + csid
+ " is the subject or object of any relation: " + ce.getMessage());
--- /dev/null
+<?xml version="1.0"?>
+
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
+
+ <xs:complexType name="content">
+ <xs:sequence>
+ <xs:element name="encoding" type="xs:string" />
+ <xs:element name="mime-type" type="xs:string" />
+ <xs:element name="data" type="xs:base64Binary" />
+ <xs:element name="name" type="xs:string" />
+ <xs:element name="length" type="xs:long" />
+ <xs:element name="digest" type="xs:string" />
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="externalcontent">
+ <xs:sequence>
+ <xs:element name="encoding" type="xs:string" />
+ <xs:element name="mime-type" type="xs:string" />
+ <xs:element name="uri" type="xs:string" />
+ <xs:element name="name" type="xs:string" />
+ <xs:element name="length" type="xs:long" />
+ <xs:element name="digest" type="xs:string" />
+ </xs:sequence>
+ </xs:complexType>
+
+</xs:schema>
+
+
<artifactId>org.collectionspace.services.loanout.service</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.claim.service</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<dependency>
<groupId>org.collectionspace.services</groupId>
<artifactId>org.collectionspace.services.exhibition.service</artifactId>
<artifactId>org.collectionspace.services.movement.service</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.propagation.service</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.pottag.service</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<dependency>
<groupId>org.collectionspace.services</groupId>
<artifactId>org.collectionspace.services.report.service</artifactId>
//
System.out.println(String.format("%tc [INFO] Starting up the CollectionSpace Services' JAX-RS application.", new Date()));
super.contextInitialized(event);
- CollectionSpaceJaxRsApplication app =
- (CollectionSpaceJaxRsApplication)deployment.getApplication();
+ CollectionSpaceJaxRsApplication app = (CollectionSpaceJaxRsApplication)deployment.getApplication();
Dispatcher disp = deployment.getDispatcher();
disp.getDefaultContextObjects().put(ResourceMap.class, app.getResourceMap());
System.out.println(String.format("%tc [INFO] CollectionSpace Services' JAX-RS application started.", new Date()));
import org.collectionspace.services.concept.ConceptAuthorityResource;
import org.collectionspace.services.taxonomy.TaxonomyAuthorityResource;
import org.collectionspace.services.movement.MovementResource;
+import org.collectionspace.services.propagation.PropagationResource;
+import org.collectionspace.services.pottag.PottagResource;
import org.collectionspace.services.report.ReportResource;
import org.collectionspace.services.acquisition.AcquisitionResource;
import org.collectionspace.services.dimension.DimensionResource;
import org.collectionspace.services.organization.OrgAuthorityResource;
import org.collectionspace.services.person.PersonAuthorityResource;
import org.collectionspace.services.citation.CitationAuthorityResource;
+import org.collectionspace.services.claim.ClaimResource;
import org.collectionspace.services.exhibition.ExhibitionResource;
import org.collectionspace.services.conditioncheck.ConditioncheckResource;
import org.collectionspace.services.conservation.ConservationResource;
addResourceToMapAndSingletons(new MediaResource());
addResourceToMapAndSingletons(new BlobResource());
addResourceToMapAndSingletons(new MovementResource());
+ addResourceToMapAndSingletons(new PropagationResource());
+ addResourceToMapAndSingletons(new PottagResource());
+ addResourceToMapAndSingletons(new ClaimResource());
addResourceToMapAndSingletons(new ReportResource());
addResourceToMapAndSingletons(new PublicItemResource());
}
@Override
- public ResourceMap getResourceMap() {
+ public ResourceMap<PoxPayloadIn, PoxPayloadOut> getResourceMap() {
return resourceMap;
}
<artifactId>org.collectionspace.services.authentication.jaxb</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.authorization.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<dependency>
<groupId>org.collectionspace.services</groupId>
<artifactId>org.collectionspace.services.client</artifactId>
http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_1_0.xsd" xmlns="http://java.sun.com/xml/ns/persistence" xmlns:orm="http://java.sun.com/xml/ns/persistence/orm" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<persistence-unit name="org.collectionspace.services.authentication">
<class>org.collectionspace.services.authentication.User</class>
- <class>org.collectionspace.services.authentication.Role</class>
- <class>org.collectionspace.services.authentication.UserRole</class>
+ <class>org.collectionspace.services.authorization.Role</class>
<properties>
<property name="hibernate.ejb.cfgfile" value="hibernate.cfg.xml"/>
* @param tenantId
* @return
*/
- protected String getItemDocType(String tenantId) {
+ public String getItemDocType(String tenantId) {
return getDocType(tenantId, getItemServiceName());
}
<persistence version="1.0" xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_1_0.xsd
http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_1_0.xsd" xmlns="http://java.sun.com/xml/ns/persistence" xmlns:orm="http://java.sun.com/xml/ns/persistence/orm" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<persistence-unit name="org.collectionspace.services.authorization">
- <class>org.collectionspace.services.authorization.Permission</class>
- <class>org.collectionspace.services.authorization.PermissionAction</class>
+ <class>org.collectionspace.services.authorization.perms.Permission</class>
+ <class>org.collectionspace.services.authorization.perms.PermissionAction</class>
<class>org.collectionspace.services.authorization.PermissionRoleRel</class>
<class>org.collectionspace.services.authorization.Role</class>
<class>org.collectionspace.services.authorization.AccountRoleRel</class>
</exec>
</target>
- <target name="deploy" depends="install"
- description="deploy batch service">
+ <target name="deploy" depends="install" description="deploy batch service">
+ <!-- Obsolete: the Nuxeo artifact is now created and deployed from config in the Application Layer -->
+ <!--
<ant antfile="3rdparty/build.xml" target="deploy" inheritall="false"/>
+ -->
</target>
<target name="undeploy"
<modules>
<module>jaxb</module>
<module>service</module>
- <module>3rdparty</module>
+ <!-- <module>3rdparty</module> --> <!-- Obsolete: the Nuxeo artifact is now created and deployed from config in the Application Layer -->
<module>client</module>
</modules>
<artifactId>org.collectionspace.services.batch.jaxb</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.authority.service</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<dependency>
<groupId>org.collectionspace.services</groupId>
<artifactId>org.collectionspace.services.collectionobject.client</artifactId>
<artifactId>org.collectionspace.services.collectionobject.jaxb</artifactId> <!-- FIXME: REM - Is this really a required dependency? -->
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.collectionobject.service</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<dependency>
<groupId>org.collectionspace.services</groupId>
<artifactId>org.collectionspace.services.loanout.client</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.loanout.service</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<dependency>
<groupId>org.collectionspace.services</groupId>
<artifactId>org.collectionspace.services.movement.client</artifactId>
<artifactId>org.collectionspace.services.movement.service</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.place.client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.place.service</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.pottag.client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.pottag.service</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<dependency>
<groupId>org.collectionspace.services</groupId>
<artifactId>org.collectionspace.services.relation.client</artifactId>
<version>${project.version}</version>
- </dependency>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.relation.service</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.taxonomy.client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.taxonomy.service</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<!-- External dependencies -->
<dependency>
<groupId>junit</groupId>
import java.util.Collections;
import java.util.List;
import javax.ws.rs.core.Response;
+
+import org.collectionspace.services.client.PoxPayloadIn;
+import org.collectionspace.services.client.PoxPayloadOut;
import org.collectionspace.services.common.ResourceMap;
import org.collectionspace.services.common.context.ServiceContext;
import org.collectionspace.services.common.invocable.InvocationContext;
import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.nuxeo.client.java.CoreSessionInterface;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
protected final String CSID_VALUES_NOT_PROVIDED_IN_INVOCATION_CONTEXT =
"Could not find required CSID values in the invocation context for this batch job.";
private List<String> invocationModes;
- private ResourceMap resourceMap;
+ private ResourceMap<PoxPayloadIn, PoxPayloadOut> resourceMap;
private InvocationContext invocationCtx;
- private ServiceContext ctx;
+ private ServiceContext<PoxPayloadIn, PoxPayloadOut> ctx;
+ private String tenantId;
+
private int completionStatus;
private InvocationResults results;
this.invocationModes = invocationModes;
}
- public ResourceMap getResourceMap() {
+ public ResourceMap<PoxPayloadIn, PoxPayloadOut> getResourceMap() {
return resourceMap;
}
@Override
- public void setResourceMap(ResourceMap resourceMap) {
+ public void setResourceMap(ResourceMap<PoxPayloadIn, PoxPayloadOut> resourceMap) {
this.resourceMap = resourceMap;
}
@Override
- public void setServiceContext(ServiceContext context) {
+ public void setServiceContext(ServiceContext<PoxPayloadIn, PoxPayloadOut> context) {
this.ctx = context;
}
@Override
- public ServiceContext getServiceContext() {
+ public ServiceContext<PoxPayloadIn, PoxPayloadOut> getServiceContext() {
return ctx;
}
+
+ /**
+ * Returns the current Nuxeo repository session from the service context, so
+ * batch jobs can reuse the invoking request's session rather than opening a
+ * new one.
+ *
+ * @return the active repository session, or null if no service context was set
+ */
+ @Override
+ public CoreSessionInterface getRepoSession() {
+ CoreSessionInterface result = null;
+
+ if (ctx != null) {
+ result = (CoreSessionInterface) ctx.getCurrentRepositorySession();
+ } else {
+ // A missing context is an invoker programming error; log it and fall
+ // through to return null rather than throwing.
+ logger.error(String.format("Batch job '%s' invoked with a null/empty service context.",
+ this.getClass().getName()));
+ }
+
+ return result;
+ }
+
+ /**
+ * Returns the tenant ID of the service context this batch job was invoked in.
+ *
+ * @return the tenant ID, or null if no service context was set
+ */
+ @Override
+ public String getTenantId() {
+ String result = null;
+
+ if (ctx != null) {
+ result = ctx.getTenantId();
+ } else {
+ // Mirror getRepoSession(): log the bad invocation and return null.
+ logger.error(String.format("Batch job '%s' invoked with a null/empty service context.",
+ this.getClass().getName()));
+ }
+
+ return result;
+ }
@Override
public InvocationContext getInvocationContext() {
package org.collectionspace.services.batch;
+import org.collectionspace.services.client.PoxPayloadIn;
+import org.collectionspace.services.client.PoxPayloadOut;
import org.collectionspace.services.common.ResourceMap;
import org.collectionspace.services.common.invocable.Invocable;
+import org.collectionspace.services.nuxeo.client.java.CoreSessionInterface;
public interface BatchInvocable extends Invocable {
* Sets the invocation context for the batch job. Called before run().
* @param context an instance of InvocationContext.
*/
- public void setResourceMap(ResourceMap resourceMap);
+ public void setResourceMap(ResourceMap<PoxPayloadIn, PoxPayloadOut> resourceMap);
+ public CoreSessionInterface getRepoSession();
+
+ public String getTenantId();
}
import org.collectionspace.services.common.invocable.Invocable;
import org.collectionspace.services.common.invocable.InvocationContext;
import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.common.query.QueryManager;
import org.collectionspace.services.jaxb.AbstractCommonList;
import javax.ws.rs.Consumes;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.UriInfo;
@Path(BatchClient.SERVICE_PATH)
}
}
+ // FIXME: Resource classes should not be invoking handlers directly. This resource method should follow the conventions used by
+ // other resource methods and use the getRepositoryClient() methods.
+ /**
+ * Returns the list of batch job records, optionally narrowed by the
+ * "doctype" and/or "mode" query parameters, which are ANDed into the
+ * document filter's WHERE clause.
+ */
+ @Override
+ protected AbstractCommonList getCommonList(UriInfo ui) {
+ try {
+ ServiceContext<PoxPayloadIn, PoxPayloadOut> ctx = createServiceContext(ui);
+ MultivaluedMap<String, String> queryParams = ctx.getQueryParams();
+ DocumentHandler handler = createDocumentHandler(ctx);
+ String docType = queryParams.getFirst(IQueryManager.SEARCH_TYPE_DOCTYPE);
+ String mode = queryParams.getFirst(IQueryManager.SEARCH_TYPE_INVOCATION_MODE);
+ String whereClause = null;
+ DocumentFilter documentFilter = null;
+ String common_part = ctx.getCommonPartLabel();
+
+ // Restrict to batch jobs applicable to the given document type.
+ if (docType != null && !docType.isEmpty()) {
+ whereClause = QueryManager.createWhereClauseForInvocableByDocType(
+ common_part, docType);
+ documentFilter = handler.getDocumentFilter();
+ documentFilter.appendWhereClause(whereClause, IQueryManager.SEARCH_QUALIFIER_AND);
+ }
+
+ // Restrict to batch jobs supporting the given invocation mode.
+ if (mode != null && !mode.isEmpty()) {
+ whereClause = QueryManager.createWhereClauseForInvocableByMode(
+ common_part, mode);
+ documentFilter = handler.getDocumentFilter();
+ documentFilter.appendWhereClause(whereClause, IQueryManager.SEARCH_QUALIFIER_AND);
+ }
+
+ // documentFilter is non-null whenever whereClause is non-null (both are
+ // assigned together in the branches above).
+ if (whereClause !=null && logger.isDebugEnabled()) {
+ logger.debug("The WHERE clause is: " + documentFilter.getWhereClause());
+ }
+
+ getRepositoryClient(ctx).getFiltered(ctx, handler);
+ AbstractCommonList list = (AbstractCommonList) handler.getCommonPartList();
+ return list;
+ } catch (Exception e) {
+ throw bigReThrow(e, ServiceMessages.LIST_FAILED);
+ }
+ }
+
/**
* Gets the authorityItem list for the specified authority
* If partialPerm is specified, keywords will be ignored.
return ptClause;
}
-
-
-
@POST
@Path("{csid}")
public InvocationResults invokeBatchJob(
--- /dev/null
+package org.collectionspace.services.batch.nuxeo;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.ws.rs.core.PathSegment;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang.StringUtils;
+import org.collectionspace.services.batch.AbstractBatchInvocable;
+import org.collectionspace.services.client.CollectionObjectClient;
+import org.collectionspace.services.client.CollectionSpaceClient;
+import org.collectionspace.services.client.CollectionSpaceClientUtils;
+import org.collectionspace.services.client.IRelationsManager;
+import org.collectionspace.services.client.MovementClient;
+import org.collectionspace.services.client.PayloadOutputPart;
+import org.collectionspace.services.client.PlaceAuthorityClient;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.client.RelationClient;
+import org.collectionspace.services.client.TaxonomyAuthorityClient;
+import org.collectionspace.services.client.workflow.WorkflowClient;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectConstants;
+import org.collectionspace.services.common.NuxeoBasedResource;
+import org.collectionspace.services.common.ServiceMain;
+import org.collectionspace.services.common.UriTemplateRegistry;
+import org.collectionspace.services.common.api.RefName;
+import org.collectionspace.services.common.authorityref.AuthorityRefDocList;
+import org.collectionspace.services.common.context.ServiceBindingUtils;
+import org.collectionspace.services.common.relation.RelationResource;
+import org.collectionspace.services.common.vocabulary.AuthorityResource;
+import org.collectionspace.services.jaxb.AbstractCommonList;
+import org.collectionspace.services.movement.nuxeo.MovementConstants;
+import org.collectionspace.services.relation.RelationsCommonList;
+import org.collectionspace.services.relation.RelationsCommonList.RelationListItem;
+
+import org.dom4j.DocumentException;
+import org.dom4j.Element;
+import org.dom4j.Node;
+
+import org.jboss.resteasy.specimpl.PathSegmentImpl;
+//import org.jboss.resteasy.specimpl.UriInfoImpl;
+import org.collectionspace.services.batch.UriInfoImpl;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class AbstractBatchJob extends AbstractBatchInvocable {
+ public final int CREATED_STATUS = Response.Status.CREATED.getStatusCode();
+ public final int BAD_REQUEST_STATUS = Response.Status.BAD_REQUEST.getStatusCode();
+ public final int INT_ERROR_STATUS = Response.Status.INTERNAL_SERVER_ERROR.getStatusCode();
+
+ final Logger logger = LoggerFactory.getLogger(AbstractBatchJob.class);
+
+ private Map<String, String> authorityServiceNamesByDocType;
+
+ public abstract void run();
+
+ /** Returns the XML element for a single named field taken from the given map. */
+ protected String getFieldXml(Map<String, String> fields, String fieldName) {
+ return getFieldXml(fieldName, fields.get(fieldName));
+ }
+
+ /**
+ * Builds an XML fragment containing one element per entry in the given map,
+ * in the map's iteration order.
+ *
+ * @param fields field names mapped to their (possibly null) values
+ * @return the concatenated XML elements
+ */
+ protected String getFieldXml(Map<String, String> fields) {
+ // StringBuilder instead of the legacy synchronized StringBuffer, and
+ // entrySet iteration to avoid a second map lookup per key.
+ StringBuilder xmlBuffer = new StringBuilder();
+
+ for (Map.Entry<String, String> field : fields.entrySet()) {
+ xmlBuffer.append(getFieldXml(field.getKey(), field.getValue()));
+ }
+
+ return xmlBuffer.toString();
+ }
+
+ /**
+ * Returns a single XML element named {@code fieldName} whose text content is
+ * the XML-escaped {@code fieldValue}; a null value yields an empty element.
+ */
+ protected String getFieldXml(String fieldName, String fieldValue) {
+ String escapedValue = (fieldValue == null) ? "" : StringEscapeUtils.escapeXml(fieldValue);
+
+ return "<" + fieldName + ">" + escapedValue + "</" + fieldName + ">";
+ }
+
+ /**
+ * Creates a relation record linking a subject record to an object record.
+ *
+ * @param subjectCsid the csid of the subject record
+ * @param subjectDocType the Nuxeo document type of the subject record
+ * @param objectCsid the csid of the object record
+ * @param objectDocType the Nuxeo document type of the object record
+ * @param relationshipType the relation predicate (e.g. "affects", "hasBroader")
+ * @return the csid of the newly created relation record
+ * @throws ResourceException if the relation service did not answer 201 CREATED
+ */
+ protected String createRelation(String subjectCsid, String subjectDocType, String objectCsid, String objectDocType, String relationshipType) throws ResourceException {
+ String relationCsid = null;
+
+ // Build the POX payload by hand; every caller-supplied value is XML-escaped.
+ String createRelationPayload =
+ "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
+ "<document name=\"relations\">" +
+ "<ns2:relations_common xmlns:ns2=\"http://collectionspace.org/services/relation\" xmlns:ns3=\"http://collectionspace.org/services/jaxb\">" +
+ "<subjectCsid>" + StringEscapeUtils.escapeXml(subjectCsid) + "</subjectCsid>" +
+ "<subjectDocumentType>" + StringEscapeUtils.escapeXml(subjectDocType) + "</subjectDocumentType>" +
+ "<objectCsid>" + StringEscapeUtils.escapeXml(objectCsid) + "</objectCsid>" +
+ "<objectDocumentType>" + StringEscapeUtils.escapeXml(objectDocType) + "</objectDocumentType>" +
+ "<relationshipType>" + StringEscapeUtils.escapeXml(relationshipType) + "</relationshipType>" +
+ "</ns2:relations_common>" +
+ "</document>";
+
+ NuxeoBasedResource resource = (NuxeoBasedResource) getResourceMap().get(RelationClient.SERVICE_NAME);
+ Response response = resource.create(getResourceMap(), null, createRelationPayload);
+
+ if (response.getStatus() == CREATED_STATUS) {
+ relationCsid = CollectionSpaceClientUtils.extractId(response);
+ }
+ else {
+ throw new ResourceException(response, "Error creating relation");
+ }
+
+ return relationCsid;
+ }
+
+ /**
+ * Return related records, based on the supplied search criteria. Soft-deleted relations
+ * are filtered from the list, but soft-deleted subject/object records are not.
+ *
+ * @param subjectCsid The csid of the subject record. If null or empty, match any subject.
+ * @param subjectDocType The document type of the subject record. If null or empty, match any subject type.
+ * @param predicate The predicate of the relation. If null or empty, match any predicate.
+ * @param objectCsid The csid of the object record. If null or empty, match any object.
+ * @param objectDocType The document type of the object record. If null or empty, match any object type.
+ * @return the matching relation list items
+ * @throws URISyntaxException
+ */
+ protected List<RelationListItem> findRelated(String subjectCsid, String subjectDocType, String predicate, String objectCsid, String objectDocType) throws URISyntaxException {
+ // Look up the relation resource from the shared resource map and run a
+ // search built from the supplied criteria (see createRelationSearchUriInfo).
+ RelationResource relationResource = (RelationResource) getResourceMap().get(RelationClient.SERVICE_NAME);
+ RelationsCommonList relationList = relationResource.getList(createRelationSearchUriInfo(subjectCsid, subjectDocType, predicate, objectCsid, objectDocType));
+
+ return relationList.getRelationListItem();
+ }
+
+ /**
+ * Finds relations matching the supplied criteria and returns the csids of
+ * the records on the object side of each relation.
+ */
+ protected List<String> findRelatedObjects(String subjectCsid, String subjectDocType, String predicate, String objectCsid, String objectDocType) throws URISyntaxException {
+ List<RelationsCommonList.RelationListItem> relations =
+ findRelated(subjectCsid, subjectDocType, predicate, objectCsid, objectDocType);
+ List<String> objectCsids = new ArrayList<String>();
+
+ for (RelationsCommonList.RelationListItem relation : relations) {
+ objectCsids.add(relation.getObjectCsid());
+ }
+
+ return objectCsids;
+ }
+
+ /**
+ * Finds relations matching the supplied criteria and returns the csids of
+ * the records on the subject side of each relation.
+ */
+ protected List<String> findRelatedSubjects(String subjectCsid, String subjectDocType, String predicate, String objectCsid, String objectDocType) throws URISyntaxException {
+ List<RelationsCommonList.RelationListItem> relations =
+ findRelated(subjectCsid, subjectDocType, predicate, objectCsid, objectDocType);
+ List<String> subjectCsids = new ArrayList<String>();
+
+ for (RelationsCommonList.RelationListItem relation : relations) {
+ subjectCsids.add(relation.getSubjectCsid());
+ }
+
+ return subjectCsids;
+ }
+
+ /** Returns csids of collection objects that the given record "affects". */
+ protected List<String> findRelatedCollectionObjects(String subjectCsid) throws URISyntaxException {
+ return findRelatedObjects(subjectCsid, null, "affects", null, CollectionObjectConstants.NUXEO_DOCTYPE);
+ }
+
+ /** Returns csids of movement records that the given record "affects". */
+ protected List<String> findRelatedMovements(String subjectCsid) throws URISyntaxException {
+ return findRelatedObjects(subjectCsid, null, "affects", null, MovementConstants.NUXEO_DOCTYPE);
+ }
+
+ /**
+ * Returns the csid of the given record's broader (parent) record via the
+ * "hasBroader" relation, or null if none exists. If more than one broader
+ * relation exists, the first one returned by the search wins.
+ */
+ protected String findBroader(String subjectCsid) throws URISyntaxException {
+ List<String> broaderCsids = findRelatedObjects(subjectCsid, null, "hasBroader", null, null);
+
+ return broaderCsids.size() > 0 ? broaderCsids.get(0) : null;
+ }
+
+ /** Returns csids of records narrower than (children of) the given record. */
+ protected List<String> findNarrower(String subjectCsid) throws URISyntaxException {
+ return findRelatedSubjects(null, null, "hasBroader", subjectCsid, null);
+ }
+
+ /**
+ * Returns the movement record related to the specified record, if there is only one.
+ * Returns null if there are zero or more than one related movement records.
+ * Soft-deleted movement records are ignored.
+ *
+ * @param subjectCsid The csid of the record
+ * @return the single related movement csid, or null
+ * @throws URISyntaxException
+ * @throws DocumentException
+ */
+ protected String findSingleRelatedMovement(String subjectCsid) throws URISyntaxException, DocumentException {
+ String foundMovementCsid = null;
+ List<String> movementCsids = findRelatedMovements(subjectCsid);
+
+ for (String movementCsid : movementCsids) {
+ // Fetch each movement to check its workflow state; deleted ones don't count.
+ PoxPayloadOut movementPayload = findMovementByCsid(movementCsid);
+ String movementWorkflowState = getFieldValue(movementPayload, CollectionSpaceClient.COLLECTIONSPACE_CORE_SCHEMA, CollectionSpaceClient.COLLECTIONSPACE_CORE_WORKFLOWSTATE);
+
+ if (!movementWorkflowState.equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+ if (foundMovementCsid != null) {
+ // Second live movement found: not unique, so bail out with null.
+ return null;
+ }
+
+ foundMovementCsid = movementCsid;
+ }
+ }
+
+ return foundMovementCsid;
+ }
+
+ /**
+ * Retrieves a record by its service URI. Two URI shapes are understood:
+ * "/{service}/{csid}" for plain records and
+ * "/{service}/{vocabularyCsid}/items/{csid}" for authority items.
+ *
+ * @param uri the record's URI, starting with "/"
+ * @return the record's payload, or null if the URI has an unrecognized shape
+ */
+ protected PoxPayloadOut findByUri(String uri) throws URISyntaxException, DocumentException {
+ PoxPayloadOut payload = null;
+ String[] uriParts = uri.split("/");
+
+ // A leading "/" puts an empty string at index 0, so a 3-element split
+ // means "/{service}/{csid}".
+ if (uriParts.length == 3) {
+ String serviceName = uriParts[1];
+ String csid = uriParts[2];
+
+ payload = findByCsid(serviceName, csid);
+ }
+ else if (uriParts.length == 5) {
+ String serviceName = uriParts[1];
+ String vocabularyCsid = uriParts[2];
+ String items = uriParts[3];
+ String csid = uriParts[4];
+
+ if (items.equals("items")) {
+ payload = findAuthorityItemByCsid(serviceName, vocabularyCsid, csid);
+ }
+ }
+
+ return payload;
+ }
+
+ /**
+ * Retrieves a record from the named service by csid and parses the raw
+ * response bytes into a POX payload.
+ */
+ protected PoxPayloadOut findByCsid(String serviceName, String csid) throws URISyntaxException, DocumentException {
+ NuxeoBasedResource resource = (NuxeoBasedResource) getResourceMap().get(serviceName);
+ byte[] outputBytes = (byte[]) resource.get(null, createUriInfo(), csid).getEntity();
+
+ PoxPayloadOut payload = new PoxPayloadOut(outputBytes);
+
+ return payload;
+ }
+
+ /** Convenience lookup of a collection object record by csid. */
+ protected PoxPayloadOut findCollectionObjectByCsid(String csid) throws URISyntaxException, DocumentException {
+ return findByCsid(CollectionObjectClient.SERVICE_NAME, csid);
+ }
+
+ /** Convenience lookup of a movement record by csid. */
+ protected PoxPayloadOut findMovementByCsid(String csid) throws URISyntaxException, DocumentException {
+ return findByCsid(MovementClient.SERVICE_NAME, csid);
+ }
+
+ /** Lists csids of one page of records from the named service, unsorted. */
+ protected List<String> findAll(String serviceName, int pageSize, int pageNum) throws URISyntaxException, DocumentException {
+ return findAll(serviceName, pageSize, pageNum, null);
+ }
+
+ /**
+ * Lists csids of one page of records from the named service.
+ *
+ * @param serviceName the service to list records from
+ * @param pageSize the page size
+ * @param pageNum the zero-based page number
+ * @param sortBy the sort specification, or null for the default order
+ */
+ protected List<String> findAll(String serviceName, int pageSize, int pageNum, String sortBy) throws URISyntaxException, DocumentException {
+ NuxeoBasedResource resource = (NuxeoBasedResource) getResourceMap().get(serviceName);
+
+ // Bug fix: previously passed null here, silently discarding the
+ // caller-supplied sortBy argument.
+ return findAll(resource, pageSize, pageNum, sortBy);
+ }
+
+ /**
+ * Lists csids of one page of records from the given resource. Relation
+ * lists carry their csids in typed list items; all other lists expose them
+ * as generic DOM child elements named "csid".
+ *
+ * NOTE(review): the sortBy parameter is not forwarded to
+ * createPagedListUriInfo here (null is passed) — confirm whether that is
+ * intentional.
+ */
+ protected List<String> findAll(NuxeoBasedResource resource, int pageSize, int pageNum, String sortBy) throws URISyntaxException, DocumentException {
+ AbstractCommonList list = resource.getList(createPagedListUriInfo(resource.getServiceName(), pageNum, pageSize, sortBy));
+ List<String> csids = new ArrayList<String>();
+
+ if (list instanceof RelationsCommonList) {
+ for (RelationListItem item : ((RelationsCommonList) list).getRelationListItem()) {
+ csids.add(item.getCsid());
+ }
+ }
+ else {
+ for (AbstractCommonList.ListItem item : list.getListItem()) {
+ for (org.w3c.dom.Element element : item.getAny()) {
+
+ if (element.getTagName().equals("csid")) {
+ csids.add(element.getTextContent());
+ break;
+ }
+ }
+ }
+ }
+
+ return csids;
+ }
+
+ /** Lists csids of one page of collection object records, unsorted. */
+ protected List<String> findAllCollectionObjects(int pageSize, int pageNum) throws URISyntaxException, DocumentException {
+ return findAll(CollectionObjectClient.SERVICE_NAME, pageSize, pageNum);
+ }
+
+ /** Returns the csids of all vocabularies of the named authority service. */
+ protected List<String> getVocabularyCsids(String serviceName) throws URISyntaxException {
+ AuthorityResource<?, ?> resource = (AuthorityResource<?, ?>) getResourceMap().get(serviceName);
+
+ return getVocabularyCsids(resource);
+ }
+
+ /**
+ * Returns the csids of all vocabularies of the given authority resource.
+ * The list request uses createDeleteFilterUriInfo, presumably to exclude
+ * soft-deleted vocabularies — TODO confirm against that helper.
+ */
+ protected List<String> getVocabularyCsids(AuthorityResource<?, ?> resource) throws URISyntaxException {
+ AbstractCommonList vocabularyList = resource.getAuthorityList(createDeleteFilterUriInfo());
+ List<String> csids = new ArrayList<String>();
+
+ // Each list item exposes its csid as a generic DOM child element.
+ for (AbstractCommonList.ListItem item : vocabularyList.getListItem()) {
+ for (org.w3c.dom.Element element : item.getAny()) {
+ if (element.getTagName().equals("csid")) {
+ csids.add(element.getTextContent());
+ break;
+ }
+ }
+ }
+
+ return csids;
+ }
+
+ /** Lists csids of one page of items in the given vocabulary, unsorted. */
+ protected List<String> findAllAuthorityItems(String serviceName, String vocabularyCsid, int pageSize, int pageNum) throws URISyntaxException, DocumentException {
+ return findAllAuthorityItems(serviceName, vocabularyCsid, pageSize, pageNum, null);
+ }
+
+ /** Resolves the authority resource by service name, then lists item csids. */
+ protected List<String> findAllAuthorityItems(String serviceName, String vocabularyCsid, int pageSize, int pageNum, String sortBy) throws URISyntaxException, DocumentException {
+ AuthorityResource<?, ?> resource = (AuthorityResource<?, ?>) getResourceMap().get(serviceName);
+
+ return findAllAuthorityItems(resource, vocabularyCsid, pageSize, pageNum, sortBy);
+ }
+
+ protected List<String> findAllAuthorityItems(AuthorityResource<?, ?> resource, String vocabularyCsid, int pageSize, int pageNum, String sortBy) throws URISyntaxException, DocumentException {
+ AbstractCommonList list = resource.getAuthorityItemList(vocabularyCsid, createPagedListUriInfo(resource.getServiceName(), pageNum, pageSize, sortBy));
+ List<String> csids = new ArrayList<String>();
+
+ for (AbstractCommonList.ListItem item : list.getListItem()) {
+ for (org.w3c.dom.Element element : item.getAny()) {
+
+ if (element.getTagName().equals("csid")) {
+ csids.add(element.getTextContent());
+ break;
+ }
+ }
+ }
+
+ return csids;
+ }
+
    /**
     * Finds an authority item by csid, probing each vocabulary of the service in turn.
     *
     * @param serviceName the authority service name, e.g. "taxonomyauthority"
     * @param csid the csid of the item to find
     * @return the item payload, or null if no vocabulary was searched (empty vocabulary list)
     */
    protected PoxPayloadOut findAuthorityItemByCsid(String serviceName, String csid) throws URISyntaxException, DocumentException {
        List<String> vocabularyCsids = getVocabularyCsids(serviceName);
        PoxPayloadOut itemPayload = null;

        for (String vocabularyCsid : vocabularyCsids) {
            logger.debug("vocabularyCsid=" + vocabularyCsid);

            // FIXME: This throws DocumentNotFoundException, so will never go to the next vocabulary
            // NOTE(review): as the FIXME says, a miss in the first vocabulary appears to propagate
            // an exception instead of trying the next one — confirm and catch the not-found case.
            itemPayload = findAuthorityItemByCsid(serviceName, vocabularyCsid, csid);

            if (itemPayload != null) {
                break;
            }
        }

        return itemPayload;
    }
+
+ protected PoxPayloadOut findAuthorityItemByCsid(String serviceName, String vocabularyCsid, String csid) throws URISyntaxException, DocumentException {
+ AuthorityResource<?, ?> resource = (AuthorityResource<?, ?>) getResourceMap().get(serviceName);
+ byte[] response = resource.getAuthorityItem(null, createDeleteFilterUriInfo(), getResourceMap(), vocabularyCsid, csid);
+
+ PoxPayloadOut payload = new PoxPayloadOut(response);
+
+ return payload;
+ }
+
+ protected String getAuthorityServiceNameForDocType(String authorityDocType) {
+ if (authorityServiceNamesByDocType == null) {
+ authorityServiceNamesByDocType = new HashMap<String, String>();
+
+ for (String serviceName : getResourceMap().keySet()) {
+ AuthorityResource resource = (AuthorityResource) getResourceMap().get(serviceName);
+
+ if (resource instanceof AuthorityResource) {
+ AuthorityResource<?, ?> authorityResource = (AuthorityResource<?, ?>) resource;
+ String docType = authorityResource.getItemDocType(getTenantId());
+
+ authorityServiceNamesByDocType.put(docType, serviceName);
+ }
+ }
+ }
+
+ return authorityServiceNamesByDocType.get(authorityDocType);
+ }
+
+ protected PoxPayloadOut findTaxonByCsid(String csid) throws URISyntaxException, DocumentException {
+ return findAuthorityItemByCsid(TaxonomyAuthorityClient.SERVICE_NAME, csid);
+ }
+
+ protected PoxPayloadOut findAuthorityItemByShortId(String serviceName, String vocabularyShortId, String itemShortId) throws URISyntaxException, DocumentException {
+ AuthorityResource<?, ?> resource = (AuthorityResource<?, ?>) getResourceMap().get(serviceName);
+ byte[] response = resource.getAuthorityItem(null, createDeleteFilterUriInfo(), getResourceMap(), "urn:cspace:name(" + vocabularyShortId + ")", "urn:cspace:name(" + itemShortId + ")");
+
+ PoxPayloadOut payload = new PoxPayloadOut(response);
+
+ return payload;
+ }
+
+ protected PoxPayloadOut findAuthorityItemByRefName(String serviceName, String refName) throws URISyntaxException, DocumentException {
+ RefName.AuthorityItem item = RefName.AuthorityItem.parse(refName);
+
+ String vocabularyShortId = item.getParentShortIdentifier();
+ String itemShortId = item.getShortIdentifier();
+
+ return findAuthorityItemByShortId(serviceName, vocabularyShortId, itemShortId);
+ }
+
+ protected PoxPayloadOut findPlaceByRefName(String refName) throws URISyntaxException, DocumentException {
+ return findAuthorityItemByRefName(PlaceAuthorityClient.SERVICE_NAME, refName);
+ }
+
+ protected PoxPayloadOut findTaxonByRefName(String refName) throws URISyntaxException, DocumentException {
+ return findAuthorityItemByRefName(TaxonomyAuthorityClient.SERVICE_NAME, refName);
+ }
+
+ protected List<AuthorityRefDocList.AuthorityRefDocItem> findReferencingFields(String serviceName, String parentCsid, String csid, String type, int pageNum, int pageSize) throws URISyntaxException {
+ logger.debug("findReferencingFields serviceName=" + serviceName + " parentCsid=" + parentCsid + " csid=" + csid + " type=" + type);
+
+ AuthorityResource<?, ?> resource = (AuthorityResource<?, ?>) getResourceMap().get(serviceName);
+ UriTemplateRegistry uriTemplateRegistry = ServiceMain.getInstance().getUriTemplateRegistry();
+
+ // The pageNum and pageSize params don't work right for the refobj request.
+ // More items than the pageSize might be returned, and the next page may
+ // contain repeats of items already returned on the previous page. Any
+ // code that uses this function should be aware of this.
+
+ AuthorityRefDocList refDocList = resource.getReferencingObjects(parentCsid, csid, uriTemplateRegistry, createRefSearchFilterUriInfo(type, pageNum, pageSize));
+
+ return refDocList.getAuthorityRefDocItem();
+ }
+
+ /**
+ * Finds records that reference a given authority item. Soft-deleted records are not included.
+ *
+ * @param serviceName The name of the authority service, e.g. "personauthorities"
+ * @param parentCsid The csid of the authority instance (aka vocabulary). This may be a guid or a urn,
+ * e.g. "7a4981c4-77b7-433b-8086", "urn:cspace:name(person)"
+ * @param csid The csid of the authority item.
+ * @param type The meta-type of the referencing record, e.g. "object", "procedure", "authority".
+ * The possible values are any of the ServiceBindingUtils.SERVICE_TYPE_* constants.
+ * Only referencing records that are of the specified type are returned.
+ * If null, all referencing records of type "object", "procedure", and "authority"
+ * are returned.
+ * @param sourceField The name of the source field in the referencing record,
+ * e.g. "collectionobjects_common:fieldCollector".
+ * Only records that reference the given item in the specified field are returned.
+ * If null, returns records that reference the item in any field.
+ * @return A List containing the csids of referencing records.
+ * @throws URISyntaxException
+ */
+ protected List<String> findReferencingObjects(String serviceName, String parentCsid, String csid, String type, String sourceField) throws URISyntaxException {
+ logger.debug("findReferencingObjects serviceName=" + serviceName + " parentCsid=" + parentCsid + " csid=" + csid + " type=" + type + " sourceField=" + sourceField);
+
+ List<AuthorityRefDocList.AuthorityRefDocItem> items = findReferencingFields(serviceName, parentCsid, csid, type, 0, 0);
+ List<String> csids = new ArrayList<String>();
+
+ for (AuthorityRefDocList.AuthorityRefDocItem item : items) {
+ /*
+ * If a multivalue field contains a reference to the object multiple times, the referencing object
+ * seems to get returned multiple times in the list, but only the first has a non-null workflow state.
+ * A bug? Handle this by discarding list items with a null workflow state.
+ */
+
+ if (item.getWorkflowState() != null && !item.getWorkflowState().equals(WorkflowClient.WORKFLOWSTATE_DELETED) && (sourceField == null || item.getSourceField().equals(sourceField))) {
+ csids.add(item.getDocId());
+ }
+ }
+
+ return csids;
+ }
+
+ protected List<String> findReferencingObjects(String serviceName, String csid, String type, String sourceField) throws URISyntaxException, DocumentException {
+ logger.debug("findReferencingObjects serviceName=" + serviceName + " csid=" + csid + " type=" + type + " sourceField=" + sourceField);
+
+ List<String> vocabularyCsids = getVocabularyCsids(serviceName);
+ String parentCsid = null;
+
+ if (vocabularyCsids.size() == 1) {
+ parentCsid = vocabularyCsids.get(0);
+ }
+ else {
+ for (String vocabularyCsid : vocabularyCsids) {
+ PoxPayloadOut itemPayload = findAuthorityItemByCsid(serviceName, vocabularyCsid, csid);
+
+ if (itemPayload != null) {
+ parentCsid = vocabularyCsid;
+ break;
+ }
+ }
+ }
+
+ return findReferencingObjects(serviceName, parentCsid, csid, type, sourceField);
+ }
+
+ protected List<String> findReferencingCollectionObjects(String serviceName, String csid, String sourceField) throws URISyntaxException, DocumentException {
+ return findReferencingObjects(serviceName, csid, ServiceBindingUtils.SERVICE_TYPE_OBJECT, sourceField);
+ }
+
+ protected List<String> findReferencingCollectionObjects(String serviceName, String vocabularyShortId, String csid, String sourceField) throws URISyntaxException, DocumentException {
+ return findReferencingObjects(serviceName, "urn:cspace:name(" + vocabularyShortId + ")", csid, ServiceBindingUtils.SERVICE_TYPE_OBJECT, sourceField);
+ }
+
+ /**
+ * Create a stub UriInfo
+ *
+ * @throws URISyntaxException
+ */
+ protected UriInfo createUriInfo() throws URISyntaxException {
+ return createUriInfo("");
+ }
+
+ protected UriInfo createUriInfo(String queryString) throws URISyntaxException {
+ return createUriInfo(queryString, Collections.<PathSegment> emptyList());
+ }
+
+ protected UriInfo createUriInfo(String queryString, List<PathSegment> pathSegments) throws URISyntaxException {
+ queryString = escapeQueryString(queryString);
+
+ URI absolutePath = new URI("");
+ URI baseUri = new URI("");
+
+ return new UriInfoImpl(absolutePath, baseUri, "", queryString, pathSegments);
+ }
+
+ protected UriInfo createDeleteFilterUriInfo() throws URISyntaxException {
+ return createUriInfo("wf_deleted=false&pgSz=0");
+ }
+
+ protected UriInfo createKeywordSearchUriInfo(String schemaName, String fieldName, String value) throws URISyntaxException {
+ return createUriInfo("kw=&as=( (" +schemaName + ":" + fieldName + " ILIKE \"" + value + "\") )&wf_deleted=false&pgSz=0");
+ }
+
+ protected UriInfo createRelationSearchUriInfo(String subjectCsid, String subjectDocType, String predicate, String objectCsid, String objectDocType) throws URISyntaxException {
+ List<String> queryParams = new ArrayList<String>(6);
+
+ if (StringUtils.isNotEmpty(subjectCsid)) {
+ queryParams.add(IRelationsManager.SUBJECT_QP + "=" + subjectCsid);
+ }
+
+ if (StringUtils.isNotEmpty(subjectDocType)) {
+ queryParams.add(IRelationsManager.SUBJECT_TYPE_QP + "=" + subjectDocType);
+ }
+
+ if (StringUtils.isNotEmpty(predicate)) {
+ queryParams.add(IRelationsManager.PREDICATE_QP + "=" + predicate);
+ }
+
+ if (StringUtils.isNotEmpty(objectCsid)) {
+ queryParams.add(IRelationsManager.OBJECT_QP + "=" + objectCsid);
+ }
+
+ if (StringUtils.isNotEmpty(objectDocType)) {
+ queryParams.add(IRelationsManager.OBJECT_TYPE_QP + "=" + objectDocType);
+ }
+
+ queryParams.add("wf_deleted=false");
+ queryParams.add("pgSz=0");
+
+ return createUriInfo(StringUtils.join(queryParams, "&"));
+ }
+
+ protected UriInfo createRefSearchFilterUriInfo(String type) throws URISyntaxException {
+ return createRefSearchFilterUriInfo(type, 1, 0);
+ }
+
+ protected UriInfo createRefSearchFilterUriInfo(String type, int pageNum, int pageSize) throws URISyntaxException {
+ String queryString = "wf_deleted=false&pgSz=" + pageSize + "&pgNum=" + pageNum;
+
+ if (type != null) {
+ queryString = "type=" + type + "&" + queryString;
+ }
+
+ return createUriInfo(queryString);
+ }
+
+ protected UriInfo createPagedListUriInfo(String serviceName, int pageNum, int pageSize) throws URISyntaxException {
+ return createPagedListUriInfo(serviceName, pageNum, pageSize, null);
+ }
+
+ protected UriInfo createPagedListUriInfo(String serviceName, int pageNum, int pageSize, String sortBy) throws URISyntaxException {
+ List<PathSegment> pathSegments = new ArrayList<PathSegment>(1);
+ pathSegments.add(new PathSegmentImpl(serviceName, false));
+
+ return createUriInfo("pgSz=" + pageSize + "&pgNum=" + pageNum + (sortBy != null ? "&sortBy=" + sortBy : "") + "&wf_deleted=false", pathSegments);
+ }
+
+ protected String escapeQueryString(String queryString) throws URISyntaxException {
+ URI uri = new URI(null, null, null, queryString, null);
+
+ return uri.getRawQuery();
+ }
+
+ /**
+ * Get a field value from a PoxPayloadOut, given a part name and xpath expression.
+ */
+ protected String getFieldValue(PoxPayloadOut payload, String partLabel, String fieldPath) {
+ String value = null;
+ PayloadOutputPart part = payload.getPart(partLabel);
+
+ if (part != null) {
+ Element element = part.asElement();
+ Node node = element.selectSingleNode(fieldPath);
+
+ if (node != null) {
+ value = node.getText();
+ }
+ }
+
+ return value;
+ }
+
+ protected String getFieldValue(PoxPayloadOut payload, String fieldPath) {
+ String value = null;
+
+ for (PayloadOutputPart part : payload.getParts()) {
+ Element element = part.asElement();
+ Node node = element.selectSingleNode(fieldPath);
+
+ if (node != null) {
+ value = node.getText();
+ break;
+ }
+ }
+
+ return value;
+ }
+
+ protected boolean getBooleanFieldValue(PoxPayloadOut payload, String partLabel, String fieldPath) {
+ String value = getFieldValue(payload, partLabel, fieldPath);
+
+ return (value != null && value.equals("true"));
+ }
+
+ protected List<String> getFieldValues(PoxPayloadOut payload, String partLabel, String fieldPath) {
+ List<String> values = new ArrayList<String>();
+ PayloadOutputPart part = payload.getPart(partLabel);
+
+ if (part != null) {
+ Element element = part.asElement();
+ List<Node> nodes = element.selectNodes(fieldPath);
+
+ if (nodes != null) {
+ for (Node node : nodes) {
+ values.add(node.getText());
+ }
+ }
+ }
+
+ return values;
+ }
+
+ protected String getDisplayNameFromRefName(String refName) {
+ RefName.AuthorityItem item = RefName.AuthorityItem.parse(refName);
+
+ return (item == null ? refName : item.displayName);
+ }
+
+ protected String getCsid(PoxPayloadOut payload) {
+ String uri = getFieldValue(payload, CollectionSpaceClient.COLLECTIONSPACE_CORE_SCHEMA, CollectionSpaceClient.COLLECTIONSPACE_CORE_URI);
+ String[] elements = StringUtils.split(uri, '/');
+ String csid = elements[elements.length - 1];
+
+ return csid;
+ }
+
+ protected String getRefName(PoxPayloadOut payload) {
+ return getFieldValue(payload, CollectionSpaceClient.COLLECTIONSPACE_CORE_SCHEMA, CollectionSpaceClient.COLLECTIONSPACE_CORE_REFNAME);
+ }
+
    /**
     * Exception carrying the JAX-RS Response of a failed resource call, so callers can
     * inspect the HTTP status and entity alongside the message.
     *
     * NOTE(review): this is a non-static inner class and therefore holds a reference to
     * the enclosing batch job instance; making it static would drop that reference but
     * could affect existing instantiation sites — confirm before changing.
     */
    protected class ResourceException extends Exception {
        private static final long serialVersionUID = 1L;

        // The response returned by the failing resource call.
        private Response response;

        /**
         * @param response the failing response
         * @param message a description of the failure
         */
        public ResourceException(Response response, String message) {
            super(message);
            this.setResponse(response);
        }

        public Response getResponse() {
            return response;
        }

        public void setResponse(Response response) {
            this.response = response;
        }
    }
+}
protected final int BAD_REQUEST_STATUS = Response.Status.BAD_REQUEST.getStatusCode();
public InvocationResults invokeBatchJob(ServiceContext<PoxPayloadIn, PoxPayloadOut> ctx, String csid,
- ResourceMap resourceMap, InvocationContext invocationCtx) throws Exception {
-
- CoreSessionInterface repoSession = null;
- boolean releaseRepoSession = false;
+ ResourceMap<PoxPayloadIn, PoxPayloadOut> resourceMap, InvocationContext invocationCtx) throws Exception {
String invocationMode = invocationCtx.getMode();
String modeProperty = null;
throw new BadRequestException("BatchResource: unknown Invocation Mode: " + invocationMode);
}
+ //
+ // Acquire a repository session handle if we don't already have one
+ boolean releaseRepoSession = false;
+ CoreSessionInterface repoSession = this.getRepositorySession();
RepositoryClientImpl repoClient = (RepositoryClientImpl) this.getRepositoryClient(ctx);
- repoSession = this.getRepositorySession();
if (repoSession == null) {
repoSession = repoClient.getRepositorySession(ctx);
releaseRepoSession = true;
}
}
className = (String) NuxeoUtils.getProperyValue(docModel, BatchJAXBSchema.BATCH_CLASS_NAME); //docModel.getPropertyValue(BatchJAXBSchema.BATCH_CLASS_NAME);
+
+ className = className.trim();
+ ClassLoader tccl = Thread.currentThread().getContextClassLoader();
+ Class<?> c = tccl.loadClass(className);
+ // enable validation assertions
+ tccl.setClassAssertionStatus(className, true);
+ if (!BatchInvocable.class.isAssignableFrom(c)) {
+ throw new RuntimeException("BatchResource: Class: " + className + " does not implement BatchInvocable!");
+ }
+
+ BatchInvocable batchInstance = (BatchInvocable) c.newInstance();
+ List<String> modes = batchInstance.getSupportedInvocationModes();
+ if (!modes.contains(invocationMode)) {
+ throw new BadRequestException("BatchResource: Invoked with unsupported context mode: " + invocationMode);
+ }
+
+ batchInstance.setInvocationContext(invocationCtx);
+ batchInstance.setServiceContext(ctx);
+
+ if (resourceMap != null) {
+ batchInstance.setResourceMap(resourceMap);
+ } else {
+ resourceMap = ResteasyProviderFactory.getContextData(ResourceMap.class);
+ if (resourceMap != null) {
+ batchInstance.setResourceMap(resourceMap);
+ } else {
+ logger.warn("BatchResource.invoke did not get a resourceMapHolder in Context!");
+ }
+ }
+
+ batchInstance.run();
+ int status = batchInstance.getCompletionStatus();
+ if (status == Invocable.STATUS_ERROR) {
+ InvocationError error = batchInstance.getErrorInfo();
+ if (error.getResponseCode() == BAD_REQUEST_STATUS) {
+ throw new BadRequestException("BatchResouce: batchProcess encountered error: "
+ + batchInstance.getErrorInfo());
+ } else {
+ throw new RuntimeException("BatchResouce: batchProcess encountered error: "
+ + batchInstance.getErrorInfo());
+
+ }
+ }
+
+ InvocationResults results = batchInstance.getResults();
+ return results;
} catch (PropertyException pe) {
if (logger.isDebugEnabled()) {
logger.debug("Property exception getting batch values: ", pe);
}
}
- className = className.trim();
- ClassLoader tccl = Thread.currentThread().getContextClassLoader();
- Class<?> c = tccl.loadClass(className);
- // enable validation assertions
- tccl.setClassAssertionStatus(className, true);
- if (!BatchInvocable.class.isAssignableFrom(c)) {
- throw new RuntimeException("BatchResource: Class: " + className + " does not implement BatchInvocable!");
- }
-
- BatchInvocable batchInstance = (BatchInvocable) c.newInstance();
- List<String> modes = batchInstance.getSupportedInvocationModes();
- if (!modes.contains(invocationMode)) {
- throw new BadRequestException("BatchResource: Invoked with unsupported context mode: " + invocationMode);
- }
-
- batchInstance.setInvocationContext(invocationCtx);
- batchInstance.setServiceContext(ctx);
-
- if (resourceMap != null) {
- batchInstance.setResourceMap(resourceMap);
- } else {
- resourceMap = ResteasyProviderFactory.getContextData(ResourceMap.class);
- if (resourceMap != null) {
- batchInstance.setResourceMap(resourceMap);
- } else {
- logger.warn("BatchResource.invoke did not get a resourceMapHolder in Context!");
- }
- }
-
- batchInstance.run();
- int status = batchInstance.getCompletionStatus();
- if (status == Invocable.STATUS_ERROR) {
- InvocationError error = batchInstance.getErrorInfo();
- if (error.getResponseCode() == BAD_REQUEST_STATUS) {
- throw new BadRequestException("BatchResouce: batchProcess encountered error: "
- + batchInstance.getErrorInfo());
- } else {
- throw new RuntimeException("BatchResouce: batchProcess encountered error: "
- + batchInstance.getErrorInfo());
-
- }
- }
-
- InvocationResults results = batchInstance.getResults();
- return results;
}
}
--- /dev/null
+package org.collectionspace.services.batch.nuxeo;
+
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import javax.ws.rs.core.UriInfo;
+
+import org.collectionspace.services.client.MovementClient;
+import org.collectionspace.services.common.NuxeoBasedResource;
+import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.jaxb.AbstractCommonList;
+import org.collectionspace.services.movement.MovementResource;
+import org.collectionspace.services.movement.nuxeo.MovementBotGardenConstants;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class ClearLocationLabelRequestBatchJob extends AbstractBatchJob {
+ final Logger logger = LoggerFactory.getLogger(ClearLocationLabelRequestBatchJob.class);
+
+ public ClearLocationLabelRequestBatchJob() {
+ setSupportedInvocationModes(Arrays.asList(INVOCATION_MODE_SINGLE, INVOCATION_MODE_LIST, INVOCATION_MODE_NO_CONTEXT));
+ }
+
+ public void run() {
+ setCompletionStatus(STATUS_MIN_PROGRESS);
+
+ try {
+ /*
+ * For now, treat any mode as if it were no context.
+ */
+
+ setResults(clearLabelRequests());
+ setCompletionStatus(STATUS_COMPLETE);
+ }
+ catch(Exception e) {
+ setCompletionStatus(STATUS_ERROR);
+ setErrorInfo(new InvocationError(INT_ERROR_STATUS, e.getMessage()));
+ }
+ }
+
+ public InvocationResults clearLabelRequests() throws URISyntaxException {
+ List<String> movementCsids = findLabelRequests();
+ InvocationResults results = null;
+
+ if (movementCsids.size() > 0) {
+ results = clearLabelRequests(movementCsids);
+ }
+ else {
+ results = new InvocationResults();
+ results.setUserNote("No label requests found");
+ }
+
+ return results;
+ }
+
+ public InvocationResults clearLabelRequests(String movementCsid) throws URISyntaxException {
+ return clearLabelRequests(Arrays.asList(movementCsid));
+ }
+
+ public InvocationResults clearLabelRequests(List<String> movementCsids) throws URISyntaxException {
+ InvocationResults results = new InvocationResults();
+ long numAffected = 0;
+
+ for (String movementCsid : movementCsids) {
+ clearLabelRequest(movementCsid);
+ numAffected = numAffected + 1;
+ }
+
+ results.setNumAffected(numAffected);
+ results.setUserNote("Removed " + numAffected + " label " + (numAffected == 1 ? "request" : "requests"));
+
+ return results;
+ }
+
+ private void clearLabelRequest(String movementCsid) throws URISyntaxException {
+ logger.debug("clear label request: movementCsid=" + movementCsid);
+
+ final String updatePayload =
+ "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
+ "<document name=\"movements\">" +
+ "<ns2:movements_common xmlns:ns2=\"http://collectionspace.org/services/movement\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">" +
+ getFieldXml("reasonForMove", MovementBotGardenConstants.OTHER_ACTION_CODE) +
+ "</ns2:movements_common>" +
+ "<ns2:movements_botgarden xmlns:ns2=\"http://collectionspace.org/services/movement/local/botgarden\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">" +
+ getFieldXml("labelRequested", MovementBotGardenConstants.LABEL_REQUESTED_NO_VALUE) +
+ getFieldXml("labelSize", "") +
+ getFieldXml("labelStandType", "") +
+ getFieldXml("labelCount", "") +
+ "</ns2:movements_botgarden>" +
+ "</document>";
+
+ NuxeoBasedResource resource = (NuxeoBasedResource) getResourceMap().get(MovementClient.SERVICE_NAME);
+ resource.update(getResourceMap(), createUriInfo(), movementCsid, updatePayload);
+ }
+
+ private List<String> findLabelRequests() throws URISyntaxException {
+ List<String> csids = new ArrayList<String>();
+ MovementResource movementResource = (MovementResource) getResourceMap().get(MovementClient.SERVICE_NAME);
+ AbstractCommonList movementList = movementResource.getList(createLabelRequestSearchUriInfo());
+
+ for (AbstractCommonList.ListItem item : movementList.getListItem()) {
+ for (org.w3c.dom.Element element : item.getAny()) {
+ if (element.getTagName().equals("csid")) {
+ csids.add(element.getTextContent());
+ break;
+ }
+ }
+ }
+
+ return csids;
+ }
+
+ private UriInfo createLabelRequestSearchUriInfo() throws URISyntaxException {
+ return createKeywordSearchUriInfo(MovementBotGardenConstants.LABEL_REQUESTED_SCHEMA_NAME, MovementBotGardenConstants.LABEL_REQUESTED_FIELD_NAME,
+ MovementBotGardenConstants.LABEL_REQUESTED_YES_VALUE);
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.collectionspace.services.batch.nuxeo;
+
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import javax.ws.rs.core.UriInfo;
+
+import org.collectionspace.services.client.PottagClient;
+import org.collectionspace.services.common.NuxeoBasedResource;
+import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.jaxb.AbstractCommonList;
+import org.collectionspace.services.pottag.PottagResource;
+import org.collectionspace.services.pottag.nuxeo.PottagConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class ClearPotTagLabelRequestBatchJob extends AbstractBatchJob {
+ final Logger logger = LoggerFactory.getLogger(ClearPotTagLabelRequestBatchJob.class);
+
+ public ClearPotTagLabelRequestBatchJob() {
+ setSupportedInvocationModes(Arrays.asList(INVOCATION_MODE_SINGLE, INVOCATION_MODE_LIST, INVOCATION_MODE_NO_CONTEXT));
+ }
+
+ public void run() {
+ setCompletionStatus(STATUS_MIN_PROGRESS);
+
+ try {
+ /*
+ * For now, treat any mode as if it were no context.
+ */
+
+ setResults(clearLabelRequests());
+ setCompletionStatus(STATUS_COMPLETE);
+ }
+ catch(Exception e) {
+ setCompletionStatus(STATUS_ERROR);
+ setErrorInfo(new InvocationError(INT_ERROR_STATUS, e.getMessage()));
+ }
+ }
+
+ public InvocationResults clearLabelRequests() throws URISyntaxException {
+ List<String> potTagCsids = findLabelRequests();
+ InvocationResults results = null;
+
+ if (potTagCsids.size() > 0) {
+ results = clearLabelRequests(potTagCsids);
+ }
+ else {
+ results = new InvocationResults();
+ results.setUserNote("No label requests found");
+ }
+
+ return results;
+ }
+
+ public InvocationResults clearLabelRequests(String potTagCsid) throws URISyntaxException {
+ return clearLabelRequests(Arrays.asList(potTagCsid));
+ }
+
+ public InvocationResults clearLabelRequests(List<String> potTagCsids) throws URISyntaxException {
+ InvocationResults results = new InvocationResults();
+ long numAffected = 0;
+
+ for (String potTagCsid : potTagCsids) {
+ clearLabelRequest(potTagCsid);
+ numAffected = numAffected + 1;
+ }
+
+ results.setNumAffected(numAffected);
+ results.setUserNote("Removed " + numAffected + " label " + (numAffected == 1 ? "request" : "requests"));
+
+ return results;
+ }
+
+ private void clearLabelRequest(String potTagCsid) throws URISyntaxException {
+ logger.debug("clear label request: potTagCsid=" + potTagCsid);
+
+ final String updatePayload =
+ "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
+ "<document name=\"pottags\">" +
+ "<ns2:pottags_common xmlns:ns2=\"http://collectionspace.org/services/pottag\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">" +
+ getFieldXml(PottagConstants.LABEL_REQUESTED_FIELD_NAME, PottagConstants.LABEL_REQUESTED_NO_VALUE) +
+ "</ns2:pottags_common>" +
+ "</document>";
+
+ NuxeoBasedResource resource = (NuxeoBasedResource) getResourceMap().get(PottagClient.SERVICE_NAME);
+ resource.update(getResourceMap(), createUriInfo(), potTagCsid, updatePayload);
+ }
+
+ private List<String> findLabelRequests() throws URISyntaxException {
+ List<String> csids = new ArrayList<String>();
+ PottagResource potTagResource = (PottagResource) getResourceMap().get(PottagClient.SERVICE_NAME);
+ AbstractCommonList potTagList = potTagResource.getList(createLabelRequestSearchUriInfo());
+
+ for (AbstractCommonList.ListItem item : potTagList.getListItem()) {
+ for (org.w3c.dom.Element element : item.getAny()) {
+ if (element.getTagName().equals("csid")) {
+ csids.add(element.getTextContent());
+ break;
+ }
+ }
+ }
+
+ return csids;
+ }
+
+ private UriInfo createLabelRequestSearchUriInfo() throws URISyntaxException {
+ return createKeywordSearchUriInfo(PottagConstants.LABEL_REQUESTED_SCHEMA_NAME, PottagConstants.LABEL_REQUESTED_FIELD_NAME, PottagConstants.LABEL_REQUESTED_YES_VALUE);
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.collectionspace.services.batch.nuxeo;
+
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import javax.ws.rs.core.UriInfo;
+
+import org.collectionspace.services.client.LoanoutClient;
+import org.collectionspace.services.common.NuxeoBasedResource;
+import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.jaxb.AbstractCommonList;
+import org.collectionspace.services.loanout.LoanoutResource;
+import org.collectionspace.services.loanout.nuxeo.LoanoutBotGardenConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class ClearVoucherLabelRequestBatchJob extends AbstractBatchJob {
+ final Logger logger = LoggerFactory.getLogger(ClearVoucherLabelRequestBatchJob.class);
+
+ public ClearVoucherLabelRequestBatchJob() {
+ this.setSupportedInvocationModes(Arrays.asList(INVOCATION_MODE_SINGLE, INVOCATION_MODE_LIST, INVOCATION_MODE_NO_CONTEXT));
+ }
+
+ public void run() {
+ setCompletionStatus(STATUS_MIN_PROGRESS);
+
+ try {
+ /*
+ * For now, treat any mode as if it were no context.
+ */
+
+ setResults(clearLabelRequests());
+ setCompletionStatus(STATUS_COMPLETE);
+ }
+ catch(Exception e) {
+ setCompletionStatus(STATUS_ERROR);
+ setErrorInfo(new InvocationError(INT_ERROR_STATUS, e.getMessage()));
+ }
+ }
+
+ public InvocationResults clearLabelRequests() throws URISyntaxException {
+ List<String> loanoutCsids = findLabelRequests();
+ InvocationResults results = null;
+
+ if (loanoutCsids.size() > 0) {
+ results = clearLabelRequests(loanoutCsids);
+ }
+ else {
+ results = new InvocationResults();
+ results.setUserNote("No label requests found");
+ }
+
+ return results;
+ }
+
+ public InvocationResults clearLabelRequests(String loanoutCsid) throws URISyntaxException {
+ return clearLabelRequests(Arrays.asList(loanoutCsid));
+ }
+
+ public InvocationResults clearLabelRequests(List<String> loanoutCsids) throws URISyntaxException {
+ InvocationResults results = new InvocationResults();
+ long numAffected = 0;
+
+ for (String loanoutCsid : loanoutCsids) {
+ clearLabelRequest(loanoutCsid);
+ numAffected = numAffected + 1;
+ }
+
+ results.setNumAffected(numAffected);
+ results.setUserNote("Removed " + numAffected + " label " + (numAffected == 1 ? "request" : "requests"));
+
+ return results;
+ }
+
+ private void clearLabelRequest(String loanoutCsid) throws URISyntaxException {
+ logger.debug("clear label request: loanoutCsid=" + loanoutCsid);
+
+ final String updatePayload =
+ "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
+ "<document name=\"loansout\">" +
+ "<ns2:loansout_botgarden xmlns:ns2=\"http://collectionspace.org/services/loanout/local/botgarden\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">" +
+ getFieldXml("labelRequested", LoanoutBotGardenConstants.LABEL_REQUESTED_NO_VALUE) +
+ "</ns2:loansout_botgarden>" +
+ "</document>";
+
+ NuxeoBasedResource resource = (NuxeoBasedResource) getResourceMap().get(LoanoutClient.SERVICE_NAME);
+ resource.update(getResourceMap(), createUriInfo(), loanoutCsid, updatePayload);
+ }
+
+ private List<String> findLabelRequests() throws URISyntaxException {
+ List<String> csids = new ArrayList<String>();
+ LoanoutResource loanoutResource = (LoanoutResource) getResourceMap().get(LoanoutClient.SERVICE_NAME);
+ AbstractCommonList loanoutList = loanoutResource.getList(createLabelRequestSearchUriInfo());
+
+ for (AbstractCommonList.ListItem item : loanoutList.getListItem()) {
+ for (org.w3c.dom.Element element : item.getAny()) {
+ if (element.getTagName().equals("csid")) {
+ csids.add(element.getTextContent());
+ break;
+ }
+ }
+ }
+
+ return csids;
+ }
+
+ private UriInfo createLabelRequestSearchUriInfo() throws URISyntaxException {
+ return createKeywordSearchUriInfo(LoanoutBotGardenConstants.LABEL_REQUESTED_SCHEMA_NAME, LoanoutBotGardenConstants.LABEL_REQUESTED_FIELD_NAME,
+ LoanoutBotGardenConstants.LABEL_REQUESTED_YES_VALUE);
+ }
+}
\ No newline at end of file
import javax.ws.rs.core.Response;
-import org.collectionspace.services.batch.BatchInvocable;
+import org.collectionspace.services.batch.AbstractBatchInvocable;
import org.collectionspace.services.client.CollectionSpaceClientUtils;
import org.collectionspace.services.common.NuxeoBasedResource;
-import org.collectionspace.services.common.ResourceMap;
import org.collectionspace.services.common.api.GregorianCalendarDateTimeUtils;
-import org.collectionspace.services.common.context.ServiceContext;
import org.collectionspace.services.common.invocable.InvocationContext;
import org.collectionspace.services.common.invocable.InvocationResults;
import org.collectionspace.services.client.LoanoutClient;
import org.collectionspace.services.client.RelationClient;
-public class CreateAndLinkLoanOutBatchJob implements BatchInvocable {
+public class CreateAndLinkLoanOutBatchJob extends AbstractBatchInvocable {
private static ArrayList<String> invocationModes = null;
private InvocationContext invocationCtx;
- private ServiceContext ctx;
private int completionStatus;
- private ResourceMap resourceMap;
private InvocationResults results;
private InvocationError errorInfo;
private final String RELATION_TYPE = "affects";
- private final String LOAN_DOCTYPE = "LoanOut";
+ private final String LOAN_DOCTYPE = "LoanOut";
private final String RELATION_PREDICATE_DISP = "affects";
protected final int CREATED_STATUS = Response.Status.CREATED.getStatusCode();
protected final int BAD_REQUEST_STATUS = Response.Status.BAD_REQUEST.getStatusCode();
CreateAndLinkLoanOutBatchJob.setupClassStatics();
invocationCtx = null;
completionStatus = STATUS_UNSTARTED;
- resourceMap = null;
results = new InvocationResults();
errorInfo = null;
}
return CreateAndLinkLoanOutBatchJob.invocationModes;
}
- @Override
- public void setServiceContext(ServiceContext context) {
- this.ctx = context;
- }
- @Override
- public ServiceContext getServiceContext() {
- return ctx;
- }
-
@Override
public InvocationContext getInvocationContext() {
return invocationCtx;
this.invocationCtx = context;
}
- /**
- * Sets the invocation context for the batch job. Called before run().
- * @param invocationCtx an instance of InvocationContext.
- */
- public void setResourceMap(ResourceMap resourceMap) {
- this.resourceMap = resourceMap;
- }
-
/**
* The main work logic of the batch job. Will be called after setContext.
*/
// First, create the Loanout
// We fetch the resource class by service name
- NuxeoBasedResource resource = (NuxeoBasedResource) resourceMap.get( LoanoutClient.SERVICE_NAME);
- Response response = resource.create(resourceMap, null, loanoutPayload);
+ NuxeoBasedResource resource = (NuxeoBasedResource) getResourceMap().get(LoanoutClient.SERVICE_NAME);
+ Response response = resource.create(getResourceMap(), null, loanoutPayload);
if(response.getStatus() != CREATED_STATUS) {
completionStatus = STATUS_ERROR;
errorInfo = new InvocationError(INT_ERROR_STATUS,
+ "<relationshipType>"+RELATION_TYPE+"</relationshipType>"
+ "<predicateDisplayName>"+RELATION_PREDICATE_DISP+"</predicateDisplayName>"
+ "</ns2:relations_common></document>";
- NuxeoBasedResource resource = (NuxeoBasedResource) resourceMap.get(RelationClient.SERVICE_NAME);
- Response response = resource.create(resourceMap, null, relationPayload);
+ NuxeoBasedResource resource = (NuxeoBasedResource) getResourceMap().get(RelationClient.SERVICE_NAME);
+ Response response = resource.create(getResourceMap(), null, relationPayload);
if(response.getStatus() != CREATED_STATUS) {
completionStatus = STATUS_ERROR;
errorInfo = new InvocationError(INT_ERROR_STATUS,
--- /dev/null
+package org.collectionspace.services.batch.nuxeo;
+
+import java.net.URISyntaxException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Response;
+
+import org.apache.commons.lang.StringUtils;
+import org.collectionspace.services.client.CollectionSpaceClientUtils;
+import org.collectionspace.services.client.LoanoutClient;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.client.workflow.WorkflowClient;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectBotGardenConstants;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectConstants;
+import org.collectionspace.services.common.NuxeoBasedResource;
+import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.common.relation.nuxeo.RelationConstants;
+import org.collectionspace.services.loanout.nuxeo.LoanoutBotGardenConstants;
+import org.collectionspace.services.loanout.nuxeo.LoanoutConstants;
+import org.collectionspace.services.movement.nuxeo.MovementConstants;
+import org.collectionspace.services.place.nuxeo.PlaceBotGardenConstants;
+import org.collectionspace.services.place.nuxeo.PlaceConstants;
+import org.dom4j.DocumentException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class CreateVoucherBatchJob extends AbstractBatchJob {
+ final Logger logger = LoggerFactory.getLogger(CreateVoucherBatchJob.class);
+
+ public CreateVoucherBatchJob() {
+ setSupportedInvocationModes(Arrays.asList(INVOCATION_MODE_SINGLE));
+ }
+
+ public void run() {
+ setCompletionStatus(STATUS_MIN_PROGRESS);
+
+ try {
+ String mode = getInvocationContext().getMode();
+
+ if (!mode.equalsIgnoreCase(INVOCATION_MODE_SINGLE)) {
+ throw new Exception("Unsupported invocation mode: " + mode);
+ }
+
+ String csid = getInvocationContext().getSingleCSID();
+
+ if (StringUtils.isEmpty(csid)) {
+ throw new Exception("Missing context csid");
+ }
+
+ String docType = getInvocationContext().getDocType();
+
+ if (docType.equals(CollectionObjectConstants.NUXEO_DOCTYPE)) {
+ setResults(createVoucherFromCataloging(csid));
+ }
+ else if (docType.equals(MovementConstants.NUXEO_DOCTYPE)) {
+ setResults(createVoucherFromCurrentLocation(csid));
+ }
+ else {
+ throw new Exception("Unsupported docType: " + docType);
+ }
+
+ setCompletionStatus(STATUS_COMPLETE);
+ }
+ catch(Exception e) {
+ setCompletionStatus(STATUS_ERROR);
+ setErrorInfo(new InvocationError(INT_ERROR_STATUS, e.getMessage()));
+ }
+ }
+
+ public InvocationResults createVoucherFromCataloging(String collectionObjectCsid) throws ResourceException, URISyntaxException, DocumentException {
+ return createVoucherFromCataloging(collectionObjectCsid, null);
+ }
+
+ public InvocationResults createVoucherFromCataloging(String collectionObjectCsid, String movementCsid) throws ResourceException, URISyntaxException, DocumentException {
+ InvocationResults results = new InvocationResults();
+
+ PoxPayloadOut collectionObjectPayload = findCollectionObjectByCsid(collectionObjectCsid);
+ String collectionObjectWorkflowState = getFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.WORKFLOW_STATE_SCHEMA_NAME,
+ CollectionObjectBotGardenConstants.WORKFLOW_STATE_FIELD_NAME);
+
+ if (collectionObjectWorkflowState.equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+ logger.debug("skipping deleted collectionobject: collectionObjectCsid=" + collectionObjectCsid);
+
+ results.setNumAffected(0);
+ results.setUserNote("skipped deleted record");
+ }
+ else {
+ Map<String, String> botGardenFields = new HashMap<String, String>();
+
+ if (movementCsid == null) {
+ movementCsid = findSingleRelatedMovement(collectionObjectCsid);
+ }
+
+ if (movementCsid != null) {
+ PoxPayloadOut movementPayload = findMovementByCsid(movementCsid);
+
+ if (movementPayload != null) {
+ botGardenFields.put("gardenLocation", getFieldValue(movementPayload, MovementConstants.CURRENT_LOCATION_SCHEMA_NAME, MovementConstants.CURRENT_LOCATION_FIELD_NAME));
+ }
+ }
+
+ botGardenFields.put("fieldCollectionNote", getFieldCollectionNote(collectionObjectPayload));
+ botGardenFields.put("annotation", getAnnotation(collectionObjectPayload));
+ botGardenFields.put("labelRequested", LoanoutBotGardenConstants.LABEL_REQUESTED_NO_VALUE);
+
+ Map<String, String> naturalHistoryFields = new HashMap<String, String>();
+ naturalHistoryFields.put("numLent", "1");
+
+ String voucherCsid = createVoucher(botGardenFields, naturalHistoryFields);
+ logger.debug("voucher created: voucherCsid=" + voucherCsid);
+
+ String forwardRelationCsid = createRelation(voucherCsid, LoanoutConstants.NUXEO_DOCTYPE, collectionObjectCsid, CollectionObjectConstants.NUXEO_DOCTYPE, RelationConstants.AFFECTS_TYPE);
+ String backwardRelationCsid = createRelation(collectionObjectCsid, CollectionObjectConstants.NUXEO_DOCTYPE, voucherCsid, LoanoutConstants.NUXEO_DOCTYPE, RelationConstants.AFFECTS_TYPE);
+ logger.debug("relations created: forwardRelationCsid=" + forwardRelationCsid + " backwardRelationCsid=" + backwardRelationCsid);
+
+ results.setNumAffected(1);
+ results.setPrimaryURICreated("loanout.html?csid=" + voucherCsid);
+ results.setUserNote("Voucher created");
+ }
+
+ return results;
+ }
+
+ private String getFieldCollectionNote(PoxPayloadOut collectionObjectPayload) throws URISyntaxException, DocumentException {
+ String placeNote = "";
+ String reverseFieldCollectionPlace = getReverseFieldCollectionPlace(collectionObjectPayload);
+
+ if (StringUtils.isNotBlank(reverseFieldCollectionPlace)) {
+ placeNote = reverseFieldCollectionPlace;
+ }
+ else {
+ String taxonomicRange = this.getFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.TAXONOMIC_RANGE_SCHEMA_NAME,
+ CollectionObjectBotGardenConstants.TAXONOMIC_RANGE_FIELD_NAME);
+
+ if (StringUtils.isNotBlank(taxonomicRange)) {
+ placeNote = "Geographic range " + taxonomicRange;
+ }
+ }
+
+ String comment = this.getFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.COMMENT_SCHEMA_NAME,
+ CollectionObjectBotGardenConstants.COMMENT_FIELD_NAME);
+ String collectionNote = "";
+
+ if (StringUtils.isNotBlank(placeNote) && StringUtils.isNotBlank(comment)) {
+ collectionNote = placeNote + ": " + comment;
+ }
+ else if (StringUtils.isNotBlank(placeNote)) {
+ collectionNote = placeNote;
+ }
+ else {
+ collectionNote = comment;
+ }
+
+ return collectionNote;
+ }
+
+ private String getReverseFieldCollectionPlace(PoxPayloadOut collectionObjectPayload) throws URISyntaxException, DocumentException {
+ String reverseDisplayName = null;
+ String fieldCollectionPlaceRefName = getFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.FIELD_COLLECTION_PLACE_SCHEMA_NAME,
+ CollectionObjectBotGardenConstants.FIELD_COLLECTION_PLACE_FIELD_NAME);
+
+ if (StringUtils.isNotBlank(fieldCollectionPlaceRefName)) {
+ PoxPayloadOut placePayload = null;
+
+ try {
+ placePayload = findPlaceByRefName(fieldCollectionPlaceRefName);
+ }
+ catch (WebApplicationException e) {
+ logger.error("Error finding place: refName=" + fieldCollectionPlaceRefName, e);
+ }
+
+ if (placePayload != null) {
+ List<String> termTypes = getFieldValues(placePayload, PlaceConstants.TERM_TYPE_SCHEMA_NAME, PlaceConstants.TERM_TYPE_FIELD_NAME);
+ List<String> displayNames = getFieldValues(placePayload, PlaceConstants.DISPLAY_NAME_SCHEMA_NAME, PlaceConstants.DISPLAY_NAME_FIELD_NAME);
+
+ int index = termTypes.indexOf(PlaceBotGardenConstants.REVERSE_TERM_TYPE);
+
+ if (index < 0) {
+ // There's no reverse term. Just use the primary.
+
+ if (displayNames.size() > 0) {
+ reverseDisplayName = displayNames.get(0);
+ }
+ }
+ else {
+ reverseDisplayName = displayNames.get(index);
+ }
+ }
+ }
+
+ if (reverseDisplayName == null) {
+ reverseDisplayName = "";
+ }
+
+ return reverseDisplayName;
+ }
+
+ private String getAnnotation(PoxPayloadOut collectionObjectPayload) {
+ String annotation = "";
+ String determinationKind = getFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.DETERMINATION_KIND_SCHEMA_NAME,
+ CollectionObjectBotGardenConstants.DETERMINATION_KIND_FIELD_NAME);
+
+ if (determinationKind.equals(CollectionObjectBotGardenConstants.DETERMINATION_KIND_DETERMINATION_VALUE)) {
+ String determinationBy = getDisplayNameFromRefName(getFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.DETERMINATION_BY_SCHEMA_NAME,
+ CollectionObjectBotGardenConstants.DETERMINATION_BY_FIELD_NAME));
+
+ if (StringUtils.isNotBlank(determinationBy)) {
+ annotation += "det. by " + determinationBy;
+
+ String determinationInstitution = getDisplayNameFromRefName(getFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.DETERMINATION_INSTITUTION_SCHEMA_NAME,
+ CollectionObjectBotGardenConstants.DETERMINATION_INSTITUTION_FIELD_NAME));
+ String determinationDate = getFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.DETERMINATION_DATE_SCHEMA_NAME,
+ CollectionObjectBotGardenConstants.DETERMINATION_DATE_FIELD_NAME);
+
+ if (StringUtils.isNotBlank(determinationInstitution)) {
+ annotation += ", " + determinationInstitution;
+ }
+
+ if (StringUtils.isNotBlank(determinationDate)) {
+ annotation += ", " + determinationDate;
+ }
+ }
+ }
+
+ return annotation;
+ }
+
+ public InvocationResults createVoucherFromCurrentLocation(String movementCsid) throws ResourceException, URISyntaxException, DocumentException {
+ long numAffected = 0;
+ String primaryUriCreated = null;
+
+ List<String> collectionObjectCsids = findRelatedCollectionObjects(movementCsid);
+
+ for (String collectionObjectCsid : collectionObjectCsids) {
+ InvocationResults innerResults = createVoucherFromCataloging(collectionObjectCsid, movementCsid);
+
+ numAffected = numAffected + innerResults.getNumAffected();
+
+ if (primaryUriCreated == null) {
+ primaryUriCreated = innerResults.getPrimaryURICreated();
+ }
+ }
+
+ InvocationResults results = new InvocationResults();
+ results.setNumAffected(numAffected);
+ results.setPrimaryURICreated(primaryUriCreated);
+
+ if (collectionObjectCsids.size() == 0) {
+ results.setUserNote("No related cataloging record found");
+ }
+ else {
+ results.setUserNote("Voucher created for " + numAffected + " cataloging " + (numAffected == 1 ? "record" : "records"));
+ }
+
+ return results;
+ }
+
+ private String createVoucher(Map<String, String> botGardenFields, Map<String, String> naturalHistoryFields) throws ResourceException {
+ String voucherCsid = null;
+
+ String createVoucherPayload =
+ "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
+ "<document name=\"loansout\">" +
+ "<ns2:loansout_botgarden xmlns:ns2=\"http://collectionspace.org/services/loanout/local/botgarden\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">" +
+ getFieldXml(botGardenFields) +
+ "</ns2:loansout_botgarden>" +
+ "<ns2:loansout_naturalhistory xmlns:ns2=\"http://collectionspace.org/services/loanout/domain/naturalhistory\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">" +
+ getFieldXml(naturalHistoryFields) +
+ "</ns2:loansout_naturalhistory>" +
+ "</document>";
+
+ NuxeoBasedResource resource = (NuxeoBasedResource) getResourceMap().get(LoanoutClient.SERVICE_NAME);
+ Response response = resource.create(getResourceMap(), null, createVoucherPayload);
+
+ if (response.getStatus() == CREATED_STATUS) {
+ voucherCsid = CollectionSpaceClientUtils.extractId(response);
+ }
+ else {
+ throw new ResourceException(response, "Error creating voucher");
+ }
+
+ return voucherCsid;
+ }
+}
--- /dev/null
+package org.collectionspace.services.batch.nuxeo;
+
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.commons.lang.StringUtils;
+import org.collectionspace.services.client.PayloadOutputPart;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.client.TaxonomyAuthorityClient;
+import org.collectionspace.services.common.api.TaxonFormatter;
+import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.common.vocabulary.AuthorityResource;
+import org.collectionspace.services.taxonomy.nuxeo.TaxonConstants;
+import org.dom4j.DocumentException;
+import org.dom4j.Element;
+import org.dom4j.Node;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
/**
 * A batch job that computes a formatted display name for each term of a
 * taxon record, using TaxonFormatter, and saves the result back into the
 * record's term groups. The single, list, and no-context invocation modes
 * are supported; no-context mode is currently a no-op (see
 * findAllTaxonRecords).
 */
public class FormatTaxonBatchJob extends AbstractBatchJob {
	final Logger logger = LoggerFactory.getLogger(FormatTaxonBatchJob.class);

	// Produces the formatted form of a taxonomic display name.
	private TaxonFormatter taxonFormatter;

	public FormatTaxonBatchJob() {
		setSupportedInvocationModes(Arrays.asList(INVOCATION_MODE_SINGLE, INVOCATION_MODE_LIST, INVOCATION_MODE_NO_CONTEXT));
		this.taxonFormatter = new TaxonFormatter();
	}

	/**
	 * Entry point for the batch job. Formats the taxon record named by the
	 * invocation context (single mode), each record in the context's csid
	 * list (list mode), or all taxon records (no-context mode).
	 */
	public void run() {
		setCompletionStatus(STATUS_MIN_PROGRESS);

		try {
			String mode = getInvocationContext().getMode();

			if (mode.equalsIgnoreCase(INVOCATION_MODE_SINGLE)) {
				String csid = getInvocationContext().getSingleCSID();

				if (StringUtils.isEmpty(csid)) {
					throw new Exception("Missing context csid");
				}

				setResults(formatTaxon(csid));
			}
			else if (mode.equalsIgnoreCase(INVOCATION_MODE_LIST)) {
				setResults(formatTaxons(getInvocationContext().getListCSIDs().getCsid()));
			}
			else if (mode.equalsIgnoreCase(INVOCATION_MODE_NO_CONTEXT)) {
				setResults(formatAllTaxons());
			}
			else {
				throw new Exception("Unsupported invocation mode: " + mode);
			}

			setCompletionStatus(STATUS_COMPLETE);
		}
		catch(Exception e) {
			setCompletionStatus(STATUS_ERROR);
			setErrorInfo(new InvocationError(INT_ERROR_STATUS, e.getMessage()));
		}
	}

	/**
	 * Formats the display names of every taxon record.
	 *
	 * @return the results of the formatting
	 */
	public InvocationResults formatAllTaxons() throws URISyntaxException, DocumentException {
		return formatTaxons(findAllTaxonRecords());
	}

	/**
	 * Formats the display names of one taxon record.
	 *
	 * @param taxonCsid the csid of the taxon record
	 * @return the results of the formatting
	 */
	public InvocationResults formatTaxon(String taxonCsid) throws URISyntaxException, DocumentException {
		return formatTaxons(Arrays.asList(taxonCsid));
	}

	/**
	 * Formats the display names of the given taxon records.
	 *
	 * @param taxonCsids the csids of the taxon records
	 * @return the results of the formatting, counting one affected record
	 *         per csid
	 */
	public InvocationResults formatTaxons(List<String> taxonCsids) throws URISyntaxException, DocumentException {
		InvocationResults results = new InvocationResults();
		int numAffected = 0;

		for (String taxonCsid : taxonCsids) {
			formatDisplayNames(taxonCsid);

			numAffected = numAffected + 1;
		}

		results.setNumAffected(numAffected);
		results.setUserNote("Updated " + numAffected + " taxonomy " + (numAffected == 1 ? "record" : "records"));

		return results;
	}

	/**
	 * Computes the formatted display name for each term group of a taxon
	 * record, writes it into the term group's formatted display name field,
	 * and updates the record via the taxonomy authority service.
	 *
	 * @param taxonCsid the csid of the taxon record
	 * @return the formatted display names, one per term group
	 * @throws URISyntaxException if the update URI can not be created
	 * @throws DocumentException if the record payload can not be parsed
	 */
	private List<String> formatDisplayNames(String taxonCsid) throws URISyntaxException, DocumentException {
		List<String> formattedDisplayNames = new ArrayList<String>();

		PoxPayloadOut taxonPayload = findTaxonByCsid(taxonCsid);
		String inAuthority = getFieldValue(taxonPayload, TaxonConstants.IN_AUTHORITY_SCHEMA_NAME, TaxonConstants.IN_AUTHORITY_FIELD_NAME);

		// The display name field name is a slash-delimited path of the form
		// termGroupList/termGroup/field. NOTE(review): assumes both paths
		// have exactly three segments -- confirm against TaxonConstants.
		String[] displayNamePathElements = TaxonConstants.DISPLAY_NAME_FIELD_NAME.split("/");
		String termGroupListFieldName = displayNamePathElements[0];
		String termGroupFieldName = displayNamePathElements[1];
		String displayNameFieldName = displayNamePathElements[2];

		String[] formattedDisplayNamePathElements = TaxonConstants.FORMATTED_DISPLAY_NAME_FIELD_NAME.split("/");
		String formattedDisplayNameFieldName = formattedDisplayNamePathElements[2];

		PayloadOutputPart part = taxonPayload.getPart(TaxonConstants.DISPLAY_NAME_SCHEMA_NAME);

		// The record may not contain the term group schema part at all; in
		// that case there is nothing to format and no update is made.
		if (part != null) {
			Element element = part.asElement();
			Node termGroupListNode = element.selectSingleNode(termGroupListFieldName);
			List<Element> termGroupElements = termGroupListNode.selectNodes(termGroupFieldName);

			for (Element termGroupElement : termGroupElements) {
				Node displayNameNode = termGroupElement.selectSingleNode(displayNameFieldName);
				String displayName = (displayNameNode == null) ? "" : displayNameNode.getText();
				String formattedDisplayName = taxonFormatter.format(displayName);

				// Create the formatted display name element in the term
				// group if it doesn't already exist, then set its text.
				Element formattedDisplayNameElement = (Element) termGroupElement.selectSingleNode(formattedDisplayNameFieldName);

				if (formattedDisplayNameElement == null) {
					formattedDisplayNameElement = termGroupElement.addElement(formattedDisplayNameFieldName);
				}

				formattedDisplayNameElement.setText(formattedDisplayName);
				formattedDisplayNames.add(formattedDisplayName);
			}

			// Send the entire (mutated) term group list back in a partial
			// update of the taxon_common part.
			String updatePayload =
				"<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
				"<document name=\"taxon\">" +
					"<ns2:taxon_common xmlns:ns2=\"http://collectionspace.org/services/taxonomy\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">" +
						termGroupListNode.asXML() +
					"</ns2:taxon_common>" +
				"</document>";

			AuthorityResource<?, ?> resource = (AuthorityResource<?, ?>) getResourceMap().get(TaxonomyAuthorityClient.SERVICE_NAME);
			resource.updateAuthorityItem(getResourceMap(), createUriInfo(), inAuthority, taxonCsid, updatePayload);
		}

		return formattedDisplayNames;
	}

	/**
	 * Finds the csids of all taxon records. Not yet implemented, so
	 * no-context invocations currently affect zero records.
	 *
	 * @return an empty list
	 */
	private List<String> findAllTaxonRecords() {
		// TODO
		return Collections.emptyList();
	}
}
--- /dev/null
+package org.collectionspace.services.batch.nuxeo;
+
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.commons.lang.StringUtils;
+import org.collectionspace.services.client.LoanoutClient;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.client.workflow.WorkflowClient;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectBotGardenConstants;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectConstants;
+import org.collectionspace.services.common.NuxeoBasedResource;
+import org.collectionspace.services.common.api.TaxonFormatter;
+import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.jaxb.AbstractCommonList;
+import org.collectionspace.services.loanout.LoanoutResource;
+import org.collectionspace.services.loanout.nuxeo.LoanoutBotGardenConstants;
+
+import org.dom4j.DocumentException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
/**
 * A batch job that computes the styled (formatted) taxonomic name of
 * voucher (loanout) records from their related cataloging records, and
 * stores it in the voucher's styledName field. The single, list, and
 * no-context invocation modes are supported; no-context mode processes all
 * vouchers with a pending label request.
 */
public class FormatVoucherNameBatchJob extends AbstractBatchJob {
	// Separator placed between the two parent names of a hybrid.
	public static final String HYBRID_SEPARATOR = " x ";

	final Logger logger = LoggerFactory.getLogger(FormatVoucherNameBatchJob.class);

	// Produces the formatted form of a taxonomic display name.
	private TaxonFormatter taxonFormatter;

	public FormatVoucherNameBatchJob() {
		setSupportedInvocationModes(Arrays.asList(INVOCATION_MODE_SINGLE, INVOCATION_MODE_LIST, INVOCATION_MODE_NO_CONTEXT));
		this.taxonFormatter = new TaxonFormatter();
	}

	/**
	 * Entry point for the batch job. Formats the voucher named by the
	 * invocation context (single mode), each voucher in the context's csid
	 * list (list mode), or all vouchers with a pending label request
	 * (no-context mode).
	 */
	public void run() {
		setCompletionStatus(STATUS_MIN_PROGRESS);

		try {
			String mode = getInvocationContext().getMode();

			if (mode.equalsIgnoreCase(INVOCATION_MODE_SINGLE)) {
				String csid = getInvocationContext().getSingleCSID();

				if (StringUtils.isEmpty(csid)) {
					throw new Exception("Missing context csid");
				}

				setResults(formatVoucherName(csid));
			}
			else if (mode.equalsIgnoreCase(INVOCATION_MODE_LIST)) {
				setResults(formatVoucherNames(getInvocationContext().getListCSIDs().getCsid()));
			}
			else if (mode.equalsIgnoreCase(INVOCATION_MODE_NO_CONTEXT)) {
				setResults(formatQueuedVoucherNames());
			}
			else {
				throw new Exception("Unsupported invocation mode: " + mode);
			}

			setCompletionStatus(STATUS_COMPLETE);
		}
		catch(Exception e) {
			setCompletionStatus(STATUS_ERROR);
			setErrorInfo(new InvocationError(INT_ERROR_STATUS, e.getMessage()));
		}
	}

	/**
	 * Formats the name of every voucher that has a pending label request.
	 *
	 * @return the results of the formatting
	 */
	public InvocationResults formatQueuedVoucherNames() throws URISyntaxException, DocumentException {
		return formatVoucherNames(findLabelRequests());
	}

	/**
	 * Formats the name of one voucher.
	 *
	 * @param voucherCsid the csid of the voucher
	 * @return the results of the formatting
	 */
	public InvocationResults formatVoucherName(String voucherCsid) throws URISyntaxException, DocumentException {
		return formatVoucherNames(Arrays.asList(voucherCsid));
	}

	/**
	 * Formats the name of each of the given vouchers and stores the result
	 * in the voucher's styledName field.
	 *
	 * @param voucherCsids the csids of the vouchers
	 * @return the results of the formatting; when exactly one voucher was
	 *         updated, the user note includes the formatted name
	 */
	public InvocationResults formatVoucherNames(List<String> voucherCsids) throws URISyntaxException, DocumentException {
		InvocationResults results = new InvocationResults();
		int numAffected = 0;
		List<String> formattedNames = new ArrayList<String>();

		for (String voucherCsid : voucherCsids) {
			VoucherName name = getVoucherName(voucherCsid);
			String formattedName = formatVoucherName(name);

			logger.debug("formattedName=" + formattedName);

			setStyledName(voucherCsid, formattedName);

			formattedNames.add(formattedName);
			numAffected = numAffected + 1;
		}

		results.setNumAffected(numAffected);
		results.setUserNote("Updated " + numAffected + " " + (numAffected == 1 ? "voucher" : "vouchers") + (numAffected == 1 ? ": " + formattedNames.get(0) : ""));

		return results;
	}

	/**
	 * Finds vouchers that have a pending label request, by performing a
	 * keyword search on the label requested field.
	 *
	 * @return the csids of the matching vouchers
	 * @throws URISyntaxException if the search URI can not be created
	 */
	private List<String> findLabelRequests() throws URISyntaxException {
		List<String> csids = new ArrayList<String>();
		LoanoutResource loanoutResource = (LoanoutResource) getResourceMap().get(LoanoutClient.SERVICE_NAME);
		AbstractCommonList loanoutList = loanoutResource.getList(createLabelRequestSearchUriInfo());

		// Each list item carries its fields as generic DOM elements; pull
		// out the first element named "csid" from each item.
		for (AbstractCommonList.ListItem item : loanoutList.getListItem()) {
			for (org.w3c.dom.Element element : item.getAny()) {
				if (element.getTagName().equals("csid")) {
					csids.add(element.getTextContent());
					break;
				}
			}
		}

		return csids;
	}

	/**
	 * Builds the name of a voucher from its related cataloging record: the
	 * taxonomic name, plus the hybrid flag and parent names when the
	 * determination is a hybrid.
	 *
	 * @param voucherCsid the csid of the voucher
	 * @return the name, or null if the voucher has no non-deleted related
	 *         cataloging record
	 */
	public VoucherName getVoucherName(String voucherCsid) throws URISyntaxException, DocumentException {
		VoucherName name = null;
		List<String> collectionObjectCsids = findRelatedCollectionObjects(voucherCsid);
		PoxPayloadOut collectionObjectPayload = null;

		// Pick a non-deleted related cataloging record. NOTE(review): if
		// more than one exists, the last one examined wins -- confirm that
		// this is intended.
		for (String candidateCsid : collectionObjectCsids) {
			PoxPayloadOut candidatePayload = findCollectionObjectByCsid(candidateCsid);
			String workflowState = getFieldValue(candidatePayload, CollectionObjectConstants.WORKFLOW_STATE_SCHEMA_NAME, CollectionObjectConstants.WORKFLOW_STATE_FIELD_NAME);

			if (!workflowState.equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
				collectionObjectPayload = candidatePayload;
			}
		}

		if (collectionObjectPayload != null) {
			name = new VoucherName();

			name.setName(getDisplayNameFromRefName(getFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.TAXON_SCHEMA_NAME,
					CollectionObjectBotGardenConstants.TAXON_FIELD_NAME)));
			name.setHybrid(getBooleanFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.HYBRID_FLAG_SCHEMA_NAME,
					CollectionObjectBotGardenConstants.HYBRID_FLAG_FIELD_NAME));

			if (name.isHybrid()) {
				// The parent and qualifier lists are parallel: the female
				// (resp. male) parent is the one whose qualifier, at the
				// same index, is the female (resp. male) value.
				List<String> hybridParents = this.getFieldValues(collectionObjectPayload, CollectionObjectBotGardenConstants.HYBRID_PARENT_SCHEMA_NAME,
						CollectionObjectBotGardenConstants.HYBRID_PARENT_FIELD_NAME);
				List<String> hybridQualifiers = this.getFieldValues(collectionObjectPayload, CollectionObjectBotGardenConstants.HYBRID_QUALIFIER_SCHEMA_NAME,
						CollectionObjectBotGardenConstants.HYBRID_QUALIFIER_FIELD_NAME);

				int femaleIndex = hybridQualifiers.indexOf(CollectionObjectBotGardenConstants.HYBRID_QUALIFIER_FEMALE_VALUE);
				int maleIndex = hybridQualifiers.indexOf(CollectionObjectBotGardenConstants.HYBRID_QUALIFIER_MALE_VALUE);

				if (femaleIndex >= 0) {
					name.setFemaleParentName(getDisplayNameFromRefName(hybridParents.get(femaleIndex)));
				}

				if (maleIndex >= 0) {
					name.setMaleParentName(getDisplayNameFromRefName(hybridParents.get(maleIndex)));
				}
			}
		}

		return name;
	}

	/**
	 * Renders a VoucherName as a formatted string. A hybrid is rendered as
	 * "{female parent} x {male parent}" (either side may be empty); a
	 * non-hybrid is rendered as its formatted taxonomic name.
	 *
	 * @param name the name to format, may be null
	 * @return the formatted name, or the empty string if name is null
	 */
	public String formatVoucherName(VoucherName name) {
		String formattedName = "";

		if (name != null) {
			if (name.isHybrid()) {
				if (name.getFemaleParentName() != null) {
					formattedName += taxonFormatter.format(name.getFemaleParentName());
				}

				formattedName += HYBRID_SEPARATOR;

				if (name.getMaleParentName() != null) {
					formattedName += taxonFormatter.format(name.getMaleParentName());
				}
			}
			else {
				if (name.getName() != null) {
					formattedName = taxonFormatter.format(name.getName());
				}
			}
		}

		return formattedName;
	}

	/**
	 * Stores a styled name in a voucher's styledName field.
	 *
	 * @param loanoutCsid the csid of the voucher to update
	 * @param styledName the styled name to store
	 * @throws URISyntaxException if the update URI can not be created
	 */
	private void setStyledName(String loanoutCsid, String styledName) throws URISyntaxException {
		final String updatePayload =
			"<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
			"<document name=\"loansout\">" +
				"<ns2:loansout_botgarden xmlns:ns2=\"http://collectionspace.org/services/loanout/local/botgarden\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">" +
					getFieldXml("styledName", styledName) +
				"</ns2:loansout_botgarden>" +
			"</document>";

		NuxeoBasedResource resource = (NuxeoBasedResource) getResourceMap().get(LoanoutClient.SERVICE_NAME);
		resource.update(getResourceMap(), createUriInfo(), loanoutCsid, updatePayload);
	}

	/**
	 * Creates a UriInfo that performs a keyword search for vouchers whose
	 * label requested field is set to "yes".
	 *
	 * @return the search UriInfo
	 * @throws URISyntaxException if the search URI can not be created
	 */
	private UriInfo createLabelRequestSearchUriInfo() throws URISyntaxException {
		return createKeywordSearchUriInfo(LoanoutBotGardenConstants.LABEL_REQUESTED_SCHEMA_NAME, LoanoutBotGardenConstants.LABEL_REQUESTED_FIELD_NAME,
				LoanoutBotGardenConstants.LABEL_REQUESTED_YES_VALUE);
	}

	/**
	 * The components of a voucher's taxonomic name: the primary name, and,
	 * for hybrids, the names of the female and male parents.
	 */
	public class VoucherName {
		// True when the determination is a hybrid.
		private boolean isHybrid = false;
		// The primary taxonomic name (used when not a hybrid).
		private String name;
		// The female parent's name (hybrids only), may be null.
		private String femaleParentName;
		// The male parent's name (hybrids only), may be null.
		private String maleParentName;

		public boolean isHybrid() {
			return isHybrid;
		}

		public void setHybrid(boolean isHybrid) {
			this.isHybrid = isHybrid;
		}

		public String getName() {
			return name;
		}

		public void setName(String name) {
			this.name = name;
		}

		public String getFemaleParentName() {
			return femaleParentName;
		}

		public void setFemaleParentName(String femaleParentName) {
			this.femaleParentName = femaleParentName;
		}

		public String getMaleParentName() {
			return maleParentName;
		}

		public void setMaleParentName(String maleParentName) {
			this.maleParentName = maleParentName;
		}
	}
}
--- /dev/null
+package org.collectionspace.services.batch.nuxeo;
+
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.collectionspace.services.client.PayloadOutputPart;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.client.RelationClient;
+import org.collectionspace.services.client.workflow.WorkflowClient;
+import org.collectionspace.services.common.NuxeoBasedResource;
+import org.collectionspace.services.common.api.RefNameUtils;
+import org.collectionspace.services.common.api.RefNameUtils.AuthorityTermInfo;
+import org.collectionspace.services.common.authorityref.AuthorityRefDocList;
+import org.collectionspace.services.common.invocable.InvocationContext.Params.Param;
+import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.common.relation.RelationResource;
+import org.collectionspace.services.common.vocabulary.AuthorityResource;
+import org.collectionspace.services.relation.RelationsCommonList;
+import org.collectionspace.services.relation.RelationsCommonList.RelationListItem;
+import org.dom4j.Document;
+import org.dom4j.DocumentException;
+import org.dom4j.DocumentHelper;
+import org.dom4j.Element;
+import org.dom4j.Node;
+import org.nuxeo.common.utils.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A batch job that merges authority items. The single and list contexts are
+ * supported.
+ *
+ * The merge target is a record into which one or more source records will be
+ * merged. A merge source is a record that will be merged into the target, as
+ * follows: Each term in a source record is added to the target as a non-
+ * preferred term, if that term does not already exist in the target. If a term
+ * in the source already exists in the target, each non-blank term field is
+ * copied to the target, if that field is empty in the target. If the field is
+ * non-empty in the target, and differs from the source field, a warning is
+ * emitted and no action is taken. If a source is successfully merged into the
+ * target, all references to the source are transferred to the target, and the
+ * source record is soft-deleted.
+ *
+ * The context (singleCSID or listCSIDs) of the batch invocation payload
+ * specifies the source record(s).
+ *
+ * The following parameters are allowed:
+ *
+ * targetCSID: The csid of the target record. Only one target may be supplied.
+ *
+ * @author ray
+ */
+public class MergeAuthorityItemsBatchJob extends AbstractBatchJob {
+ final Logger logger = LoggerFactory.getLogger(MergeAuthorityItemsBatchJob.class);
+
	// Merging operates on specific records, so only the single and list
	// invocation modes are supported.
	public MergeAuthorityItemsBatchJob() {
		setSupportedInvocationModes(Arrays.asList(INVOCATION_MODE_SINGLE, INVOCATION_MODE_LIST));
	}
+
+ @Override
+ public void run() {
+ setCompletionStatus(STATUS_MIN_PROGRESS);
+
+ try {
+ String docType = null;
+ String targetCsid = null;
+ List<String> sourceCsids = new ArrayList<String>();
+
+ for (Param param : this.getParams()) {
+ String key = param.getKey();
+
+ // I don't want this batch job to appear in the UI, since it won't run successfully without parameters.
+ // That means it can't be registered with any docType. But if the invocation payload contains a docType,
+ // it will be checked against the null registered docType, and will fail. So docType should be passed as a
+ // parameter instead.
+
+ if (key.equals("docType")) {
+ docType = param.getValue();
+ }
+ else if (key.equals("targetCSID")) {
+ targetCsid = param.getValue();
+ }
+ else if (key.equals("sourceCSID")) {
+ sourceCsids.add(param.getValue());
+ }
+ }
+
+ if (docType == null || docType.equals("")) {
+ throw new Exception("a docType must be supplied");
+ }
+
+ if (targetCsid == null || targetCsid.equals("")) {
+ throw new Exception("a target csid parameter (targetCSID) must be supplied");
+ }
+
+ if (sourceCsids.size() == 0) {
+ throw new Exception("a source csid must be supplied");
+ }
+
+ InvocationResults results = merge(docType, targetCsid, sourceCsids);
+
+ setResults(results);
+ setCompletionStatus(STATUS_COMPLETE);
+ }
+ catch (Exception e) {
+ setCompletionStatus(STATUS_ERROR);
+ setErrorInfo(new InvocationError(INT_ERROR_STATUS, e.getMessage()));
+ }
+ }
+
+ public InvocationResults merge(String docType, String targetCsid, String sourceCsid) throws URISyntaxException, DocumentException {
+ return merge(docType, targetCsid, Arrays.asList(sourceCsid));
+ }
+
+ public InvocationResults merge(String docType, String targetCsid, List<String> sourceCsids) throws URISyntaxException, DocumentException {
+ logger.debug("Merging docType=" + docType + " targetCsid=" + targetCsid + " sourceCsids=" + StringUtils.join(sourceCsids, ","));
+
+ String serviceName = getAuthorityServiceNameForDocType(docType);
+
+ PoxPayloadOut targetItemPayload = findAuthorityItemByCsid(serviceName, targetCsid);
+ List<PoxPayloadOut> sourceItemPayloads = new ArrayList<PoxPayloadOut>();
+
+ for (String sourceCsid : sourceCsids) {
+ sourceItemPayloads.add(findAuthorityItemByCsid(serviceName, sourceCsid));
+ }
+
+ return merge(docType, targetItemPayload, sourceItemPayloads);
+ }
+
+ private InvocationResults merge(String docType, PoxPayloadOut targetItemPayload, List<PoxPayloadOut> sourceItemPayloads) throws URISyntaxException, DocumentException {
+ int numAffected = 0;
+ List<String> userNotes = new ArrayList<String>();
+
+ Element targetTermGroupListElement = getTermGroupListElement(targetItemPayload);
+ Element mergedTermGroupListElement = targetTermGroupListElement.createCopy();
+
+ String targetCsid = getCsid(targetItemPayload);
+ String targetRefName = getRefName(targetItemPayload);
+ String inAuthority = getFieldValue(targetItemPayload, "inAuthority");
+
+ logger.debug("Merging term groups");
+
+ for (PoxPayloadOut sourceItemPayload : sourceItemPayloads) {
+ String sourceCsid = getCsid(sourceItemPayload);
+ Element sourceTermGroupListElement = getTermGroupListElement(sourceItemPayload);
+
+ logger.debug("Merging term groups from source " + sourceCsid + " into target " + targetCsid);
+
+ try {
+ mergeTermGroupLists(mergedTermGroupListElement, sourceTermGroupListElement);
+ }
+ catch(RuntimeException e) {
+ throw new RuntimeException("Error merging source record " + sourceCsid + " into target record " + targetCsid + ": " + e.getMessage(), e);
+ }
+ }
+
+ logger.debug("Updating target: docType=" + docType + " inAuthority=" + inAuthority + " targetCsid=" + targetCsid);
+
+ updateAuthorityItem(docType, inAuthority, targetCsid, getUpdatePayload(targetTermGroupListElement, mergedTermGroupListElement));
+
+ userNotes.add("The target record with CSID " + targetCsid + " (" + targetRefName + ") was updated.");
+ numAffected++;
+
+ String serviceName = getAuthorityServiceNameForDocType(docType);
+
+ logger.debug("Updating references");
+
+ for (PoxPayloadOut sourceItemPayload : sourceItemPayloads) {
+ String sourceCsid = getCsid(sourceItemPayload);
+ String sourceRefName = getRefName(sourceItemPayload);
+
+ InvocationResults results = updateReferences(serviceName, inAuthority, sourceCsid, sourceRefName, targetRefName);
+
+ userNotes.add(results.getUserNote());
+ numAffected += results.getNumAffected();
+ }
+
+ logger.debug("Deleting source items");
+
+ for (PoxPayloadOut sourceItemPayload : sourceItemPayloads) {
+ String sourceCsid = getCsid(sourceItemPayload);
+ String sourceRefName = getRefName(sourceItemPayload);
+
+ InvocationResults results = deleteAuthorityItem(docType, getFieldValue(sourceItemPayload, "inAuthority"), sourceCsid);
+
+ userNotes.add(results.getUserNote());
+ numAffected += results.getNumAffected();
+ }
+
+ InvocationResults results = new InvocationResults();
+ results.setNumAffected(numAffected);
+ results.setUserNote(StringUtils.join(userNotes, "\n"));
+
+ return results;
+ }
+
+ private InvocationResults updateReferences(String serviceName, String inAuthority, String sourceCsid, String sourceRefName, String targetRefName) throws URISyntaxException, DocumentException {
+ logger.debug("Updating references: serviceName=" + serviceName + " inAuthority=" + inAuthority + " sourceCsid=" + sourceCsid + " sourceRefName=" + sourceRefName + " targetRefName=" + targetRefName);
+
+ int pageNum = 0;
+ int pageSize = 100;
+ List<AuthorityRefDocList.AuthorityRefDocItem> items;
+
+ int loopCount = 0;
+ int numUpdated = 0;
+
+ logger.debug("Looping with pageSize=" + pageSize);
+
+ do {
+ loopCount++;
+
+ // The pageNum/pageSize parameters don't work properly for refobj requests!
+ // It should be safe to repeatedly fetch page 0 for a large-ish page size,
+ // and update that page, until no references are left.
+
+ items = findReferencingFields(serviceName, inAuthority, sourceCsid, null, pageNum, pageSize);
+ Map<String, ReferencingRecord> referencingRecordsByCsid = new LinkedHashMap<String, ReferencingRecord>();
+
+ logger.debug("Loop " + loopCount + ": " + items.size() + " items found");
+
+ for (AuthorityRefDocList.AuthorityRefDocItem item : items) {
+ // If a record contains a reference to the record multiple times, multiple items are returned,
+ // but only the first has a non-null workflow state. A bug?
+
+ String itemCsid = item.getDocId();
+ ReferencingRecord record = referencingRecordsByCsid.get(itemCsid);
+
+ if (record == null) {
+ if (item.getWorkflowState() != null && !item.getWorkflowState().equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+ record = new ReferencingRecord(item.getUri());
+ referencingRecordsByCsid.put(itemCsid, record);
+ }
+ }
+
+ if (record != null) {
+ String[] sourceFieldElements = item.getSourceField().split(":");
+ String partName = sourceFieldElements[0];
+ String fieldName = sourceFieldElements[1];
+
+ Map<String, Set<String>> fields = record.getFields();
+ Set<String> fieldsInPart = fields.get(partName);
+
+ if (fieldsInPart == null) {
+ fieldsInPart = new HashSet<String>();
+ fields.put(partName, fieldsInPart);
+ }
+
+ fieldsInPart.add(fieldName);
+ }
+ }
+
+ List<ReferencingRecord> referencingRecords = new ArrayList<ReferencingRecord>(referencingRecordsByCsid.values());
+
+ logger.debug("Loop " + loopCount + ": updating " + referencingRecords.size() + " records");
+
+ for (ReferencingRecord record : referencingRecords) {
+ InvocationResults results = updateReferencingRecord(record, sourceRefName, targetRefName);
+ numUpdated += results.getNumAffected();
+ }
+ }
+ while (items.size() > 0);
+
+ InvocationResults results = new InvocationResults();
+ results.setNumAffected(numUpdated);
+ results.setUserNote(numUpdated > 0 ?
+ numUpdated + " records that referenced the source record with CSID " + sourceCsid + " were updated." :
+ "No records referenced the source record with CSID " + sourceCsid + ".");
+
+ return results;
+ }
+
+ private InvocationResults updateReferencingRecord(ReferencingRecord record, String fromRefName, String toRefName) throws URISyntaxException, DocumentException {
+ String fromRefNameStem = RefNameUtils.stripAuthorityTermDisplayName(fromRefName);
+ // String toRefNameStem = RefNameUtils.stripAuthorityTermDisplayName(toRefName);
+
+ logger.debug("Updating references: record.uri=" + record.getUri() + " fromRefName=" + fromRefName + " toRefName=" + toRefName);
+
+ Map<String, Set<String>> fields = record.getFields();
+
+ PoxPayloadOut recordPayload = findByUri(record.getUri());
+ Document recordDocument = recordPayload.getDOMDocument();
+ Document newDocument = (Document) recordDocument.clone();
+ Element rootElement = newDocument.getRootElement();
+
+ for (Element partElement : (List<Element>) rootElement.elements()) {
+ String partName = partElement.getName();
+
+ if (fields.containsKey(partName)) {
+ for (String fieldName : fields.get(partName)) {
+ List<Node> nodes = partElement.selectNodes("descendant::" + fieldName);
+
+ for (Node node : nodes) {
+ String text = node.getText();
+ String refNameStem = null;
+
+ try {
+ refNameStem = RefNameUtils.stripAuthorityTermDisplayName(text);
+ }
+ catch(IllegalArgumentException e) {}
+
+ if (refNameStem != null && refNameStem.equals(fromRefNameStem)) {
+ AuthorityTermInfo termInfo = RefNameUtils.parseAuthorityTermInfo(text);
+ // String newRefName = toRefNameStem + "'" + termInfo.displayName + "'";
+ String newRefName = toRefName;
+
+ node.setText(newRefName);
+ }
+ }
+ }
+ }
+ else {
+ rootElement.remove(partElement);
+ }
+ }
+
+ String payload = newDocument.asXML();
+
+ return updateUri(record.getUri(), payload);
+ }
+
+ private InvocationResults updateUri(String uri, String payload) throws URISyntaxException {
+ String[] uriParts = uri.split("/");
+
+ if (uriParts.length == 3) {
+ String serviceName = uriParts[1];
+ String csid = uriParts[2];
+
+ NuxeoBasedResource resource = (NuxeoBasedResource) getResourceMap().get(serviceName);
+
+ resource.update(getResourceMap(), createUriInfo(), csid, payload);
+ }
+ else if (uriParts.length == 5) {
+ String serviceName = uriParts[1];
+ String vocabularyCsid = uriParts[2];
+ String items = uriParts[3];
+ String csid = uriParts[4];
+
+ if (items.equals("items")) {
+ AuthorityResource<?, ?> resource = (AuthorityResource<?, ?>) getResourceMap().get(serviceName);
+
+ resource.updateAuthorityItem(getResourceMap(), createUriInfo(), vocabularyCsid, csid, payload);
+ }
+ }
+ else {
+ throw new IllegalArgumentException("Invalid uri " + uri);
+ }
+
+ logger.debug("Updated referencing record " + uri);
+
+ InvocationResults results = new InvocationResults();
+ results.setNumAffected(1);
+ results.setUserNote("Updated referencing record " + uri);
+
+ return results;
+ }
+
+ private void updateAuthorityItem(String docType, String inAuthority, String csid, String payload) throws URISyntaxException {
+ String serviceName = getAuthorityServiceNameForDocType(docType);
+ AuthorityResource<?, ?> resource = (AuthorityResource<?, ?>) getResourceMap().get(serviceName);
+
+ resource.updateAuthorityItem(getResourceMap(), createUriInfo(), inAuthority, csid, payload);
+ }
+
+ private InvocationResults deleteAuthorityItem(String docType, String inAuthority, String csid) throws URISyntaxException {
+ int numAffected = 0;
+ List<String> userNotes = new ArrayList<String>();
+
+ // If the item is the broader context of any items, warn and do nothing.
+
+ List<String> narrowerItemCsids = findNarrower(csid);
+
+ if (narrowerItemCsids.size() > 0) {
+ logger.debug("Item " + csid + " has narrower items -- not deleting");
+
+ userNotes.add("The source record with CSID " + csid + " was not deleted because it has narrower context items.");
+ }
+ else {
+ // If the item has a broader context, delete the relation.
+
+ List<String> relationCsids = new ArrayList<String>();
+
+ for (RelationsCommonList.RelationListItem item : findRelated(csid, null, "hasBroader", null, null)) {
+ relationCsids.add(item.getCsid());
+ }
+
+ if (relationCsids.size() > 0) {
+ RelationResource relationResource = (RelationResource) getResourceMap().get(RelationClient.SERVICE_NAME);
+
+ for (String relationCsid : relationCsids) {
+ logger.debug("Deleting hasBroader relation " + relationCsid);
+
+ relationResource.delete(relationCsid);
+
+ userNotes.add("The broader relation with CSID " + relationCsid + " was deleted.");
+ numAffected++;
+ }
+ }
+
+ String serviceName = getAuthorityServiceNameForDocType(docType);
+ AuthorityResource<?, ?> resource = (AuthorityResource<?, ?>) getResourceMap().get(serviceName);
+
+ logger.debug("Soft deleting: docType=" + docType + " inAuthority=" + inAuthority + " csid=" + csid);
+
+ resource.updateItemWorkflowWithTransition(null, inAuthority, csid, "delete");
+
+ userNotes.add("The source record with CSID " + csid + " was soft deleted.");
+ numAffected++;
+ }
+
+ InvocationResults results = new InvocationResults();
+ results.setNumAffected(numAffected);
+ results.setUserNote(StringUtils.join(userNotes, "\n"));
+
+ return results;
+ }
+
+ /**
+ * @param Returns a map of the term groups in term group list, keyed by display name.
+ * If multiple groups have the same display name, an exception is thrown.
+ * @return The term groups.
+ */
+ private Map<String, Element> getTermGroups(Element termGroupListElement) {
+ Map<String, Element> termGroups = new LinkedHashMap<String, Element>();
+ Iterator<Element> childIterator = termGroupListElement.elementIterator();
+
+ while (childIterator.hasNext()) {
+ Element termGroupElement = childIterator.next();
+ String displayName = getDisplayName(termGroupElement);
+
+ if (termGroups.containsKey(displayName)) {
+ // Two term groups in the same item have identical display names.
+
+ throw new RuntimeException("multiple terms have display name \"" + displayName + "\"");
+ }
+ else {
+ termGroups.put(displayName, termGroupElement);
+ }
+ }
+
+ return termGroups;
+ }
+
+ private String getDisplayName(Element termGroupElement) {
+ Node displayNameNode = termGroupElement.selectSingleNode("termDisplayName");
+ String displayName = (displayNameNode == null) ? "" : displayNameNode.getText();
+
+ return displayName;
+ }
+
+ private Element getTermGroupListElement(PoxPayloadOut itemPayload) {
+ Element termGroupListElement = null;
+ Element commonPartElement = findCommonPartElement(itemPayload);
+
+ if (commonPartElement != null) {
+ termGroupListElement = findTermGroupListElement(commonPartElement);
+ }
+
+ return termGroupListElement;
+ }
+
+ private Element findCommonPartElement(PoxPayloadOut itemPayload) {
+ Element commonPartElement = null;
+
+ for (PayloadOutputPart candidatePart : itemPayload.getParts()) {
+ Element candidatePartElement = candidatePart.asElement();
+
+ if (candidatePartElement.getName().endsWith("_common")) {
+ commonPartElement = candidatePartElement;
+ break;
+ }
+ }
+
+ return commonPartElement;
+ }
+
+ private Element findTermGroupListElement(Element contextElement) {
+ Element termGroupListElement = null;
+ Iterator<Element> childIterator = contextElement.elementIterator();
+
+ while (childIterator.hasNext()) {
+ Element candidateElement = childIterator.next();
+
+ if (candidateElement.getName().endsWith("TermGroupList")) {
+ termGroupListElement = candidateElement;
+ break;
+ }
+ }
+
+ return termGroupListElement;
+ }
+
+ private void mergeTermGroupLists(Element targetTermGroupListElement, Element sourceTermGroupListElement) {
+ Map<String, Element> sourceTermGroups;
+
+ try {
+ sourceTermGroups = getTermGroups(sourceTermGroupListElement);
+ }
+ catch(RuntimeException e) {
+ throw new RuntimeException("a problem was found in the source record: " + e.getMessage(), e);
+ }
+
+ for (Element targetTermGroupElement : (List<Element>) targetTermGroupListElement.elements()) {
+ String displayName = getDisplayName(targetTermGroupElement);
+
+ if (sourceTermGroups.containsKey(displayName)) {
+ logger.debug("Merging in existing term \"" + displayName + "\"");
+
+ try {
+ mergeTermGroups(targetTermGroupElement, sourceTermGroups.get(displayName));
+ }
+ catch(RuntimeException e) {
+ throw new RuntimeException("could not merge term groups with display name \"" + displayName + "\": " + e.getMessage(), e);
+ }
+
+ sourceTermGroups.remove(displayName);
+ }
+ }
+
+ for (Element sourceTermGroupElement : sourceTermGroups.values()) {
+ logger.debug("Adding new term \"" + getDisplayName(sourceTermGroupElement) + "\"");
+
+ targetTermGroupListElement.add(sourceTermGroupElement.createCopy());
+ }
+ }
+
+ private void mergeTermGroups(Element targetTermGroupElement, Element sourceTermGroupElement) {
+ // This function assumes there are no nested repeating groups.
+
+ for (Element sourceChildElement : (List<Element>) sourceTermGroupElement.elements()) {
+ String sourceValue = sourceChildElement.getText();
+
+ if (sourceValue == null) {
+ sourceValue = "";
+ }
+
+ if (sourceValue.length() > 0) {
+ String name = sourceChildElement.getName();
+ Element targetChildElement = targetTermGroupElement.element(name);
+
+ if (targetChildElement == null) {
+ targetTermGroupElement.add(sourceChildElement.createCopy());
+ }
+ else {
+ String targetValue = targetChildElement.getText();
+
+ if (targetValue == null) {
+ targetValue = "";
+ }
+
+ if (!targetValue.equals(sourceValue)) {
+ if (targetValue.length() > 0) {
+ throw new RuntimeException("merge conflict in field " + name + ": source value \"" + sourceValue + "\" differs from target value \"" + targetValue +"\"");
+ }
+
+ targetTermGroupElement.remove(targetChildElement);
+ targetTermGroupElement.add(sourceChildElement.createCopy());
+ }
+ }
+ }
+ }
+ }
+
+ private String getUpdatePayload(Element originalTermGroupListElement, Element updatedTermGroupListElement) {
+ List<Element> parents = new ArrayList<Element>();
+
+ for (Element e = originalTermGroupListElement; e != null; e = e.getParent()) {
+ parents.add(e);
+ }
+
+ Collections.reverse(parents);
+
+ // Remove the original termGroupList element
+ parents.remove(parents.size() - 1);
+
+ // Remove the root
+ Element rootElement = parents.remove(0);
+
+ // Copy the root to a new document
+ Document document = DocumentHelper.createDocument(copyElement(rootElement));
+ Element current = document.getRootElement();
+
+ // Copy the remaining parents
+ for (Element parent : parents) {
+ Element parentCopy = copyElement(parent);
+
+ current.add(parentCopy);
+ current = parentCopy;
+ }
+
+ // Add the updated termGroupList element
+
+ current.add(updatedTermGroupListElement);
+
+ String payload = document.asXML();
+
+ return payload;
+ }
+
+ private Element copyElement(Element element) {
+ Element copy = DocumentHelper.createElement(element.getQName());
+ copy.appendAttributes(element);
+
+ return copy;
+ }
+
+ private class ReferencingRecord {
+ private String uri;
+ private Map<String, Set<String>> fields;
+
+ public ReferencingRecord(String uri) {
+ this.uri = uri;
+ this.fields = new HashMap<String, Set<String>>();
+ }
+
+ public String getUri() {
+ return uri;
+ }
+
+ public void setUri(String uri) {
+ this.uri = uri;
+ }
+
+ public Map<String, Set<String>> getFields() {
+ return fields;
+ }
+ }
+}
\ No newline at end of file
--- /dev/null
+/*
+ * This file contains code from Florent Guillaume's nuxeo-reindex-fulltext module.
+ *
+ */
+
+package org.collectionspace.services.batch.nuxeo;
+
+import java.io.File;
+import java.io.Serializable;
+import java.lang.reflect.Field;
+import java.security.Principal;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.commons.lang.StringUtils;
+import org.collectionspace.services.client.PoxPayloadIn;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.common.CollectionSpaceResource;
+import org.collectionspace.services.common.NuxeoBasedResource;
+import org.collectionspace.services.common.StoredValuesUriTemplate;
+import org.collectionspace.services.common.UriTemplateFactory;
+import org.collectionspace.services.common.UriTemplateRegistryKey;
+import org.collectionspace.services.common.invocable.InvocationContext.ListCSIDs;
+import org.collectionspace.services.common.invocable.InvocationContext.Params.Param;
+import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.common.vocabulary.AuthorityResource;
+import org.collectionspace.services.nuxeo.util.ReindexFulltextRoot.ReindexInfo;
+import org.nuxeo.ecm.core.api.AbstractSession;
+import org.nuxeo.ecm.core.api.ClientException;
+import org.nuxeo.ecm.core.api.CoreSession;
+import org.nuxeo.ecm.core.api.IterableQueryResult;
+import org.nuxeo.ecm.core.api.NuxeoException;
+import org.nuxeo.ecm.core.api.NuxeoPrincipal;
+import org.nuxeo.ecm.core.event.EventService;
+import org.nuxeo.ecm.core.query.QueryFilter;
+import org.nuxeo.ecm.core.query.sql.NXQL;
+import org.nuxeo.ecm.core.storage.FulltextConfiguration;
+import org.nuxeo.ecm.core.storage.sql.Model;
+import org.nuxeo.ecm.core.storage.sql.Node;
+import org.nuxeo.ecm.core.storage.sql.Session;
+import org.nuxeo.ecm.core.storage.sql.SimpleProperty;
+import org.nuxeo.ecm.core.storage.sql.coremodel.SQLFulltextExtractorWork;
+import org.nuxeo.ecm.core.storage.sql.coremodel.SQLSession;
+import org.nuxeo.ecm.core.work.api.Work;
+import org.nuxeo.ecm.core.work.api.WorkManager;
+import org.nuxeo.ecm.core.work.api.WorkManager.Scheduling;
+import org.nuxeo.runtime.api.Framework;
+import org.nuxeo.runtime.transaction.TransactionHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class ReindexFullTextBatchJob extends AbstractBatchJob {
+ final Logger log = LoggerFactory.getLogger(ReindexFullTextBatchJob.class);
+
+ public static final String DC_TITLE = "dc:title";
+ public static final int DEFAULT_BATCH_SIZE = 1000;
+ public static final int DEFAULT_START_BATCH = 0;
+ public static final int DEFAULT_END_BATCH = 0;
+ public static final int DEFAULT_BATCH_PAUSE = 0;
+ // Names of marker files that, when present in stopFileDirectory, stop the job
+ // between batches (BATCH_STOP_FILE) or between doctypes (DOCTYPE_STOP_FILE).
+ public static final String BATCH_STOP_FILE = "stopBatch";
+ public static final String DOCTYPE_STOP_FILE = "stopDocType";
+
+ // Batch tuning values; may be overridden by invocation params in run().
+ private int batchSize = DEFAULT_BATCH_SIZE;
+ private int batchPause = DEFAULT_BATCH_PAUSE;
+ private int startBatch = DEFAULT_START_BATCH;
+ private int endBatch = DEFAULT_END_BATCH;
+ private int numAffected = 0;
+
+ private String stopFileDirectory;
+
+ private CoreSession coreSession;
+ private Session session = null;
+ protected FulltextConfiguration fulltextConfiguration;
+
+ private Map<String, NuxeoBasedResource> resourcesByDocType;
+
+ public ReindexFullTextBatchJob() {
+ setSupportedInvocationModes(Arrays.asList(INVOCATION_MODE_NO_CONTEXT, INVOCATION_MODE_SINGLE, INVOCATION_MODE_LIST));
+
+ // Stop files are looked for under the system temp directory, in a
+ // subdirectory named after this class.
+ stopFileDirectory = System.getProperty("java.io.tmpdir") + File.separator + ReindexFullTextBatchJob.class.getName();
+
+ log.debug("stop file directory is " + stopFileDirectory);
+ }
+
+ /**
+ * Runs the reindexing job. In single mode, reindexes the record named by
+ * singleCSID; in list mode, the records named by listCSIDs; with no
+ * context, reindexes all records of the configured doctypes in batches.
+ * Batch size, start/end batch numbers, pause, and additional doctypes are
+ * read from the invocation parameters.
+ */
+ @Override
+ public void run() {
+ setCompletionStatus(STATUS_MIN_PROGRESS);
+
+ numAffected = 0;
+
+ // This is needed so that resource calls (which start transactions)
+ // will work. Otherwise, a javax.transaction.NotSupportedException
+ // ("Nested transactions are not supported") is thrown.
+
+ boolean isTransactionActive = TransactionHelper.isTransactionActive();
+
+ if (isTransactionActive) {
+ TransactionHelper.commitOrRollbackTransaction();
+ }
+
+ try {
+ coreSession = getRepoSession().getCoreSession();
+
+ if (requestIsForInvocationModeSingle()) {
+ String csid = getInvocationContext().getSingleCSID();
+
+ if (csid == null) {
+ throw new Exception("No singleCSID was supplied in invocation context.");
+ }
+
+ String docType = getInvocationContext().getDocType();
+
+ if (StringUtils.isEmpty(docType)) {
+ throw new Exception("No docType was supplied in invocation context.");
+ }
+
+ log.debug("Reindexing " + docType + " record with csid: " + csid);
+
+ reindexDocument(docType, csid);
+ }
+ else if (requestIsForInvocationModeList()) {
+ ListCSIDs list = getInvocationContext().getListCSIDs();
+ List<String> csids = list.getCsid();
+
+ if (csids == null || csids.size() == 0) {
+ throw new Exception("no listCSIDs were supplied");
+ }
+
+ String docType = getInvocationContext().getDocType();
+
+ if (StringUtils.isEmpty(docType)) {
+ throw new Exception("No docType was supplied in invocation context.");
+ }
+
+ log.debug("Reindexing " + csids.size() + " " + docType + " records with csids: " + csids.get(0) + ", ...");
+
+ if (log.isTraceEnabled()) {
+ log.trace(StringUtils.join(csids, ", "));
+ }
+
+ reindexDocuments(docType, csids);
+ }
+ else if (requestIsForInvocationModeNoContext()) {
+ Set<String> docTypes = new LinkedHashSet<String>();
+ String docType;
+
+ docType = getInvocationContext().getDocType();
+
+ if (StringUtils.isNotEmpty(docType)) {
+ docTypes.add(docType);
+ }
+
+ // Read batch size, start and end batches, pause, and additional doctypes from params.
+
+ for (Param param : this.getParams()) {
+ if (param.getKey().equals("batchSize")) {
+ batchSize = Integer.parseInt(param.getValue());
+ }
+ else if (param.getKey().equals("startBatch")) {
+ startBatch = Integer.parseInt(param.getValue());
+ }
+ else if (param.getKey().equals("endBatch")) {
+ endBatch = Integer.parseInt(param.getValue());
+ }
+ else if (param.getKey().equals("batchPause")) {
+ batchPause = Integer.parseInt(param.getValue());
+ }
+ else if (param.getKey().equals("docType")) {
+ docType = param.getValue();
+
+ if (StringUtils.isNotEmpty(docType)) {
+ docTypes.add(docType);
+ }
+ }
+ }
+
+ initResourceMap();
+ reindexDocuments(docTypes);
+ }
+
+ log.debug("reindexing complete");
+
+ InvocationResults results = new InvocationResults();
+ results.setNumAffected(numAffected);
+ results.setUserNote("reindexed " + numAffected + " records");
+
+ setResults(results);
+ setCompletionStatus(STATUS_COMPLETE);
+ }
+ catch(StoppedException e) {
+ // A stop file is a normal way to end the job early, so this is
+ // reported as a completed run, not an error.
+ log.debug("reindexing terminated by stop file");
+
+ InvocationResults results = new InvocationResults();
+ results.setNumAffected(numAffected);
+ results.setUserNote("reindexing terminated by stop file");
+
+ setResults(results);
+ setCompletionStatus(STATUS_COMPLETE);
+ }
+ catch(Exception e) {
+ setErrorResult(e.getMessage());
+ }
+ finally {
+ // This is needed so that when the session is released after this
+ // batch job exits (in BatchDocumentModelHandler), there isn't an exception.
+ // Otherwise, a "Session invoked in a container without a transaction active"
+ // error is thrown from RepositoryJavaClientImpl.releaseRepositorySession.
+
+ if (isTransactionActive) {
+ TransactionHelper.startTransaction();
+ }
+ }
+ }
+
+ /**
+ * Builds the docType-to-resource map for the current tenant from the uri
+ * template registry entries of all known service resources.
+ */
+ private void initResourceMap() {
+ resourcesByDocType = new HashMap<String, NuxeoBasedResource>();
+
+ for (CollectionSpaceResource<PoxPayloadIn, PoxPayloadOut> resource : getResourceMap().values()) {
+ Map<UriTemplateRegistryKey, StoredValuesUriTemplate> entries = resource.getUriRegistryEntries();
+
+ for (UriTemplateRegistryKey key : entries.keySet()) {
+ String docType = key.getDocType();
+ String tenantId = key.getTenantId();
+
+ if (getTenantId().equals(tenantId)) {
+ if (resourcesByDocType.containsKey(docType)) {
+ log.warn("multiple resources found for docType " + docType);
+
+ NuxeoBasedResource currentResource = resourcesByDocType.get(docType);
+ NuxeoBasedResource candidateResource = (NuxeoBasedResource) resource;
+
+ // Favor the resource that isn't an AuthorityResource. This
+ // is really just to deal with Contacts, which are handled
+ // by ContactResource, PersonAuthorityResource, and
+ // OrgAuthorityResource. We want to use ContactResource.
+
+ if (!(candidateResource instanceof AuthorityResource) && (currentResource instanceof AuthorityResource)) {
+ resourcesByDocType.put(docType, candidateResource);
+ }
+
+ log.warn("using " + resourcesByDocType.get(docType));
+ }
+ else {
+ resourcesByDocType.put(docType, (NuxeoBasedResource) resource);
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Reindexes all records of each of the given doctypes. If the set is null
+ * or empty, all doctypes known to the resource map are reindexed.
+ */
+ private void reindexDocuments(Set<String> docTypes) throws Exception {
+ if (docTypes == null) {
+ docTypes = new LinkedHashSet<String>();
+ }
+
+ // If no types are specified, do them all.
+
+ if (docTypes.size() == 0) {
+ docTypes.addAll(getAllDocTypes());
+ }
+
+ for (String docType : docTypes) {
+ reindexDocuments(docType);
+ }
+ }
+
+ /**
+ * Returns all doctypes in the resource map, sorted alphabetically.
+ * Requires initResourceMap() to have been called.
+ */
+ private List<String> getAllDocTypes() {
+ List<String> docTypes = new ArrayList<String>(resourcesByDocType.keySet());
+ Collections.sort(docTypes);
+
+ log.debug("Call to getAllDocTypes() method found: " + StringUtils.join(docTypes, ", "));
+
+ return docTypes;
+ }
+
+ /**
+ * Reindexes all records of the given doctype, in pages of batchSize,
+ * honoring the configured start/end batch numbers and inter-batch pause.
+ * Authority item doctypes are reindexed one vocabulary at a time.
+ *
+ * @throws StoppedException if a stop file is found between batches
+ */
+ private void reindexDocuments(String docType) throws Exception {
+ // Check for a stop file before reindexing the docType.
+
+ if (batchStopFileExists() || docTypeStopFileExists()) {
+ throw new StoppedException();
+ }
+
+ log.debug("reindexing docType " + docType);
+
+ NuxeoBasedResource resource = resourcesByDocType.get(docType);
+
+ // NOTE(review): if no resource is found, only a warning is logged and
+ // execution continues with a null resource -- the calls below will
+ // likely fail. Confirm whether this should return early instead.
+ if (resource == null) {
+ log.warn("No service resource found for docType " + docType);
+ }
+
+ boolean isAuthorityItem = false;
+
+ if (resource instanceof AuthorityResource) {
+ UriTemplateRegistryKey key = new UriTemplateRegistryKey(getTenantId(), docType);
+ StoredValuesUriTemplate uriTemplate = resource.getUriRegistryEntries().get(key);
+
+ log.debug("uriTemplateType=" + uriTemplate.getUriTemplateType());
+
+ if (uriTemplate.getUriTemplateType() == UriTemplateFactory.ITEM) {
+ isAuthorityItem = true;
+ }
+ }
+
+ int pageSize = batchSize;
+
+ // The supplied start and end batch numbers start with 1, but the page number starts with 0.
+ int startPage = (startBatch > 0) ? startBatch - 1 : 0;
+ int endPage = (endBatch > 0) ? endBatch - 1 : Integer.MAX_VALUE;
+
+ if (isAuthorityItem) {
+ List<String> vocabularyCsids = getVocabularyCsids((AuthorityResource<?, ?>) resource);
+
+ for (String vocabularyCsid : vocabularyCsids) {
+ int pageNum = startPage;
+ List<String> csids = null;
+
+ log.debug("Reindexing vocabulary of " + docType + " with csid " + vocabularyCsid);
+
+ do {
+ // Check for a stop file before reindexing the batch.
+
+ if (batchStopFileExists()) {
+ throw new StoppedException();
+ }
+
+ csids = findAllAuthorityItems((AuthorityResource<?, ?>) resource, vocabularyCsid, pageSize, pageNum, "collectionspace_core:createdAt, ecm:name");
+
+ if (csids.size() > 0) {
+ log.debug("reindexing vocabulary of " + docType +" with csid " + vocabularyCsid + ", batch " + (pageNum + 1) + ": " + csids.size() + " records starting with " + csids.get(0));
+
+ // Pause for the configured amount of time.
+
+ if (batchPause > 0) {
+ log.trace("pausing " + batchPause + " ms");
+
+ Thread.sleep(batchPause);
+ }
+
+ reindexDocuments(docType, csids);
+ }
+
+ pageNum++;
+ }
+ while(csids.size() == pageSize && pageNum <= endPage);
+ }
+ } else {
+ int pageNum = startPage;
+ List<String> csids = null;
+
+ do {
+ // Check for a stop file before reindexing the batch.
+
+ if (batchStopFileExists()) {
+ throw new StoppedException();
+ }
+
+ csids = findAll(resource, pageSize, pageNum, "collectionspace_core:createdAt, ecm:name");
+
+ if (csids.size() > 0) {
+ log.debug("reindexing " + docType +" batch " + (pageNum + 1) + ": " + csids.size() + " records starting with " + csids.get(0));
+
+ // Pause for the configured amount of time.
+
+ if (batchPause > 0) {
+ log.trace("pausing " + batchPause + " ms");
+
+ Thread.sleep(batchPause);
+ }
+
+ reindexDocuments(docType, csids);
+ }
+
+ pageNum++;
+ }
+ while(csids.size() == pageSize && pageNum <= endPage);
+ }
+ }
+
+ /**
+ * Reindexes a single record of the given doctype.
+ */
+ private void reindexDocument(String docType, String csid) throws Exception {
+ reindexDocuments(docType, Arrays.asList(csid));
+ }
+
+ /**
+ * Reindexes the records with the given csids, looking up their nuxeo ids
+ * and primary types via an NXQL query and handing the result to doBatch.
+ *
+ * NOTE(review): docType and the quoted csids are concatenated directly
+ * into the NXQL query; quoteList does not escape embedded quotes -- this
+ * assumes csids and doctypes never contain apostrophes.
+ */
+ private void reindexDocuments(String docType, List<String> csids) throws Exception {
+ // Convert the csids to structs of nuxeo id and type, as expected
+ // by doBatch.
+
+ if (csids == null || csids.size() == 0) {
+ return;
+ }
+
+ getLowLevelSession();
+ List<ReindexInfo> infos = new ArrayList<ReindexInfo>();
+
+ String query = "SELECT ecm:uuid, ecm:primaryType FROM Document " +
+ "WHERE ecm:name IN (" + StringUtils.join(quoteList(csids), ',') + ") " +
+ "AND ecm:primaryType LIKE '" + docType + "%' " +
+ "AND ecm:isCheckedInVersion = 0 AND ecm:isProxy = 0";
+ IterableQueryResult result = session.queryAndFetch(query, NXQL.NXQL, QueryFilter.EMPTY);
+
+ try {
+ for (Map<String, Serializable> map : result) {
+ String id = (String) map.get(NXQL.ECM_UUID);
+ String type = (String) map.get(NXQL.ECM_PRIMARYTYPE);
+ infos.add(new ReindexInfo(id, type));
+ }
+ } finally {
+ result.close();
+ }
+
+ if (csids.size() != infos.size()) {
+ log.warn("didn't find info for all the supplied csids: expected " + csids.size() + ", found " + infos.size());
+ }
+
+ if (log.isTraceEnabled()) {
+ for (ReindexInfo info : infos) {
+ log.trace(info.type + " " + info.id);
+ }
+ }
+
+ doBatch(infos);
+
+ numAffected += infos.size();
+ }
+
+ /**
+ * Wraps each value in single quotes, for use in an NXQL IN clause.
+ * The values are assumed to contain no quote characters (they are csids).
+ *
+ * @param values the strings to quote
+ * @return a new list containing each value surrounded by single quotes
+ */
+ private List<String> quoteList(List<String> values) {
+ List<String> result = new ArrayList<String>(values.size());
+
+ for (int i = 0; i < values.size(); i++) {
+ result.add('\'' + values.get(i) + '\'');
+ }
+
+ return result;
+ }
+
+ /**
+ * Returns true if a batch-level stop file is present in the configured
+ * stop file directory; false when no directory is configured.
+ */
+ private boolean batchStopFileExists() {
+ if (stopFileDirectory == null) {
+ return false;
+ }
+
+ return new File(stopFileDirectory + File.separator + BATCH_STOP_FILE).isFile();
+ }
+
+ /**
+ * Returns true if a doctype-level stop file is present in the configured
+ * stop file directory; false when no directory is configured.
+ */
+ private boolean docTypeStopFileExists() {
+ if (stopFileDirectory == null) {
+ return false;
+ }
+
+ return new File(stopFileDirectory + File.separator + DOCTYPE_STOP_FILE).isFile();
+ }
+
+ /**
+ * Thrown internally when a stop file is detected, to unwind out of the
+ * reindexing loops and halt the job.
+ */
+ private static class StoppedException extends Exception {
+ private static final long serialVersionUID = 8813189331855935939L;
+
+ public StoppedException() {
+
+ }
+ }
+
+ /*
+ * The code below this comment is copied from the nuxeo-reindex-fulltext
+ * module. The original copyright is below.
+ */
+
+ /*
+ * (C) Copyright 2012 Nuxeo SA (http://nuxeo.com/) and contributors.
+ *
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the GNU Lesser General Public License
+ * (LGPL) version 2.1 which accompanies this distribution, and is available at
+ * http://www.gnu.org/licenses/lgpl.html
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * Contributors:
+ * Florent Guillaume
+ */
+
+ /**
+ * Launches a fulltext reindexing of the database. Only administrators may
+ * invoke this; any other principal gets "unauthorized".
+ *
+ * @param batchSize the batch size, defaults to 100
+ * @param batch if present, the batch number to process instead of all
+ * batches; starts at 1
+ * @param query the NXQL query selecting the documents to reindex; if
+ * null, a default query selecting all non-proxy, non-deleted
+ * documents is used
+ * @return when done, ok + the total number of docs
+ */
+ public String reindexFulltext(int batchSize, int batch, String query) throws Exception {
+ Principal principal = coreSession.getPrincipal();
+ if (!(principal instanceof NuxeoPrincipal)) {
+ return "unauthorized";
+ }
+ NuxeoPrincipal nuxeoPrincipal = (NuxeoPrincipal) principal;
+ if (!nuxeoPrincipal.isAdministrator()) {
+ return "unauthorized";
+ }
+
+ log("Reindexing starting");
+ if (batchSize <= 0) {
+ batchSize = DEFAULT_BATCH_SIZE;
+ }
+
+ //
+ // A default query that gets ALL the documents
+ //
+ if (query == null) {
+ query = "SELECT ecm:uuid, ecm:primaryType FROM Document"
+ + " WHERE ecm:isProxy = 0"
+ + " AND ecm:currentLifeCycleState <> 'deleted'"
+ + " ORDER BY ecm:uuid";
+ }
+
+ List<ReindexInfo> infos = getInfos(query);
+ int size = infos.size();
+ int numBatches = (size + batchSize - 1) / batchSize;
+ // An out-of-range batch number means "process all batches".
+ if (batch < 0 || batch > numBatches) {
+ batch = 0; // all
+ }
+ batch--; // convert 1-based batch number to 0-based index (-1 = all)
+
+ log("Reindexing of %s documents, batch size: %s, number of batches: %s",
+ size, batchSize, numBatches);
+ if (batch >= 0) {
+ log("Reindexing limited to batch: %s", batch + 1);
+ }
+
+ //
+ // Commit and close the transaction that was started by our standard request lifecycle.
+ //
+ boolean tx = TransactionHelper.isTransactionActive();
+ if (tx) {
+ TransactionHelper.commitOrRollbackTransaction();
+ }
+
+ int n = 0;
+ int errs = 0;
+ for (int i = 0; i < numBatches; i++) {
+ if (batch >= 0 && batch != i) {
+ continue;
+ }
+ int pos = i * batchSize;
+ int end = pos + batchSize;
+ if (end > size) {
+ end = size;
+ }
+ List<ReindexInfo> batchInfos = infos.subList(pos, end);
+ log("Reindexing batch %s/%s, first id: %s", i + 1, numBatches,
+ batchInfos.get(0).id);
+ try {
+ doBatch(batchInfos);
+ } catch (NuxeoException e) {
+ // (i + 1) must be parenthesized: without the parentheses, string
+ // concatenation binds first and batch 0 is logged as "batch 01".
+ log.error("Error processing batch " + (i + 1), e);
+ errs++;
+ }
+ n += end - pos;
+ }
+
+ log("Reindexing done");
+ //
+ // Start a new transaction so our standard request lifecycle can complete.
+ //
+ if (tx) {
+ TransactionHelper.startTransaction();
+ }
+ return "done: " + n + " total: " + size + " batch_errors: " + errs;
+ }
+
+ /** Formats the given message and emits it at WARN level. */
+ protected void log(String format, Object... args) {
+ String message = String.format(format, args);
+
+ log.warn(message);
+ }
+
+ /**
+ * Obtains the low-level VCS session and fulltext configuration by
+ * reflecting into the core session internals, storing them in the
+ * {@code session} and {@code fulltextConfiguration} fields.
+ *
+ * This has to be called once the transaction has been started.
+ */
+ protected void getLowLevelSession() throws Exception {
+ try {
+ // Unwrap the SQLSession backing the core session, then read its
+ // private "session" field to reach the low-level storage Session.
+ // This is fragile by nature: it depends on Nuxeo internals.
+ SQLSession s = (SQLSession) ((AbstractSession) coreSession).getSession();
+ Field f2 = SQLSession.class.getDeclaredField("session");
+ f2.setAccessible(true);
+ session = (Session) f2.get(s);
+ fulltextConfiguration = session.getModel().getFulltextConfiguration();
+ } catch (ReflectiveOperationException e) {
+ throw new NuxeoException(e);
+ }
+ }
+
+ /**
+ * Runs the given NXQL query and collects a (uuid, primary type) pair for
+ * each returned row.
+ *
+ * @param query an NXQL query selecting ecm:uuid and ecm:primaryType
+ * @return the collected ReindexInfo entries, in query order
+ */
+ protected List<ReindexInfo> getInfos(String query) throws Exception {
+ getLowLevelSession();
+
+ List<ReindexInfo> results = new ArrayList<ReindexInfo>();
+ IterableQueryResult rows = session.queryAndFetch(query, NXQL.NXQL,
+ QueryFilter.EMPTY);
+
+ try {
+ for (Map<String, Serializable> row : rows) {
+ results.add(new ReindexInfo(row.get(NXQL.ECM_UUID), (String) row.get(NXQL.ECM_PRIMARYTYPE)));
+ }
+ } finally {
+ rows.close();
+ }
+
+ return results;
+ }
+
+ /**
+ * Reindexes one batch of documents: runs the synchronous part inside its
+ * own transaction, then schedules the asynchronous fulltext extraction
+ * work and waits for it to complete.
+ *
+ * @param infos the (id, type) pairs of the documents to reindex
+ */
+ protected void doBatch(List<ReindexInfo> infos) throws Exception {
+ boolean tx;
+ boolean ok;
+
+ // transaction for the sync batch
+ tx = TransactionHelper.startTransaction();
+
+ getLowLevelSession(); // for fulltextInfo
+ List<Serializable> ids = new ArrayList<Serializable>(infos.size());
+ Set<String> asyncIds = new HashSet<String>();
+ Model model = session.getModel();
+ for (ReindexInfo info : infos) {
+ ids.add(info.id);
+ // Only types that are fulltext-indexable get async extraction work.
+ if (fulltextConfiguration.isFulltextIndexable(info.type)) {
+ asyncIds.add(model.idToString(info.id));
+ }
+ }
+ ok = false;
+ try {
+ runSyncBatch(ids, asyncIds);
+ ok = true;
+ } finally {
+ if (tx) {
+ // Roll back the sync work if it threw; otherwise commit.
+ if (!ok) {
+ TransactionHelper.setTransactionRollbackOnly();
+ log.error("Rolling back sync");
+ }
+ TransactionHelper.commitOrRollbackTransaction();
+ }
+ }
+
+ runAsyncBatch(asyncIds);
+
+ // wait for async completion after transaction commit
+ Framework.getLocalService(EventService.class).waitForAsyncCompletion();
+ }
+
+ /*
+ * Do this at the low-level session level because we may have to modify
+ * things like versions which aren't usually modifiable, and it's also good
+ * to bypass all listeners.
+ */
+ protected void runSyncBatch(List<Serializable> ids, Set<String> asyncIds) throws Exception {
+ getLowLevelSession();
+
+ session.getNodesByIds(ids); // batch fetch
+
+ Map<Serializable, String> titles = new HashMap<Serializable, String>();
+ for (Serializable id : ids) {
+ Node node = session.getNodeById(id);
+ // NOTE(review): asyncIds holds stringified ids (via model.idToString);
+ // if the raw id here is not a String, this contains() check can never
+ // match — confirm the id type for this repository backend.
+ if (asyncIds.contains(id)) {
+ node.setSimpleProperty(Model.FULLTEXT_JOBID_PROP, id);
+ }
+ SimpleProperty prop;
+ try {
+ prop = node.getSimpleProperty(DC_TITLE);
+ } catch (IllegalArgumentException e) {
+ // Node type has no dc:title property; skip the dirtying step.
+ continue;
+ }
+ // Append a space to dc:title to mark the document as modified,
+ // remembering the original value so it can be restored below.
+ String title = (String) prop.getValue();
+ titles.put(id, title);
+ prop.setValue(title + " ");
+ }
+ session.save();
+
+ // Second pass: restore the original titles.
+ for (Serializable id : ids) {
+ Node node = session.getNodeById(id);
+ SimpleProperty prop;
+ try {
+ prop = node.getSimpleProperty(DC_TITLE);
+ } catch (IllegalArgumentException e) {
+ continue;
+ }
+ prop.setValue(titles.get(id));
+ }
+ session.save();
+ }
+
+ /**
+ * Schedules a fulltext extraction work item for each of the given document
+ * ids. Does nothing when the set is empty.
+ *
+ * @param asyncIds the stringified ids of the documents needing extraction
+ */
+ protected void runAsyncBatch(Set<String> asyncIds) {
+ if (asyncIds.isEmpty()) {
+ return;
+ }
+
+ WorkManager workManager = Framework.getLocalService(WorkManager.class);
+ String repositoryName = coreSession.getRepositoryName();
+
+ for (String asyncId : asyncIds) {
+ // schedule immediately, we're outside a transaction
+ Work work = new SQLFulltextExtractorWork(repositoryName, asyncId);
+ workManager.schedule(work, Scheduling.IF_NOT_SCHEDULED, false);
+ }
+ }
+}
--- /dev/null
+package org.collectionspace.services.batch.nuxeo;
+
+import java.net.URISyntaxException;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.commons.lang.StringUtils;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.client.TaxonomyAuthorityClient;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectBotGardenConstants;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectConstants;
+import org.collectionspace.services.common.api.RefName;
+import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.common.vocabulary.AuthorityResource;
+import org.collectionspace.services.taxonomy.nuxeo.TaxonBotGardenConstants;
+import org.collectionspace.services.taxonomy.nuxeo.TaxonConstants;
+import org.dom4j.DocumentException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A batch job that sets the access code on taxonomy records. The single CSID context is supported.
+ *
+ * If the document is a taxon record, the access codes of the taxon record and all of its descendant
+ * (narrower context) records are updated.
+ *
+ * If the document is a collectionobject, the access codes of all taxon records referenced by the
+ * collectionobject's taxonomic identification are updated, and propagated up the taxon
+ * hierarchy to the ancestors (broader contexts) of each taxon record.
+ *
+ * @author ray
+ *
+ */
+public class UpdateAccessCodeBatchJob extends AbstractBatchJob {
+ final Logger logger = LoggerFactory.getLogger(UpdateAccessCodeBatchJob.class);
+
+ private final String[] TAXON_FIELD_NAME_PARTS = CollectionObjectBotGardenConstants.TAXON_FIELD_NAME.split("\\/");
+ private final String TAXON_FIELD_NAME_WITHOUT_PATH = TAXON_FIELD_NAME_PARTS[TAXON_FIELD_NAME_PARTS.length - 1];
+
+ /** Configures this job to support only the single-CSID invocation mode. */
+ public UpdateAccessCodeBatchJob() {
+ this.setSupportedInvocationModes(Arrays.asList(INVOCATION_MODE_SINGLE));
+ }
+
+ /**
+ * Entry point for the batch job. Validates the invocation mode and context
+ * csid, then dispatches on the context document type: taxon records get a
+ * deep access-code update; collectionobjects get their referenced taxons'
+ * access codes updated and propagated up the hierarchy.
+ */
+ @Override
+ public void run() {
+ setCompletionStatus(STATUS_MIN_PROGRESS);
+
+ try {
+ if (this.requestIsForInvocationModeSingle()) {
+ String csid = getInvocationContext().getSingleCSID();
+
+ if (StringUtils.isEmpty(csid)) {
+ throw new Exception("Missing context csid");
+ }
+
+ String docType = getInvocationContext().getDocType();
+
+ if (docType.equals(TaxonConstants.NUXEO_DOCTYPE)) {
+ setResults(updateAccessCode(csid, true));
+ //setResults(updateParentAccessCode(csid, true));
+ }
+ else if (docType.equals(CollectionObjectConstants.NUXEO_DOCTYPE)) {
+ setResults(updateReferencedAccessCodes(csid, true));
+ }
+ else {
+ throw new Exception("Unsupported document type: " + docType);
+ }
+ }
+ else {
+ throw new Exception("Unsupported invocation mode: " + this.getInvocationContext().getMode());
+ }
+
+ setCompletionStatus(STATUS_COMPLETE);
+ }
+ catch(Exception e) {
+ // Any failure is surfaced through the batch error status/info.
+ setCompletionStatus(STATUS_ERROR);
+ setErrorInfo(new InvocationError(INT_ERROR_STATUS, e.getMessage()));
+ }
+ }
+
+
+ /**
+ * Updates the access code of the specified taxon record.
+ *
+ * @param taxonRefNameOrCsid The refname or csid of the taxon record.
+ * @param deep If true, update the access codes of all descendant (narrower context)
+ * taxon records first, so the parent's access code is calculated
+ * from up-to-date child values. If false, only the specified taxon
+ * record is updated, relying on the accuracy of the children's
+ * existing access codes.
+ * @return The results of the invocation.
+ * @throws URISyntaxException
+ * @throws DocumentException
+ */
+ public InvocationResults updateAccessCode(String taxonRefNameOrCsid, boolean deep) throws URISyntaxException, DocumentException {
+ UpdateAccessCodeResults updateResults = updateAccessCode(taxonRefNameOrCsid, deep, false);
+ InvocationResults results = new InvocationResults();
+
+ results.setNumAffected(updateResults.getNumAffected());
+
+ if (updateResults.isChanged()) {
+ results.setUserNote("access code changed to " + updateResults.getAccessCode());
+ }
+ else {
+ results.setUserNote("access code not changed");
+ }
+
+ return results;
+ }
+
+ /**
+ * Updates the access code of the parent (broader context) of the specified taxon record.
+ *
+ * @param taxonCsid The csid of the taxon record.
+ * @param propagate If true, propagate the access code up the taxon hierarchy to
+ * all ancestors of the taxon record. The propagation stops when
+ * the new value of the access code is the same as the old value,
+ * or when a root node (a node with no broader context) is reached.
+ *
+ * If false, update only the access code of the parent.
+ * @return The results of the invocation.
+ * @throws URISyntaxException
+ * @throws DocumentException
+ */
+ public InvocationResults updateParentAccessCode(String taxonCsid, boolean propagate) throws URISyntaxException, DocumentException {
+ PoxPayloadOut taxonPayload = findTaxonByCsid(taxonCsid);
+ // The refname is only read for the debug message below.
+ String taxonRefName = getFieldValue(taxonPayload, TaxonConstants.REFNAME_SCHEMA_NAME, TaxonConstants.REFNAME_FIELD_NAME);
+ String accessCode = getFieldValue(taxonPayload, TaxonBotGardenConstants.ACCESS_CODE_SCHEMA_NAME, TaxonBotGardenConstants.ACCESS_CODE_FIELD_NAME);
+
+ logger.debug("updating parent access code: taxonRefName=" + taxonRefName + " propagate=" + propagate + " accessCode=" + accessCode);
+
+ UpdateAccessCodeResults updateResults = updateParentAccessCode(taxonCsid, accessCode, propagate);
+
+ InvocationResults results = new InvocationResults();
+ results.setNumAffected(updateResults.getNumAffected());
+ results.setUserNote(results.getNumAffected() + " access codes changed");
+
+ return results;
+ }
+
+ /**
+ * Updates the access codes of all taxon records that are referenced in the taxonomic identification
+ * field of the specified collectionobject.
+ *
+ * @param collectionObjectCsid The csid of the collectionobject.
+ * @param propagate If true, propagate the access code up the taxon hierarchy to
+ * the ancestors of each referenced taxon record. The propagation stops when
+ * the new value of the access code is the same as the old value,
+ * or when a root node (a node with no broader context) is reached.
+ *
+ * If false, update only the access codes of the taxon records
+ * that are directly referenced.
+ * @return The results of the invocation.
+ * @throws URISyntaxException
+ * @throws DocumentException
+ */
+ public InvocationResults updateReferencedAccessCodes(String collectionObjectCsid, boolean propagate) throws URISyntaxException, DocumentException {
+ PoxPayloadOut collectionObjectPayload = findCollectionObjectByCsid(collectionObjectCsid);
+
+ // A missing dead flag is treated as alive.
+ String deadFlag = getFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.DEAD_FLAG_SCHEMA_NAME,
+ CollectionObjectBotGardenConstants.DEAD_FLAG_FIELD_NAME);
+ boolean isAlive = (deadFlag == null) || (!deadFlag.equalsIgnoreCase("true"));
+
+ logger.debug("updating referenced access codes: collectionObjectCsid=" + collectionObjectCsid + " propagate=" + propagate + " isAlive=" + isAlive);
+
+ List<String> taxonRefNames = getFieldValues(collectionObjectPayload, CollectionObjectBotGardenConstants.TAXON_SCHEMA_NAME,
+ CollectionObjectBotGardenConstants.TAXON_FIELD_NAME);
+ long numAffected = 0;
+
+ for (String taxonRefName : taxonRefNames) {
+ PoxPayloadOut taxonPayload = findTaxonByRefName(taxonRefName);
+ // isAlive is passed as the knownAlive hint: if this collectionobject
+ // is alive, each referenced taxon is known to have a living example.
+ UpdateAccessCodeResults updateResults = updateAccessCode(taxonPayload, false, isAlive);
+
+ if (updateResults.isChanged()) {
+ numAffected += updateResults.getNumAffected();
+
+ if (propagate) {
+ UpdateAccessCodeResults parentUpdateResults = updateParentAccessCode(getCsid(taxonPayload), updateResults.getAccessCode(), true);
+
+ numAffected += parentUpdateResults.getNumAffected();
+ }
+ }
+ }
+
+ InvocationResults results = new InvocationResults();
+ results.setNumAffected(numAffected);
+ results.setUserNote(numAffected + " access codes changed");
+
+ return results;
+ }
+
+ /**
+ * Updates the access code of the specified taxon record. The access code is determined by
+ * examining all collectionobjects that have a taxonomic identification that matches the
+ * refname of the taxon record, as well as the access codes of child (narrower context)
+ * taxon records. If all referencing collectionobjects are dead (as determined
+ * by the dead flag), and all child taxon records are dead (as determined by their access
+ * codes), then the access code is set to Dead. If any matching collectionobjects
+ * are not dead, or any child taxons are not dead, and the access code is currently Dead,
+ * the access code is set to Unrestricted. Otherwise, the access code is not changed.
+ *
+ * @param taxonPayload The services payload of the taxon record.
+ * @param deep If true, update the access code of all descendant taxon records.
+ * On a deep update, the access codes of all descendant
+ * records are updated first, before calculating the access code of the parent.
+ * This ensures that the access codes of children are up-to-date, and can be
+ * used to calculate an up-to-date value for the parent.
+ *
+ * If false, only the specified taxon record is updated. The calculation
+ * of the access code uses the access codes of child taxon records, so
+ * an accurate result depends on the accuracy of the children's access codes.
+ * @param knownAlive A hint that a child taxon of the specified taxon is known to be
+ * alive, or that a collectionobject of the specified taxon is known to be
+ * alive. This parameter allows for optimization when propagating
+ * access code changes up the hierarchy; if a child taxon or
+ * referencing collectionobject is known to be alive, and the
+ * current access code is Dead, then the access code can be changed
+ * to Unrestricted without examining any other records.
+ * @return The results of the update.
+ * @throws DocumentException
+ * @throws URISyntaxException
+ */
+ public UpdateAccessCodeResults updateAccessCode(PoxPayloadOut taxonPayload, boolean deep, boolean knownAlive) throws URISyntaxException, DocumentException {
+ UpdateAccessCodeResults results = new UpdateAccessCodeResults();
+ boolean foundAlive = knownAlive;
+
+ String taxonCsid = getCsid(taxonPayload);
+ String taxonRefName = getFieldValue(taxonPayload, TaxonConstants.REFNAME_SCHEMA_NAME, TaxonConstants.REFNAME_FIELD_NAME);
+ String accessCode = getFieldValue(taxonPayload, TaxonBotGardenConstants.ACCESS_CODE_SCHEMA_NAME, TaxonBotGardenConstants.ACCESS_CODE_FIELD_NAME);
+
+ logger.debug("updating access code: taxonRefName=" + taxonRefName + " deep=" + deep + " knownAlive=" + knownAlive);
+
+ // The access code field may be absent; normalize to empty string.
+ if (accessCode == null) {
+ accessCode = "";
+ }
+
+ List<String> childTaxonCsids = findNarrower(taxonCsid);
+
+ if (deep) {
+ long numChildrenChanged = 0;
+
+ // Update the access code on all the children, and track whether any are alive.
+
+ for (String childTaxonCsid : childTaxonCsids) {
+ UpdateAccessCodeResults childResults = updateAccessCode(childTaxonCsid, true, false);
+
+ if (!childResults.isSoftDeleted()) {
+ String childAccessCode = childResults.getAccessCode();
+ // Compare with the constant on the left so a null child access
+ // code is treated as alive instead of throwing a
+ // NullPointerException (the field can be missing, as the
+ // accessCode null-check above shows).
+ boolean isChildAlive = !TaxonBotGardenConstants.ACCESS_CODE_DEAD_VALUE.equals(childAccessCode);
+
+ if (isChildAlive) {
+ foundAlive = true;
+ }
+
+ if (childResults.isChanged()) {
+ numChildrenChanged++;
+ }
+ }
+ }
+
+ results.setNumAffected(numChildrenChanged);
+ }
+ else {
+ if (!foundAlive) {
+ // Check if any of the children are alive.
+
+ for (String childTaxonCsid : childTaxonCsids) {
+ PoxPayloadOut childTaxonPayload = findTaxonByCsid(childTaxonCsid);
+
+ String childAccessCode = getFieldValue(childTaxonPayload, TaxonBotGardenConstants.ACCESS_CODE_SCHEMA_NAME,
+ TaxonBotGardenConstants.ACCESS_CODE_FIELD_NAME);
+ // Constant-first equals: getFieldValue may return null when the
+ // child has no access code, which previously caused an NPE here.
+ boolean isChildAlive = !TaxonBotGardenConstants.ACCESS_CODE_DEAD_VALUE.equals(childAccessCode);
+
+ if (isChildAlive) {
+ foundAlive = true;
+ break;
+ }
+ }
+ }
+ }
+
+ if (!foundAlive) {
+ // Check if any examples of this taxon are alive.
+
+ RefName.AuthorityItem item = RefName.AuthorityItem.parse(taxonRefName);
+ String vocabularyShortId = item.getParentShortIdentifier();
+
+ List<String> collectionObjectCsids = findReferencingCollectionObjects(TaxonomyAuthorityClient.SERVICE_NAME, vocabularyShortId, taxonCsid,
+ CollectionObjectBotGardenConstants.TAXON_SCHEMA_NAME + ":" + TAXON_FIELD_NAME_WITHOUT_PATH);
+
+ for (String collectionObjectCsid : collectionObjectCsids) {
+ PoxPayloadOut collectionObjectPayload = findCollectionObjectByCsid(collectionObjectCsid);
+
+ String deadFlag = getFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.DEAD_FLAG_SCHEMA_NAME,
+ CollectionObjectBotGardenConstants.DEAD_FLAG_FIELD_NAME);
+ boolean isDead = (deadFlag != null) && (deadFlag.equalsIgnoreCase("true"));
+
+ if (!isDead) {
+ foundAlive = true;
+ break;
+ }
+ }
+ }
+
+ String newAccessCode;
+
+ // The access code only needs to be changed if:
+ //
+ // 1. There is a living example of the taxon, but the access code is dead.
+ // 2. There are no living examples, but the access code is not dead.
+ //
+ // Otherwise, the access code should stay the same. In particular, if there is a
+ // living example, and the access code is not dead, the current value of unrestricted
+ // or restricted should be retained.
+
+ if (foundAlive && (StringUtils.isEmpty(accessCode) || accessCode.equals(TaxonBotGardenConstants.ACCESS_CODE_DEAD_VALUE))) {
+ newAccessCode = TaxonBotGardenConstants.ACCESS_CODE_UNRESTRICTED_VALUE;
+ }
+ else if (!foundAlive) {
+ newAccessCode = TaxonBotGardenConstants.ACCESS_CODE_DEAD_VALUE;
+ }
+ else {
+ newAccessCode = accessCode;
+ }
+
+ if (!newAccessCode.equals(accessCode)) {
+ String inAuthority = getFieldValue(taxonPayload, TaxonConstants.IN_AUTHORITY_SCHEMA_NAME, TaxonConstants.IN_AUTHORITY_FIELD_NAME);
+
+ setAccessCode(inAuthority, taxonCsid, newAccessCode);
+
+ results.setChanged(true);
+ results.setNumAffected(results.getNumAffected() + 1);
+ }
+
+ results.setAccessCode(newAccessCode);
+
+ return results;
+ }
+
+ /**
+ * Updates the access code of the taxon record with the specified refname or csid.
+ *
+ * @param taxonRefNameOrCsid
+ * @param deep
+ * @param knownAlive
+ * @return
+ * @throws URISyntaxException
+ * @throws DocumentException
+ */
+ public UpdateAccessCodeResults updateAccessCode(String taxonRefNameOrCsid, boolean deep, boolean knownAlive) throws URISyntaxException, DocumentException {
+ PoxPayloadOut taxonPayload;
+
+ if (RefName.AuthorityItem.parse(taxonRefNameOrCsid) == null) {
+ taxonPayload = findTaxonByCsid(taxonRefNameOrCsid);
+ }
+ else {
+ taxonPayload = findTaxonByRefName(taxonRefNameOrCsid);
+ }
+
+ return updateAccessCode(taxonPayload, deep, knownAlive);
+ }
+
+ /**
+ * Updates the access code of the parent (broader context) of the specified taxon record,
+ * whose access code is assumed to be a specified value.
+ *
+ * @param taxonCsid The csid of the taxon record.
+ * @param accessCode The access code of the taxon record.
+ * @param propagate If true, propagate the access code up the taxon hierarchy to
+ * all ancestors of the taxon record. The propagation stops when
+ * the new value of the access code is the same as the old value,
+ * or when a root node (a node with no broader context) is reached.
+ *
+ * If false, update only the access code of the parent.
+ * @return The results of the update.
+ * @throws URISyntaxException
+ * @throws DocumentException
+ */
+ public UpdateAccessCodeResults updateParentAccessCode(String taxonCsid, String accessCode, boolean propagate) throws URISyntaxException, DocumentException {
+ UpdateAccessCodeResults results = new UpdateAccessCodeResults();
+ String parentTaxonCsid = findBroader(taxonCsid);
+ long numAffected = 0;
+
+ logger.debug("updating parent access code: taxonCsid=" + taxonCsid + " accessCode=" + accessCode + " propagate=" + propagate);
+
+ // A null parent csid means this taxon is a root node; nothing to do.
+ if (parentTaxonCsid != null) {
+ boolean isAlive = (accessCode == null) || !accessCode.equals(TaxonBotGardenConstants.ACCESS_CODE_DEAD_VALUE);
+
+ UpdateAccessCodeResults parentUpdateResults = updateAccessCode(parentTaxonCsid, false, isAlive);
+
+ if (parentUpdateResults.isChanged()) {
+ // Except for numAffected, the result fields are probably not all that useful in this situation.
+ // Set the changed flag to whether the immediate parent was changed, and the access code to
+ // the immediate parent's.
+
+ results.setAccessCode(parentUpdateResults.getAccessCode());
+ results.setChanged(true);
+
+ numAffected += parentUpdateResults.getNumAffected();
+
+ // Propagation stops naturally when a parent's access code does not
+ // change, since this branch is only entered on a change.
+ if (propagate) {
+ UpdateAccessCodeResults grandparentUpdateResults = updateParentAccessCode(parentTaxonCsid, parentUpdateResults.getAccessCode(), true);
+ numAffected += grandparentUpdateResults.getNumAffected();
+ }
+ }
+ }
+
+ results.setNumAffected(numAffected);
+
+ return results;
+ }
+
+ /**
+ * Sets the access code of the specified taxon record to the specified value,
+ * by sending a partial update payload to the taxonomy authority resource.
+ *
+ * @param authorityCsid The csid of the authority containing the taxon record.
+ * @param taxonCsid The csid of the taxon record.
+ * @param accessCode The value of the access code.
+ * @throws URISyntaxException
+ */
+ private void setAccessCode(String authorityCsid, String taxonCsid, String accessCode) throws URISyntaxException {
+ // Minimal POX payload containing only the naturalhistory schema part
+ // with the access code field; other parts are left untouched.
+ String updatePayload =
+ "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
+ "<document name=\"taxon\">" +
+ "<ns2:taxon_naturalhistory xmlns:ns2=\"http://collectionspace.org/services/taxonomy/domain/naturalhistory\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">" +
+ getFieldXml(TaxonBotGardenConstants.ACCESS_CODE_FIELD_NAME, accessCode) +
+ "</ns2:taxon_naturalhistory>" +
+ "</document>";
+
+ AuthorityResource<?, ?> resource = (AuthorityResource<?, ?>) getResourceMap().get(TaxonomyAuthorityClient.SERVICE_NAME);
+ resource.updateAuthorityItem(getResourceMap(), createUriInfo(), authorityCsid, taxonCsid, updatePayload);
+ }
+
+ /**
+ * Value holder for the outcome of an access code update: whether the record
+ * was soft-deleted, whether its access code changed, the resulting access
+ * code, and how many records were affected in total.
+ */
+ private class UpdateAccessCodeResults {
+ // True when the taxon record is soft-deleted and should be ignored.
+ // NOTE(review): setSoftDeleted is never invoked in this class as shown,
+ // so this is always false here — confirm intended usage.
+ private boolean isSoftDeleted = false;
+ // True when the access code value was actually changed.
+ private boolean isChanged = false;
+ // The (possibly unchanged) access code after the update.
+ private String accessCode = null;
+ // Count of records whose access code was changed, including descendants.
+ private long numAffected = 0;
+
+ public boolean isSoftDeleted() {
+ return isSoftDeleted;
+ }
+
+ /**
+ * @param isSoftDeleted whether the record was found to be soft-deleted
+ */
+ public void setSoftDeleted(boolean isSoftDeleted) {
+ this.isSoftDeleted = isSoftDeleted;
+ }
+
+ public boolean isChanged() {
+ return isChanged;
+ }
+
+ public void setChanged(boolean isChanged) {
+ this.isChanged = isChanged;
+ }
+
+ public String getAccessCode() {
+ return accessCode;
+ }
+
+ public void setAccessCode(String accessCode) {
+ this.accessCode = accessCode;
+ }
+
+ public long getNumAffected() {
+ return numAffected;
+ }
+
+ public void setNumAffected(long numAffected) {
+ this.numAffected = numAffected;
+ }
+ }
+}
--- /dev/null
+package org.collectionspace.services.batch.nuxeo;
+
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.commons.lang.StringUtils;
+import org.collectionspace.services.client.CollectionObjectClient;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.client.workflow.WorkflowClient;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectBotGardenConstants;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectConstants;
+import org.collectionspace.services.common.NuxeoBasedResource;
+import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.movement.nuxeo.MovementBotGardenConstants;
+import org.collectionspace.services.movement.nuxeo.MovementConstants;
+import org.dom4j.DocumentException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class UpdateDeadFlagBatchJob extends AbstractBatchJob {
+ final Logger logger = LoggerFactory.getLogger(UpdateDeadFlagBatchJob.class);
+
+ /** Configures this job to support only the single-CSID invocation mode. */
+ public UpdateDeadFlagBatchJob() {
+ this.setSupportedInvocationModes(Arrays.asList(INVOCATION_MODE_SINGLE));
+ }
+
+ /**
+ * Entry point for the batch job. Validates that the invocation mode is
+ * single-CSID and that a context csid is present, then updates the dead
+ * flags of all collectionobjects related to the context movement record.
+ */
+ @Override // added for consistency with UpdateAccessCodeBatchJob.run()
+ public void run() {
+ setCompletionStatus(STATUS_MIN_PROGRESS);
+
+ try {
+ String mode = getInvocationContext().getMode();
+
+ if (!mode.equalsIgnoreCase(INVOCATION_MODE_SINGLE)) {
+ throw new Exception("Unsupported invocation mode: " + mode);
+ }
+
+ String movementCsid = getInvocationContext().getSingleCSID();
+
+ if (StringUtils.isEmpty(movementCsid)) {
+ throw new Exception("Missing context csid");
+ }
+
+ setResults(updateRelatedDeadFlags(movementCsid));
+ setCompletionStatus(STATUS_COMPLETE);
+ }
+ catch(Exception e) {
+ // Any failure is surfaced through the batch error status/info.
+ setCompletionStatus(STATUS_ERROR);
+ setErrorInfo(new InvocationError(INT_ERROR_STATUS, e.getMessage()));
+ }
+ }
+
+ /**
+ * Update the dead flag for all collectionobjects related to the given movement record,
+ * based on the assumption that the action code of the specified movement record has just changed.
+ *
+ * @param movementCsid the csid of the movement that was updated
+ * @return the aggregated results over all related collectionobjects
+ * @throws URISyntaxException
+ * @throws DocumentException
+ */
+ public InvocationResults updateRelatedDeadFlags(String movementCsid) throws URISyntaxException, DocumentException {
+ InvocationResults results = new InvocationResults();
+ long numAffected = 0;
+ List<String> userNotes = new ArrayList<String>();
+
+ PoxPayloadOut payload = findMovementByCsid(movementCsid);
+
+ // NOTE(review): getFieldValue can return null when the field is absent
+ // (see the deadFlag handling below); actionCode.equals(...) would then
+ // throw a NullPointerException — confirm the field is always populated.
+ String actionCode = getFieldValue(payload, MovementBotGardenConstants.ACTION_CODE_SCHEMA_NAME, MovementBotGardenConstants.ACTION_CODE_FIELD_NAME);
+ logger.debug("actionCode=" + actionCode);
+
+ // Only DEAD and REVIVED action codes affect the dead flag.
+ if (actionCode.equals(MovementBotGardenConstants.DEAD_ACTION_CODE) || actionCode.equals(MovementBotGardenConstants.REVIVED_ACTION_CODE)) {
+ String actionDate = getFieldValue(payload, MovementBotGardenConstants.ACTION_DATE_SCHEMA_NAME,
+ MovementBotGardenConstants.ACTION_DATE_FIELD_NAME);
+ logger.debug("actionDate=" + actionDate);
+
+ List<String> collectionObjectCsids = findRelatedCollectionObjects(movementCsid);
+
+ for (String collectionObjectCsid : collectionObjectCsids) {
+ logger.debug("found related collectionobject: " + collectionObjectCsid);
+
+ InvocationResults collectionObjectResults = updateDeadFlag(collectionObjectCsid, movementCsid, actionCode, actionDate);
+
+ if (collectionObjectResults.getNumAffected() > 0) {
+ numAffected = numAffected + collectionObjectResults.getNumAffected();
+ userNotes.add(collectionObjectResults.getUserNote());
+ }
+ }
+ }
+
+ if (numAffected > 0) {
+ results.setNumAffected(numAffected);
+ results.setUserNote(StringUtils.join(userNotes, ", "));
+ }
+
+ return results;
+ }
+
+ /**
+ * Update the dead flag for the given collectionobject, based on the assumption that the action code
+ * of the specified movement record has just changed, and that the movement record is related to
+ * the collectionobject.
+ *
+ * @param collectionObjectCsid the csid of the collectionobject to update
+ * @param updatedMovementCsid the csid of the related movement that was updated
+ * @return the results of the update; empty when the action code is neither
+ * DEAD nor REVIVED
+ * @throws URISyntaxException
+ * @throws DocumentException
+ */
+ public InvocationResults updateDeadFlag(String collectionObjectCsid, String updatedMovementCsid) throws URISyntaxException, DocumentException {
+ InvocationResults results = new InvocationResults();
+ PoxPayloadOut payload = findMovementByCsid(updatedMovementCsid);
+
+ String actionCode = getFieldValue(payload, MovementBotGardenConstants.ACTION_CODE_SCHEMA_NAME,
+ MovementBotGardenConstants.ACTION_CODE_FIELD_NAME);
+ logger.debug("actionCode=" + actionCode);
+
+ // Only DEAD and REVIVED action codes affect the dead flag.
+ if (actionCode.equals(MovementBotGardenConstants.DEAD_ACTION_CODE) || actionCode.equals(MovementBotGardenConstants.REVIVED_ACTION_CODE)) {
+ String actionDate = getFieldValue(payload, MovementBotGardenConstants.ACTION_DATE_SCHEMA_NAME,
+ MovementBotGardenConstants.ACTION_DATE_FIELD_NAME);
+ logger.debug("actionDate=" + actionDate);
+
+ results = updateDeadFlag(collectionObjectCsid, updatedMovementCsid, actionCode, actionDate);
+ }
+
+ return results;
+ }
+
+    /**
+     * Update the dead flag for the given collectionobject, based on the assumption that the action code
+     * of the specified movement record has just changed, and that the movement record is related to
+     * the collectionobject.
+     *
+     * @param collectionObjectCsid the csid of the collectionobject to update
+     * @param updatedMovementCsid the csid of the related movement that was updated
+     * @param actionCode the action code of the movement
+     * @param actionDate the action date of the movement
+     * @return the results of the update: one record affected when the flag was changed,
+     *         zero otherwise
+     * @throws URISyntaxException
+     * @throws DocumentException
+     */
+    private InvocationResults updateDeadFlag(String collectionObjectCsid, String updatedMovementCsid, String actionCode, String actionDate) throws URISyntaxException, DocumentException {
+        InvocationResults results = new InvocationResults();
+        PoxPayloadOut payload = findCollectionObjectByCsid(collectionObjectCsid);
+
+        String workflowState = getFieldValue(payload, CollectionObjectConstants.WORKFLOW_STATE_SCHEMA_NAME, CollectionObjectConstants.WORKFLOW_STATE_FIELD_NAME);
+
+        if (workflowState.equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+            // Soft-deleted collectionobjects are never modified.
+            logger.debug("skipping deleted collectionobject: " + collectionObjectCsid);
+        }
+        else {
+            // Current value of the dead flag; a missing field is treated as "not dead".
+            String deadFlag = getFieldValue(payload, CollectionObjectBotGardenConstants.DEAD_FLAG_SCHEMA_NAME,
+                    CollectionObjectBotGardenConstants.DEAD_FLAG_FIELD_NAME);
+            boolean isDead = (deadFlag != null) && (deadFlag.equalsIgnoreCase("true"));
+
+            logger.debug("updating dead flag: collectionObjectCsid=" + collectionObjectCsid + " actionCode=" + actionCode + " isDead=" + isDead);
+
+            if (actionCode.equals(MovementBotGardenConstants.REVIVED_ACTION_CODE)) {
+                if (isDead) {
+                    /*
+                     * The object is dead, but a location was revived. Unset the dead flag and date on the object.
+                     */
+                    setDeadFlag(collectionObjectCsid, false, null);
+
+                    results.setNumAffected(1);
+                    results.setUserNote(collectionObjectCsid + " set to alive");
+                }
+            }
+            else if (actionCode.equals(MovementBotGardenConstants.DEAD_ACTION_CODE)) {
+                if (!isDead) {
+                    /*
+                     * The object is not dead, but a location was marked dead. If there are no remaining live locations,
+                     * set the dead flag and date on the object. Any movement record that is not deleted represents
+                     * a live location, with one exception: the movement record that was just marked dead may not have
+                     * been deleted yet, but it should not count as a live location.
+                     */
+                    List<String> movementCsids = findRelatedMovements(collectionObjectCsid);
+                    boolean liveLocationExists = false;
+
+                    for (String movementCsid : movementCsids) {
+                        logger.debug("found related movement: movementCsid=" + movementCsid);
+
+                        // Skip the movement that triggered this update (see comment above).
+                        if (!movementCsid.equals(updatedMovementCsid)) {
+                            PoxPayloadOut movementPayload = findMovementByCsid(movementCsid);
+                            String movementWorkflowState = getFieldValue(movementPayload, MovementConstants.WORKFLOW_STATE_SCHEMA_NAME, MovementConstants.WORKFLOW_STATE_FIELD_NAME);
+
+                            if (!movementWorkflowState.equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+                                logger.debug("found live location: movementCsid=" + movementCsid);
+
+                                // One live location is enough to keep the object alive.
+                                liveLocationExists = true;
+                                break;
+                            }
+                        }
+                    }
+
+                    if (!liveLocationExists) {
+                        setDeadFlag(collectionObjectCsid, true, actionDate);
+
+                        results.setNumAffected(1);
+                        results.setUserNote(collectionObjectCsid + " set to dead");
+                    }
+                }
+            }
+        }
+
+        return results;
+    }
+
+    /**
+     * Update the dead flag and dead date of the specified collectionobject.
+     *
+     * @param collectionObjectCsid the csid of the collectionobject to update
+     * @param deadFlag the new value of the dead flag field
+     * @param deadDate the new value of the dead date field
+     * @throws URISyntaxException
+     */
+    private void setDeadFlag(String collectionObjectCsid, boolean deadFlag, String deadDate) throws URISyntaxException {
+        // Assemble the partial update payload containing only the botgarden fields to change.
+        StringBuilder updatePayload = new StringBuilder();
+
+        updatePayload.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
+        updatePayload.append("<document name=\"collectionobjects\">");
+        updatePayload.append("<ns2:collectionobjects_botgarden xmlns:ns2=\"http://collectionspace.org/services/collectionobject/local/botgarden\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">");
+        updatePayload.append(getFieldXml("deadFlag", String.valueOf(deadFlag)));
+        updatePayload.append(getFieldXml("deadDate", deadDate));
+        updatePayload.append("</ns2:collectionobjects_botgarden>");
+        updatePayload.append("<ns2:collectionobjects_common xmlns:ns2=\"http://collectionspace.org/services/collectionobject\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">");
+        updatePayload.append("</ns2:collectionobjects_common>");
+        updatePayload.append("</document>");
+
+        NuxeoBasedResource resource = (NuxeoBasedResource) getResourceMap().get(CollectionObjectClient.SERVICE_NAME);
+        resource.update(getResourceMap(), createUriInfo(), collectionObjectCsid, updatePayload.toString());
+    }
+}
--- /dev/null
+package org.collectionspace.services.batch.nuxeo;
+
+import java.net.URISyntaxException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import javax.ws.rs.WebApplicationException;
+
+import org.apache.commons.lang.StringUtils;
+import org.collectionspace.services.client.CollectionObjectClient;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.client.TaxonomyAuthorityClient;
+import org.collectionspace.services.client.workflow.WorkflowClient;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectBotGardenConstants;
+import org.collectionspace.services.collectionobject.nuxeo.CollectionObjectConstants;
+import org.collectionspace.services.common.NuxeoBasedResource;
+import org.collectionspace.services.common.api.RefName;
+import org.collectionspace.services.common.invocable.InvocationContext.ListCSIDs;
+import org.collectionspace.services.common.invocable.InvocationResults;
+import org.collectionspace.services.taxonomy.nuxeo.TaxonBotGardenConstants;
+import org.collectionspace.services.taxonomy.nuxeo.TaxonConstants;
+import org.dom4j.DocumentException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class UpdateRareFlagBatchJob extends AbstractBatchJob {
+    final Logger logger = LoggerFactory.getLogger(UpdateRareFlagBatchJob.class);
+
+    // All conservation categories are considered rare, except for ones that start with the following prefixes.
+    public static final List<String> NON_RARE_CONSERVATION_CATEGORY_PREFIXES = Arrays.asList("none", "DD ", "LC ", "LR (lc) ");
+
+    // The taxon field name constant may contain a path; the referencing-objects query
+    // needs only the trailing (unqualified) segment.
+    private static final String[] TAXON_FIELD_NAME_PARTS = CollectionObjectBotGardenConstants.TAXON_FIELD_NAME.split("\\/");
+    private static final String TAXON_FIELD_NAME_WITHOUT_PATH = TAXON_FIELD_NAME_PARTS[TAXON_FIELD_NAME_PARTS.length - 1];
+
+    public UpdateRareFlagBatchJob() {
+        this.setSupportedInvocationModes(Arrays.asList(INVOCATION_MODE_SINGLE, INVOCATION_MODE_LIST, INVOCATION_MODE_NO_CONTEXT));
+    }
+
+    /**
+     * Dispatches to the appropriate update method based on the invocation mode,
+     * and records the completion status and results.
+     */
+    @Override
+    public void run() {
+        setCompletionStatus(STATUS_MIN_PROGRESS);
+
+        try {
+            String mode = getInvocationContext().getMode();
+
+            if (mode.equals(INVOCATION_MODE_SINGLE)) {
+                /*
+                 * In a single document context, the single csid must specify a collectionobject or a
+                 * taxonomy record. If it's a collectionobject, the rare flag for the specified
+                 * collectionobject will be updated. If it's a taxonomy record, the rare flag will be
+                 * updated for each collectionobject with a primary determination that refers to the
+                 * specified taxonomy record.
+                 */
+
+                String csid = getInvocationContext().getSingleCSID();
+
+                if (StringUtils.isEmpty(csid)) {
+                    throw new Exception("Missing context csid");
+                }
+
+                String docType = getInvocationContext().getDocType();
+
+                if (docType.equals(CollectionObjectConstants.NUXEO_DOCTYPE)) {
+                    setResults(updateRareFlag(csid));
+                }
+                else if (docType.equals(TaxonConstants.NUXEO_DOCTYPE)) {
+                    setResults(updateReferencingRareFlags(csid));
+                }
+                else {
+                    throw new Exception("Unsupported document type: " + docType);
+                }
+            }
+            else if (mode.equals(INVOCATION_MODE_LIST)) {
+                /*
+                 * In a list context, the csids must specify collectionobjects. The rare flag for
+                 * each collectionobject will be updated.
+                 */
+                ListCSIDs csids = getInvocationContext().getListCSIDs();
+
+                setResults(updateRareFlags(csids.getCsid()));
+            }
+            else if (mode.equals(INVOCATION_MODE_NO_CONTEXT)) {
+                /*
+                 * If there is no context, the rare flag will be updated for all (non-deleted)
+                 * collectionobjects.
+                 */
+
+                setResults(updateAllRareFlags());
+            }
+            else {
+                throw new Exception("Unsupported invocation mode: " + mode);
+            }
+
+            setCompletionStatus(STATUS_COMPLETE);
+        }
+        catch(Exception e) {
+            setCompletionStatus(STATUS_ERROR);
+            setErrorInfo(new InvocationError(INT_ERROR_STATUS, e.getMessage()));
+        }
+    }
+
+    /**
+     * Updates the rare flags of collectionobjects that refer to the specified taxon record.
+     * A collectionobject is considered to refer to the taxon record if the refname of its
+     * primary taxonomic identification is the refname of the taxon record.
+     *
+     * @param taxonCsid The csid of the taxon record
+     * @return the results of the update
+     * @throws URISyntaxException
+     * @throws DocumentException
+     */
+    public InvocationResults updateReferencingRareFlags(String taxonCsid) throws URISyntaxException, DocumentException {
+        PoxPayloadOut taxonPayload = findTaxonByCsid(taxonCsid);
+        String taxonRefName = getFieldValue(taxonPayload, TaxonConstants.REFNAME_SCHEMA_NAME, TaxonConstants.REFNAME_FIELD_NAME);
+
+        RefName.AuthorityItem item = RefName.AuthorityItem.parse(taxonRefName);
+        String vocabularyShortId = item.getParentShortIdentifier();
+
+        List<String> collectionObjectCsids = findReferencingCollectionObjects(TaxonomyAuthorityClient.SERVICE_NAME, vocabularyShortId, taxonCsid,
+                CollectionObjectBotGardenConstants.TAXON_SCHEMA_NAME + ":" + TAXON_FIELD_NAME_WITHOUT_PATH);
+        long numFound = 0;
+        long numAffected = 0;
+
+        for (String collectionObjectCsid : collectionObjectCsids) {
+            // Filter out results where the taxon is referenced in the correct field, but isn't the primary value.
+
+            PoxPayloadOut collectionObjectPayload = findCollectionObjectByCsid(collectionObjectCsid);
+            String primaryTaxonRefName = getFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.TAXON_SCHEMA_NAME,
+                    CollectionObjectBotGardenConstants.TAXON_FIELD_NAME);
+
+            if (primaryTaxonRefName.equals(taxonRefName)) {
+                numFound++;
+
+                InvocationResults itemResults = updateRareFlag(collectionObjectPayload);
+                numAffected += itemResults.getNumAffected();
+            }
+        }
+
+        InvocationResults results = new InvocationResults();
+        results.setNumAffected(numAffected);
+        results.setUserNote(numFound + " referencing cataloging " + (numFound == 1 ? "record" : "records") + " found, " + numAffected + " updated");
+
+        return results;
+    }
+
+    /**
+     * Updates the rare flag of the specified collectionobject.
+     *
+     * @param collectionObjectCsid The csid of the collectionobject
+     * @return the results of the update
+     * @throws URISyntaxException
+     * @throws DocumentException
+     */
+    public InvocationResults updateRareFlag(String collectionObjectCsid) throws URISyntaxException, DocumentException {
+        PoxPayloadOut collectionObjectPayload = findCollectionObjectByCsid(collectionObjectCsid);
+
+        return updateRareFlag(collectionObjectPayload);
+    }
+
+    /**
+     * Updates the rare flag of the specified collectionobject. The rare flag is determined by looking at
+     * the taxon record that is referenced by the primary taxonomic determination of the collectionobject.
+     * If the taxon record has a conservation category that is considered rare in its primary plant attributes
+     * group, the rare flag is set to true. Otherwise, it is set to false.
+     *
+     * @param collectionObjectPayload The payload representing the collectionobject
+     * @return the results of the update: one record affected when the flag changed,
+     *         zero when it was already correct or the record is deleted
+     * @throws URISyntaxException
+     * @throws DocumentException
+     */
+    public InvocationResults updateRareFlag(PoxPayloadOut collectionObjectPayload) throws URISyntaxException, DocumentException {
+        InvocationResults results = new InvocationResults();
+
+        // The csid is recovered from the record's uri field (last path segment).
+        String uri = getFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.URI_SCHEMA_NAME,
+                CollectionObjectBotGardenConstants.URI_FIELD_NAME);
+        String[] uriParts = uri.split("\\/");
+        String collectionObjectCsid = uriParts[uriParts.length-1];
+
+        String workflowState = getFieldValue(collectionObjectPayload, CollectionObjectConstants.WORKFLOW_STATE_SCHEMA_NAME,
+                CollectionObjectConstants.WORKFLOW_STATE_FIELD_NAME);
+
+        if (workflowState.equals(WorkflowClient.WORKFLOWSTATE_DELETED)) {
+            logger.debug("skipping deleted collectionobject: " + collectionObjectCsid);
+        }
+        else {
+            String taxonRefName = getFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.TAXON_SCHEMA_NAME,
+                    CollectionObjectBotGardenConstants.TAXON_FIELD_NAME);
+            String oldIsRare = getFieldValue(collectionObjectPayload, CollectionObjectBotGardenConstants.RARE_FLAG_SCHEMA_NAME,
+                    CollectionObjectBotGardenConstants.RARE_FLAG_FIELD_NAME);
+
+            // Normalize a missing flag to the empty string so it compares unequal to
+            // both "true" and "false", forcing an initial write.
+            if (oldIsRare == null) {
+                oldIsRare = "";
+            }
+
+            String newIsRare = "false";
+
+            if (StringUtils.isNotBlank(taxonRefName)) {
+                PoxPayloadOut taxonPayload = null;
+
+                try {
+                    taxonPayload = findTaxonByRefName(taxonRefName);
+                }
+                catch (WebApplicationException e) {
+                    // A dangling reference shouldn't abort the batch; treat as not rare.
+                    logger.error("Error finding taxon: refName=" + taxonRefName, e);
+                }
+
+                if (taxonPayload != null) {
+                    // UCBG-369: Changing this so that it only checks the primary conservation category.
+
+                    String conservationCategory = getFieldValue(taxonPayload, TaxonBotGardenConstants.CONSERVATION_CATEGORY_SCHEMA_NAME,
+                            TaxonBotGardenConstants.CONSERVATION_CATEGORY_FIELD_NAME);
+
+                    if (isRare(conservationCategory)) {
+                        newIsRare = "true";
+                    }
+                }
+            }
+
+            if (!newIsRare.equals(oldIsRare)) {
+                logger.debug("setting rare flag: collectionObjectCsid=" + collectionObjectCsid + " oldIsRare=" + oldIsRare +" newIsRare=" + newIsRare);
+
+                setRareFlag(collectionObjectCsid, newIsRare);
+
+                results.setNumAffected(1);
+                results.setUserNote("rare flag set to " + newIsRare);
+            }
+            else {
+                logger.debug("not setting rare flag: collectionObjectCsid=" + collectionObjectCsid + " oldIsRare=" + oldIsRare +" newIsRare=" + newIsRare);
+
+                results.setNumAffected(0);
+                results.setUserNote("rare flag not changed");
+            }
+        }
+
+        return results;
+    }
+
+    /**
+     * Determines whether a conservation category refname is considered rare.
+     * A non-empty category is rare unless its display name starts with one of
+     * the prefixes in NON_RARE_CONSERVATION_CATEGORY_PREFIXES.
+     *
+     * @param conservationCategoryRefName the refname of the conservation category
+     * @return true if the category is considered rare
+     */
+    public static boolean isRare(String conservationCategoryRefName) {
+        boolean isRare = false;
+
+        if (StringUtils.isNotEmpty(conservationCategoryRefName)) {
+            // The conservation category is non-empty, so it's rare...
+            isRare = true;
+
+            // ...unless it's one of the non-rare ones.
+
+            // Check if the display name starts with a prefix that
+            // indicates that it isn't rare.
+
+            RefName.AuthorityItem item = RefName.AuthorityItem.parse(conservationCategoryRefName);
+            String displayName = item.getDisplayName();
+
+            for (String prefix : NON_RARE_CONSERVATION_CATEGORY_PREFIXES) {
+                if (displayName.startsWith(prefix)) {
+                    isRare = false;
+                    break;
+                }
+            }
+        }
+
+        return isRare;
+    }
+
+    /**
+     * Updates the rare flags of the specified collectionobjects.
+     *
+     * @param collectionObjectCsids The csids of the collectionobjects
+     * @return the aggregated results of the updates
+     * @throws URISyntaxException
+     * @throws DocumentException
+     */
+    public InvocationResults updateRareFlags(List<String> collectionObjectCsids) throws URISyntaxException, DocumentException {
+        int numSubmitted = collectionObjectCsids.size();
+        long numAffected = 0;
+
+        for (String collectionObjectCsid : collectionObjectCsids) {
+            InvocationResults itemResults = updateRareFlag(collectionObjectCsid);
+
+            numAffected += itemResults.getNumAffected();
+        }
+
+        InvocationResults results = new InvocationResults();
+        results.setNumAffected(numAffected);
+        results.setUserNote("updated " + numAffected + " of " + numSubmitted + " cataloging records");
+
+        return results;
+    }
+
+    /**
+     * Updates the rare flags of all collectionobjects, paging through the full
+     * result set in batches of 50.
+     *
+     * @return the aggregated results of the updates
+     * @throws URISyntaxException
+     * @throws DocumentException
+     */
+    public InvocationResults updateAllRareFlags() throws URISyntaxException, DocumentException {
+        long numFound = 0;
+        long numAffected = 0;
+
+        int pageSize = 50;
+        int pageNum = 0;
+        List<String> csids = Collections.emptyList();
+
+        do {
+            csids = findAllCollectionObjects(pageSize, pageNum);
+            logger.debug("pageNum=" + pageNum + " pageSize=" + pageSize + " result size=" + csids.size());
+
+            InvocationResults pageResults = updateRareFlags(csids);
+
+            numAffected += pageResults.getNumAffected();
+            numFound += csids.size();
+
+            pageNum++;
+        }
+        while (csids.size() == pageSize);
+
+        InvocationResults results = new InvocationResults();
+        results.setNumAffected(numAffected);
+        results.setUserNote("updated " + numAffected + " of " + numFound + " cataloging records");
+
+        // BUG FIX: previously returned null, discarding the populated results and
+        // causing the no-context invocation mode to report nothing.
+        return results;
+    }
+
+    /**
+     * Sets the rare flag of the specified collectionobject to the specified value.
+     *
+     * @param collectionObjectCsid The csid of the collectionobject
+     * @param rareFlag The value of the rare flag
+     * @throws URISyntaxException
+     */
+    private void setRareFlag(String collectionObjectCsid, String rareFlag) throws URISyntaxException {
+        String updatePayload =
+            "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
+            "<document name=\"collectionobjects\">" +
+            "<ns2:collectionobjects_naturalhistory xmlns:ns2=\"http://collectionspace.org/services/collectionobject/domain/naturalhistory\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">" +
+            getFieldXml("rare", rareFlag) +
+            "</ns2:collectionobjects_naturalhistory>" +
+            "<ns2:collectionobjects_common xmlns:ns2=\"http://collectionspace.org/services/collectionobject\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">" +
+            "</ns2:collectionobjects_common>" +
+            "</document>";
+
+        NuxeoBasedResource resource = (NuxeoBasedResource) getResourceMap().get(CollectionObjectClient.SERVICE_NAME);
+        resource.update(getResourceMap(), createUriInfo(), collectionObjectCsid, updatePayload);
+    }
+}
--- /dev/null
+
+<project name="claim.3rdparty" default="package" basedir=".">
+  <description>
+    claim service 3rdparty
+  </description>
+  <!-- set global properties for this build -->
+  <property name="services.trunk" value="../../.."/>
+  <!-- environment should be declared before reading build.properties -->
+  <property environment="env" />
+  <property file="${services.trunk}/build.properties" />
+  <property name="mvn.opts" value="-V" />
+  <property name="src" location="src"/>
+
+  <!-- Select the platform-specific Maven invocation (mvn vs. cmd /c mvn.bat). -->
+  <condition property="osfamily-unix">
+    <os family="unix" />
+  </condition>
+  <condition property="osfamily-windows">
+    <os family="windows" />
+  </condition>
+
+  <target name="init" >
+    <!-- Create the time stamp -->
+    <tstamp/>
+  </target>
+
+  <!-- All build work is delegated to Maven; -N restricts it to this POM only. -->
+  <target name="package" depends="package-unix,package-windows"
+    description="Package CollectionSpace Services" />
+  <target name="package-unix" if="osfamily-unix">
+    <exec executable="mvn" failonerror="true">
+      <arg value="package" />
+      <arg value="-Dmaven.test.skip=true" />
+      <arg value="-f" />
+      <arg value="${basedir}/pom.xml" />
+      <arg value="-N" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+  <target name="package-windows" if="osfamily-windows">
+    <exec executable="cmd" failonerror="true">
+      <arg value="/c" />
+      <arg value="mvn.bat" />
+      <arg value="package" />
+      <arg value="-Dmaven.test.skip=true" />
+      <arg value="-f" />
+      <arg value="${basedir}/pom.xml" />
+      <arg value="-N" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+
+  <target name="install" depends="install-unix,install-windows"
+    description="Install" />
+  <target name="install-unix" if="osfamily-unix">
+    <exec executable="mvn" failonerror="true">
+      <arg value="install" />
+      <arg value="-Dmaven.test.skip=true" />
+      <arg value="-f" />
+      <arg value="${basedir}/pom.xml" />
+      <arg value="-N" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+  <target name="install-windows" if="osfamily-windows">
+    <exec executable="cmd" failonerror="true">
+      <arg value="/c" />
+      <arg value="mvn.bat" />
+      <arg value="install" />
+      <arg value="-Dmaven.test.skip=true" />
+      <arg value="-f" />
+      <arg value="${basedir}/pom.xml" />
+      <arg value="-N" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+
+  <!-- NOTE(review): ${build} is never defined in this file, so this delete is a
+       no-op unless build.properties supplies it - confirm whether it is needed. -->
+  <target name="clean" depends="clean-unix,clean-windows"
+    description="Delete target directories" >
+    <delete dir="${build}"/>
+  </target>
+  <target name="clean-unix" if="osfamily-unix">
+    <exec executable="mvn" failonerror="true">
+      <arg value="clean" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+  <target name="clean-windows" if="osfamily-windows">
+    <exec executable="cmd" failonerror="true">
+      <arg value="/c" />
+      <arg value="mvn.bat" />
+      <arg value="clean" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+
+  <target name="test" depends="test-unix,test-windows" description="Run tests" />
+  <target name="test-unix" if="osfamily-unix">
+    <exec executable="mvn" failonerror="true">
+      <arg value="test" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+  <target name="test-windows" if="osfamily-windows">
+    <exec executable="cmd" failonerror="true">
+      <arg value="/c" />
+      <arg value="mvn.bat" />
+      <arg value="test" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+
+  <target name="deploy" depends="install"
+    description="deploy claim in ${jee.server.nuxeo}">
+    <!-- This module is obsolete. The Nuxeo artifacts are now created and deployed using the "csmake" tool
+    <ant antfile="nuxeo-platform-cs-claim/build.xml" target="deploy" inheritall="false"/>
+    -->
+  </target>
+
+  <target name="undeploy"
+    description="undeploy claim from ${jee.server.nuxeo}">
+    <!-- This target is obsolete. The Nuxeo artifacts are now created and deployed using the "csmake" tool
+    leaving this only for backwards compatibility reasons. -->
+    <ant antfile="nuxeo-platform-cs-claim/build.xml" target="undeploy" inheritall="false"/>
+  </target>
+
+  <target name="dist"
+    description="generate distribution for claim" depends="package">
+    <ant antfile="nuxeo-platform-cs-claim/build.xml" target="dist" inheritall="false"/>
+  </target>
+</project>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="nuxeo-platform-cs-claim" default="package" basedir=".">
+  <description>
+    claim nuxeo document type
+  </description>
+  <!-- set global properties for this build -->
+  <property name="services.trunk" value="../../../.."/>
+  <!-- environment should be declared before reading build.properties -->
+  <property environment="env" />
+  <property file="${services.trunk}/build.properties" />
+  <property name="mvn.opts" value="-V" />
+  <property name="src" location="src"/>
+  <!-- JAR files used by CollectionSpace 4.0 and later -->
+  <property name="nuxeo.claim.doctype.jars.all"
+    value="collectionspace.claim.doctype.*.jar"/>
+  <property name="nuxeo.claim.schema.jars.all"
+    value="collectionspace.claim.schema.*.jar"/>
+  <!-- Legacy JAR files used by CollectionSpace 3.3 and earlier -->
+  <property name="nuxeo.claim.legacy.jars.all"
+    value="org.collectionspace.services.claim.3rdparty.nuxeo-*.jar"/>
+  <property name="nuxeo.claim.legacy.jar"
+    value="org.collectionspace.services.claim.3rdparty.nuxeo-${cspace.release}.jar"/>
+
+  <!-- Select the platform-specific Maven invocation (mvn vs. cmd /c mvn.bat). -->
+  <condition property="osfamily-unix">
+    <os family="unix" />
+  </condition>
+  <condition property="osfamily-windows">
+    <os family="windows" />
+  </condition>
+
+  <target name="init" >
+    <!-- Create the time stamp -->
+    <tstamp/>
+  </target>
+
+  <!-- All build work is delegated to Maven; -N restricts it to this POM only. -->
+  <target name="package" depends="package-unix,package-windows"
+    description="Package CollectionSpace Services" />
+  <target name="package-unix" if="osfamily-unix">
+    <exec executable="mvn" failonerror="true">
+      <arg value="package" />
+      <arg value="-Dmaven.test.skip=true" />
+      <arg value="-f" />
+      <arg value="${basedir}/pom.xml" />
+      <arg value="-N" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+  <target name="package-windows" if="osfamily-windows">
+    <exec executable="cmd" failonerror="true">
+      <arg value="/c" />
+      <arg value="mvn.bat" />
+      <arg value="package" />
+      <arg value="-Dmaven.test.skip=true" />
+      <arg value="-f" />
+      <arg value="${basedir}/pom.xml" />
+      <arg value="-N" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+
+  <target name="install" depends="install-unix,install-windows"
+    description="Install" />
+  <target name="install-unix" if="osfamily-unix">
+    <exec executable="mvn" failonerror="true">
+      <arg value="install" />
+      <arg value="-Dmaven.test.skip=true" />
+      <arg value="-f" />
+      <arg value="${basedir}/pom.xml" />
+      <arg value="-N" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+  <target name="install-windows" if="osfamily-windows">
+    <exec executable="cmd" failonerror="true">
+      <arg value="/c" />
+      <arg value="mvn.bat" />
+      <arg value="install" />
+      <arg value="-Dmaven.test.skip=true" />
+      <arg value="-f" />
+      <arg value="${basedir}/pom.xml" />
+      <arg value="-N" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+
+  <!-- NOTE(review): ${build} is never defined in this file, so this delete is a
+       no-op unless build.properties supplies it - confirm whether it is needed. -->
+  <target name="clean" depends="clean-unix,clean-windows"
+    description="Delete target directories" >
+    <delete dir="${build}"/>
+  </target>
+  <target name="clean-unix" if="osfamily-unix">
+    <exec executable="mvn" failonerror="true">
+      <arg value="clean" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+  <target name="clean-windows" if="osfamily-windows">
+    <exec executable="cmd" failonerror="true">
+      <arg value="/c" />
+      <arg value="mvn.bat" />
+      <arg value="clean" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+
+  <target name="test" depends="test-unix,test-windows" description="Run tests" />
+  <target name="test-unix" if="osfamily-unix">
+    <exec executable="mvn" failonerror="true">
+      <arg value="test" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+  <target name="test-windows" if="osfamily-windows">
+    <exec executable="cmd" failonerror="true">
+      <arg value="/c" />
+      <arg value="mvn.bat" />
+      <arg value="test" />
+      <arg value="${mvn.opts}" />
+    </exec>
+  </target>
+
+  <target name="deploy" depends="install"
+    description="deploy claim doctype in ${jee.server.nuxeo}">
+    <!-- This target is obsolete. The Nuxeo artifacts are now created and deployed using the "csmake" tool
+    leaving this only for backwards compatibility reasons. -->
+    <copy file="${basedir}/target/${nuxeo.claim.legacy.jar}"
+      todir="${jee.deploy.nuxeo.plugins}"/>
+  </target>
+
+  <target name="undeploy"
+    description="undeploy claim doctype from ${jee.server.nuxeo}">
+    <delete>
+      <!-- Undeploy doctype and schema artifacts -->
+      <fileset dir="${jee.deploy.nuxeo.plugins}">
+        <include name="${nuxeo.claim.doctype.jars.all}"/>
+      </fileset>
+      <fileset dir="${jee.deploy.nuxeo.plugins}">
+        <include name="${nuxeo.claim.schema.jars.all}"/>
+      </fileset>
+      <!-- Undeploy legacy artifacts -->
+      <fileset dir="${jee.deploy.nuxeo.plugins}">
+        <include name="${nuxeo.claim.legacy.jars.all}"/>
+      </fileset>
+    </delete>
+    <!-- Undeploy legacy artifacts from old deployment location through release 0.6 -->
+    <delete quiet="true">
+      <fileset dir="${jee.deploy.nuxeo.system}">
+        <include name="${nuxeo.claim.legacy.jars.all}"/>
+      </fileset>
+    </delete>
+  </target>
+
+
+  <target name="dist"
+    description="generate distribution for claim doctype" depends="package">
+    <copy todir="${services.trunk}/${dist.deploy.nuxeo.plugins}">
+      <fileset file="${basedir}/target/${nuxeo.claim.legacy.jar}"/>
+    </copy>
+  </target>
+
+</project>
+
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+  <parent>
+    <groupId>org.collectionspace.services</groupId>
+    <artifactId>org.collectionspace.services.claim.3rdparty</artifactId>
+    <version>4.5-SNAPSHOT</version>
+  </parent>
+
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.collectionspace.services</groupId>
+  <artifactId>org.collectionspace.services.claim.3rdparty.nuxeo</artifactId>
+  <name>services.claim.3rdparty.nuxeo</name>
+  <packaging>jar</packaging>
+  <description>
+    Claim Nuxeo Document Type
+  </description>
+
+  <build>
+    <!-- Resource filtering substitutes Maven properties (e.g. the doctype name
+         from the parent POM) into the packaged Nuxeo configuration files. -->
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>true</filtering>
+      </resource>
+      <resource>
+        <directory>../../../../3rdparty/nuxeo/nuxeo-doctype/src/main/resources</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <configuration>
+          <!-- Use the filtered manifest and add OSGi bundle headers so Nuxeo
+               can load the jar as a bundle. -->
+          <archive>
+            <manifestFile>target/classes/META-INF/MANIFEST.MF</manifestFile>
+            <manifestEntries>
+              <Bundle-Version>${eclipseVersion}</Bundle-Version>
+              <Bundle-ManifestVersion>2</Bundle-ManifestVersion>
+            </manifestEntries>
+          </archive>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>
--- /dev/null
+<?xml version="1.0"?>
+
+<!--
+layouts-contrib.xml
+
+Layout file for configuring screen layouts in the
+user interface of Nuxeo EP's web application, for
+viewing or editing CollectionSpace records stored
+in the Nuxeo repository.
+
+See the "Nuxeo Book" for an introductory description
+of how to edit this file. For instance, for Nuxeo EP 5.3:
+http://doc.nuxeo.org/5.3/books/nuxeo-book/html/
+
+$LastChangedRevision: $
+$LastChangedDate: $
+-->
+
+<component name="org.collectionspace.claim.layouts.webapp">
+
+  <extension target="org.nuxeo.ecm.platform.forms.layout.WebLayoutManager"
+    point="layouts">
+
+    <!-- Single-row layout exposing only the claim number from claims_common. -->
+    <layout name="claims_common">
+      <templates>
+        <template mode="any">/layouts/layout_default_template.xhtml</template>
+      </templates>
+
+      <rows>
+        <row><widget>claimNumber</widget></row>
+      </rows>
+
+      <widget name="claimNumber" type="text">
+        <labels>
+          <label mode="any">claimNumber</label>
+        </labels>
+        <translated>true</translated>
+        <fields>
+          <field schema="claims_common">claimNumber</field>
+        </fields>
+        <properties widgetMode="edit">
+          <property name="styleClass">dataInputText</property>
+        </properties>
+      </widget>
+    </layout>
+  </extension>
+</component>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+
+<!--
+  Claim schema (XSD)
+
+  Entity  : Claim
+  Part    : Common
+  Used for: Nuxeo EP core document type
+
+  $LastChangedRevision: $
+  $LastChangedDate: $
+-->
+
+<!-- PATRICK ADVISES CORE SCHEMA CONSIST OF ONLY:
+  * Date filed
+  * Claim number
+  * Filed by (here filedBy)
+  * On behalf of (here filedOnBehalfOf)
+  * Responsible department (here responsibleDepartment)
+-->
+
+<xs:schema
+  xmlns:xs="http://www.w3.org/2001/XMLSchema"
+  xmlns:ns="http://collectionspace.org/services/claim/"
+  xmlns="http://collectionspace.org/services/claim/"
+  targetNamespace="http://collectionspace.org/services/claim/"
+  version="0.1">
+
+  <xs:element name="claimNumber" type="xs:string"/>
+
+  <!-- Repeating list of departments responsible for the claim. -->
+  <xs:element name="responsibleDepartments"> <!-- CORE: Responsible department -->
+    <xs:complexType>
+      <xs:sequence>
+        <xs:element name="responsibleDepartment" type="xs:string"
+          minOccurs="0" maxOccurs="unbounded"/>
+      </xs:sequence>
+    </xs:complexType>
+  </xs:element>
+
+  <xs:element name="claimClaimantGroupList" type="claimClaimantGroupList"/> <!-- CORE: Filed by; On behalf of -->
+  <xs:element name="claimReceivedGroupList" type="claimReceivedGroupList"/> <!-- CORE: Date filed -->
+
+  <!-- Repeating group: who filed the claim and on whose behalf. -->
+  <xs:complexType name="claimClaimantGroupList"> <!-- CORE: Filed by; On behalf of -->
+    <xs:sequence>
+      <xs:element name="claimClaimantGroup" type="claimClaimantGroup" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+  </xs:complexType>
+  <xs:complexType name="claimClaimantGroup"> <!-- CORE: Filed by; On behalf of -->
+    <xs:sequence>
+      <xs:element name="filedBy" type="xs:string"/> <!-- CORE: Filed by -->
+      <xs:element name="filedOnBehalfOf" type="xs:string"/> <!-- CORE: On behalf of -->
+      <xs:element name="claimantNote" type="xs:string"/> <!-- SUGGESTED CORE -->
+    </xs:sequence>
+  </xs:complexType>
+
+  <!-- Repeating group: when the claim was received, with an optional note. -->
+  <xs:complexType name="claimReceivedGroupList"> <!-- CORE: Date filed -->
+    <xs:sequence>
+      <xs:element name="claimReceivedGroup" type="claimReceivedGroup" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+  </xs:complexType>
+  <xs:complexType name="claimReceivedGroup"> <!-- CORE: Date filed -->
+    <xs:sequence>
+      <xs:element name="claimReceivedDate" type="xs:date"/> <!-- CORE: Date filed -->
+      <xs:element name="claimReceivedNote" type="xs:string"/> <!-- SUGGESTED CORE -->
+    </xs:sequence>
+  </xs:complexType>
+</xs:schema>
\ No newline at end of file
--- /dev/null
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <artifactId>org.collectionspace.services.claim</artifactId>
+ <groupId>org.collectionspace.services</groupId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>org.collectionspace.services.claim.3rdparty</artifactId>
+ <name>services.claim.3rdparty</name>
+ <packaging>pom</packaging>
+
+ <description>
+ 3rd party build for claim service
+ </description>
+
+ <properties>
+ <ServiceName>claim</ServiceName>
+ <NuxeoDocTypeName>Claim</NuxeoDocTypeName>
+ <CommonSchemaName>claims_common</CommonSchemaName>
+ <TenantPrefix>Tenant</TenantPrefix>
+ </properties>
+
+ <modules>
+ <!-- This module is obsolete. The Nuxeo artifacts are now created and deployed using the "csmake" tool
+ <module>nuxeo-platform-cs-claim</module>
+ -->
+ </modules>
+</project>
--- /dev/null
+
+<project name="claim" default="package" basedir=".">
+ <description>
+ claim service
+ </description>
+ <!-- set global properties for this build -->
+ <property name="services.trunk" value="../.."/>
+ <!-- environment should be declared before reading build.properties -->
+ <property environment="env" />
+ <property file="${services.trunk}/build.properties" />
+ <property name="mvn.opts" value="-V" />
+ <property name="src" location="src"/>
+
+ <condition property="osfamily-unix">
+ <os family="unix" />
+ </condition>
+ <condition property="osfamily-windows">
+ <os family="windows" />
+ </condition>
+
+ <target name="package" depends="package-unix,package-windows"
+ description="Package CollectionSpace Services" />
+
+ <target name="package-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="package-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+
+ <target name="install" depends="install-unix,install-windows"
+ description="Install" />
+ <target name="install-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="install-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="clean" depends="clean-unix,clean-windows"
+ description="Delete target directories" >
+ <!-- NOTE(review): a <delete dir="${build}"/> step was removed here: the
+ "build" property is never defined in this file (unlike the listener
+ build script, which declares it), so the delete resolved to the
+ literal path "${build}". The "mvn clean" invocations above already
+ remove the Maven target directories. -->
+ </target>
+ <target name="clean-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="clean" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="clean-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="clean" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="test" depends="test-unix,test-windows" description="Run tests" />
+ <target name="test-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="test" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="test-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="test" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="deploy" depends="install"
+ description="deploy claim service">
+ <ant antfile="3rdparty/build.xml" target="deploy" inheritall="false"/>
+ </target>
+
+ <target name="undeploy"
+ description="undeploy claim service">
+ <ant antfile="3rdparty/build.xml" target="undeploy" inheritall="false"/>
+ </target>
+
+ <target name="dist" depends="package"
+ description="distribute claim service">
+ <ant antfile="3rdparty/build.xml" target="dist" inheritall="false"/>
+ </target>
+
+</project>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.claim</artifactId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.claim.client</artifactId>
+ <name>services.claim.client</name>
+
+ <dependencies>
+ <!-- keep slf4j dependencies on the top -->
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ <scope>provided</scope>
+ </dependency>
+<!-- CollectionSpace dependencies -->
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.authority.jaxb</artifactId>
+ <optional>true</optional>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.common</artifactId>
+ <optional>true</optional>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.claim.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- FIXME: Add dependency on storage location JAXB classes when those are available -->
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.person.client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+<!-- External dependencies -->
+ <dependency>
+ <groupId>org.testng</groupId>
+ <artifactId>testng</artifactId>
+ <version>5.6</version>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-jaxrs</artifactId>
+ <!-- filter out unwanted jars -->
+ <exclusions>
+ <exclusion>
+ <groupId>tjws</groupId>
+ <artifactId>webserver</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-jaxb-provider</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-multipart-provider</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>commons-httpclient</groupId>
+ <artifactId>commons-httpclient</artifactId>
+ <version>3.1</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>collectionspace-services-claim-client</finalName>
+ </build>
+</project>
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+ *
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+ *
+ * Copyright © 2009 Regents of the University of California
+ *
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+ *
+ * You may obtain a copy of the ECL 2.0 License at
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+ */
+
+package org.collectionspace.services.client;
+
+import org.collectionspace.services.claim.ClaimsCommon;
+
+/**
+ * ClaimClient.java
+ *
+ * $LastChangedRevision: 5284 $
+ * $LastChangedDate: 2011-07-22 12:44:36 -0700 (Fri, 22 Jul 2011) $
+ *
+ */
+public class ClaimClient extends AbstractCommonListPoxServiceClientImpl<ClaimProxy, ClaimsCommon> {
+ public static final String SERVICE_NAME = "claims";
+ public static final String SERVICE_PATH_COMPONENT = SERVICE_NAME;
+ public static final String SERVICE_PATH = "/" + SERVICE_PATH_COMPONENT;
+ public static final String SERVICE_PATH_PROXY = SERVICE_PATH + "/";
+ public static final String SERVICE_PAYLOAD_NAME = SERVICE_NAME;
+
+ public ClaimClient() throws Exception {
+ super();
+ // TODO Auto-generated constructor stub
+ }
+
+ @Override
+ public String getServiceName() {
+ return SERVICE_NAME;
+ }
+
+ @Override
+ public String getServicePathComponent() {
+ return SERVICE_PATH_COMPONENT;
+ }
+
+ @Override
+ public Class<ClaimProxy> getProxyClass() {
+ return ClaimProxy.class;
+ }
+}
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+ *
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+ *
+ * Copyright © 2009 Regents of the University of California
+ *
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+ *
+ * You may obtain a copy of the ECL 2.0 License at
+ *
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.collectionspace.services.client;
+
+import org.jboss.resteasy.client.ClientResponse;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+
+import org.collectionspace.services.client.workflow.WorkflowClient;
+import org.collectionspace.services.jaxb.AbstractCommonList;
+
/**
 * ClaimProxy.java
 *
 * RESTEasy client proxy interface for the Claim service, rooted at the
 * "/claims/" resource path. All operations consume and produce XML.
 *
 * $LastChangedRevision: 5284 $
 * $LastChangedDate: 2011-07-22 12:44:36 -0700 (Fri, 22 Jul 2011) $
 */
@Path("/claims/")
@Produces("application/xml")
@Consumes("application/xml")
public interface ClaimProxy extends CollectionSpaceCommonListPoxProxy {

    // Sorted list
    /**
     * Reads a list of claim records, sorted by the given field.
     *
     * @param sortFieldName the field to order the result list by
     * @return a response wrapping the common list of matching records
     */
    @GET
    @Produces({"application/xml"})
    ClientResponse<AbstractCommonList> readListSortedBy(
        @QueryParam(IClientQueryParams.ORDER_BY_PARAM) String sortFieldName);

    /**
     * Reads a list of claim records, optionally including soft-deleted ones.
     *
     * @param includeDeleted whether workflow-deleted records are included
     * @return a response wrapping the common list of matching records
     */
    @Override
    @GET
    @Produces({"application/xml"})
    ClientResponse<AbstractCommonList> readIncludeDeleted(
        @QueryParam(WorkflowClient.WORKFLOW_QUERY_NONDELETED) String includeDeleted);

    /**
     * Performs a keyword search, optionally including soft-deleted records.
     *
     * @param keywords the keywords to search for
     * @param includeDeleted whether workflow-deleted records are included
     * @return a response wrapping the common list of matching records
     */
    @Override
    @GET
    @Produces({"application/xml"})
    ClientResponse<AbstractCommonList> keywordSearchIncludeDeleted(
        @QueryParam(IQueryManager.SEARCH_TYPE_KEYWORDS_KW) String keywords,
        @QueryParam(WorkflowClient.WORKFLOW_QUERY_NONDELETED) String includeDeleted);

    /**
     * Performs a keyword search with the result list sorted by a field.
     *
     * @param keywords the keywords to search for
     * @param sortFieldName the field to order the result list by
     * @return a response wrapping the common list of matching records
     */
    @GET
    @Produces({"application/xml"})
    ClientResponse<AbstractCommonList> keywordSearchSortedBy(
        @QueryParam(IQueryManager.SEARCH_TYPE_KEYWORDS_KW) String keywords,
        @QueryParam(IClientQueryParams.ORDER_BY_PARAM) String sortFieldName);
}
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+ *
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+ *
+ * Copyright © 2009 Regents of the University of California
+ *
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+ *
+ * You may obtain a copy of the ECL 2.0 License at
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.collectionspace.services.client.test;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.collectionspace.services.PersonJAXBSchema;
+import org.collectionspace.services.client.CollectionSpaceClient;
+import org.collectionspace.services.client.ClaimClient;
+import org.collectionspace.services.client.PersonAuthorityClient;
+import org.collectionspace.services.client.PersonAuthorityClientUtils;
+import org.collectionspace.services.client.PayloadOutputPart;
+import org.collectionspace.services.client.PoxPayloadIn;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.common.authorityref.AuthorityRefList;
+import org.collectionspace.services.jaxb.AbstractCommonList;
+import org.collectionspace.services.claim.ClaimsCommon;
+import org.collectionspace.services.claim.ClaimClaimantGroupList;
+import org.collectionspace.services.claim.ClaimClaimantGroup;
+import org.collectionspace.services.person.PersonTermGroup;
+
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.Test;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * ClaimAuthRefsTest, carries out Authority References tests against a
+ * deployed and running Claim Service.
+ *
+ * $LastChangedRevision: 4159 $
+ * $LastChangedDate: 2011-02-15 16:11:08 -0800 (Tue, 15 Feb 2011) $
+ */
+public class ClaimAuthRefsTest extends BaseServiceTest<AbstractCommonList> {
+
+ private final String CLASS_NAME = ClaimAuthRefsTest.class.getName();
+ private final Logger logger = LoggerFactory.getLogger(CLASS_NAME);
+
+ // Instance variables specific to this test.
+ final String PERSON_AUTHORITY_NAME = "TestPersonAuth";
+ private String knownResourceId = null;
+ private List<String> claimIdsCreated = new ArrayList<String>();
+ private List<String> personIdsCreated = new ArrayList<String>();
+ private String personAuthCSID = null;
+ private String claimFilerRefName = null;
+ private String claimOnBehalfOfRefName = null;
+
+ /* (non-Javadoc)
+ * @see org.collectionspace.services.client.test.BaseServiceTest#getClientInstance()
+ */
+ @Override
+ protected CollectionSpaceClient getClientInstance() {
+ throw new UnsupportedOperationException(); //method not supported (or needed) in this test class
+ }
+
+ /* (non-Javadoc)
+ * @see org.collectionspace.services.client.test.BaseServiceTest#getAbstractCommonList(org.jboss.resteasy.client.ClientResponse)
+ */
+ @Override
+ protected AbstractCommonList getCommonList(
+ Response response) {
+ throw new UnsupportedOperationException(); //method not supported (or needed) in this test class
+ }
+
+ // ---------------------------------------------------------------
+ // CRUD tests : CREATE tests
+ // ---------------------------------------------------------------
+ // Success outcomes
+ @Test(dataProvider="testName", dataProviderClass=AbstractServiceTestImpl.class)
+ public void createWithAuthRefs(String testName) throws Exception {
+ testSetup(STATUS_CREATED, ServiceRequestType.CREATE);
+
+ // Submit the request to the service and store the response.
+ String identifier = createIdentifier();
+
+ // Create all the person refs and entities
+ createPersonRefs();
+
+ // Create a new Claim resource.
+ //
+ // One or more fields in this resource will be PersonAuthority
+ // references, and will refer to Person resources by their refNames.
+ ClaimClient claimClient = new ClaimClient();
+ PoxPayloadOut claminInstance = createClaimInstance("claimNumber-" + identifier,
+ claimFilerRefName,
+ claimOnBehalfOfRefName);
+ Response res = claimClient.create(claminInstance);
+ try {
+ int statusCode = res.getStatus();
+ if (logger.isDebugEnabled()) {
+ logger.debug(testName + ": status = " + statusCode);
+ }
+ Assert.assertTrue(testRequestType.isValidStatusCode(statusCode), invalidStatusCodeMessage(testRequestType, statusCode));
+ Assert.assertEquals(statusCode, testExpectedStatusCode);
+
+ String csid = extractId(res);
+ if (knownResourceId == null) {
+ knownResourceId = csid;
+ }
+ // Store the IDs from every resource created by tests,
+ // so they can be deleted after tests have been run.
+ claimIdsCreated.add(csid);
+ } finally {
+ res.close();
+ }
+ }
+
+ protected void createPersonRefs() throws Exception{
+ // Create a temporary PersonAuthority resource, and its corresponding
+ // refName by which it can be identified.
+ PersonAuthorityClient personAuthClient = new PersonAuthorityClient();
+ PoxPayloadOut multipart = PersonAuthorityClientUtils.createPersonAuthorityInstance(
+ PERSON_AUTHORITY_NAME, PERSON_AUTHORITY_NAME, personAuthClient.getCommonPartName());
+ Response res = personAuthClient.create(multipart);
+ try {
+ int statusCode = res.getStatus();
+
+ Assert.assertTrue(testRequestType.isValidStatusCode(statusCode),
+ invalidStatusCodeMessage(testRequestType, statusCode));
+ Assert.assertEquals(statusCode, STATUS_CREATED);
+ personAuthCSID = extractId(res);
+ } finally {
+ res.close();
+ }
+ String authRefName = PersonAuthorityClientUtils.getAuthorityRefName(personAuthCSID, personAuthClient);
+
+ // Create temporary Person resources, and their corresponding refNames
+ // by which they can be identified.
+ String csid = createPerson("Carrie", "ClaimFiler", "carrieClaimFiler", authRefName);
+ personIdsCreated.add(csid);
+ claimFilerRefName = PersonAuthorityClientUtils.getPersonRefName(personAuthCSID, csid, personAuthClient);
+
+ csid = createPerson("Ben", "BehalfOf", "benBehalfOf", authRefName);
+ personIdsCreated.add(csid);
+ claimOnBehalfOfRefName = PersonAuthorityClientUtils.getPersonRefName(personAuthCSID, csid, personAuthClient);
+ }
+
+ protected String createPerson(String firstName, String surName, String shortId, String authRefName ) throws Exception {
+ PersonAuthorityClient personAuthClient = new PersonAuthorityClient();
+ Map<String, String> personInfo = new HashMap<String,String>();
+ personInfo.put(PersonJAXBSchema.FORE_NAME, firstName);
+ personInfo.put(PersonJAXBSchema.SUR_NAME, surName);
+ personInfo.put(PersonJAXBSchema.SHORT_IDENTIFIER, shortId);
+ List<PersonTermGroup> personTerms = new ArrayList<PersonTermGroup>();
+ PersonTermGroup term = new PersonTermGroup();
+ String termName = firstName + " " + surName;
+ term.setTermDisplayName(termName);
+ term.setTermName(termName);
+ personTerms.add(term);
+ PoxPayloadOut multipart =
+ PersonAuthorityClientUtils.createPersonInstance(personAuthCSID,
+ authRefName, personInfo, personTerms, personAuthClient.getItemCommonPartName());
+ Response res = personAuthClient.createItem(personAuthCSID, multipart);
+ int statusCode = res.getStatus();
+
+ Assert.assertTrue(testRequestType.isValidStatusCode(statusCode),
+ invalidStatusCodeMessage(testRequestType, statusCode));
+ Assert.assertEquals(statusCode, STATUS_CREATED);
+ return extractId(res);
+ }
+
+ // Success outcomes
+ @Test(dataProvider="testName", dataProviderClass=AbstractServiceTestImpl.class,
+ dependsOnMethods = {"createWithAuthRefs"})
+ public void readAndCheckAuthRefs(String testName) throws Exception {
+ // Perform setup.
+ testSetup(STATUS_OK, ServiceRequestType.READ);
+
+ // Submit the request to the service and store the response.
+ ClaimClient claimClient = new ClaimClient();
+ Response res = claimClient.read(knownResourceId);
+ try {
+ assertStatusCode(res, testName);
+ PoxPayloadIn input = new PoxPayloadIn(res.readEntity(String.class));
+ ClaimsCommon claimCommon = (ClaimsCommon) extractPart(input, claimClient.getCommonPartName(), ClaimsCommon.class);
+ Assert.assertNotNull(claimCommon);
+
+ if(logger.isDebugEnabled()){
+ logger.debug(objectAsXmlString(claimCommon, ClaimsCommon.class));
+ }
+ // Check a couple of fields
+ Assert.assertEquals(claimCommon.getClaimClaimantGroupList().getClaimClaimantGroup().get(0).getFiledBy(), claimFilerRefName);
+ Assert.assertEquals(claimCommon.getClaimClaimantGroupList().getClaimClaimantGroup().get(0).getFiledOnBehalfOf(), claimOnBehalfOfRefName);
+ } finally {
+ if (res != null) {
+ res.close();
+ }
+ }
+
+ // Get the auth refs and check them
+ res = claimClient.getAuthorityRefs(knownResourceId);
+ AuthorityRefList list = null;
+ try {
+ assertStatusCode(res, testName);
+ list = res.readEntity(AuthorityRefList.class);
+ Assert.assertNotNull(list);
+ } finally {
+ if (res != null) {
+ res.close();
+ }
+ }
+
+ int expectedAuthRefs = personIdsCreated.size();
+ List<AuthorityRefList.AuthorityRefItem> items = list.getAuthorityRefItem();
+ int numAuthRefsFound = items.size();
+ if (logger.isDebugEnabled()) {
+ logger.debug("Expected " + expectedAuthRefs + " authority references, found " + numAuthRefsFound);
+ }
+
+ // Optionally output additional data about list members for debugging.
+ boolean iterateThroughList = true;
+ if (iterateThroughList && logger.isDebugEnabled()) {
+ int i = 0;
+ for (AuthorityRefList.AuthorityRefItem item : items) {
+ logger.debug(testName + ": list-item[" + i + "] Field:" +
+ item.getSourceField() + "= " +
+ item.getAuthDisplayName() +
+ item.getItemDisplayName());
+ logger.debug(testName + ": list-item[" + i + "] refName=" +
+ item.getRefName());
+ logger.debug(testName + ": list-item[" + i + "] URI=" +
+ item.getUri());
+ i++;
+ }
+ }
+
+ Assert.assertEquals(numAuthRefsFound, expectedAuthRefs,
+ "Did not find all expected authority references! " + "Expected " + expectedAuthRefs + ", found " + numAuthRefsFound);
+ }
+
+
+ // ---------------------------------------------------------------
+ // Cleanup of resources created during testing
+ // ---------------------------------------------------------------
+
+ /**
+ * Deletes all resources created by tests, after all tests have been run.
+ *
+ * This cleanup method will always be run, even if one or more tests fail.
+ * For this reason, it attempts to remove all resources created
+ * at any point during testing, even if some of those resources
+ * may be expected to be deleted by certain tests.
+ * @throws Exception
+ */
+ @AfterClass(alwaysRun=true)
+ public void cleanUp() throws Exception {
+ String noTest = System.getProperty("noTestCleanup");
+ if (Boolean.TRUE.toString().equalsIgnoreCase(noTest)) {
+ if (logger.isDebugEnabled()) {
+ logger.debug("Skipping Cleanup phase ...");
+ }
+ return;
+ }
+ if (logger.isDebugEnabled()) {
+ logger.debug("Cleaning up temporary resources created for testing ...");
+ }
+
+ //
+ // Delete Claim resource(s).
+ ClaimClient claimClient = new ClaimClient();
+ for (String resourceId : claimIdsCreated) {
+ // Note: Any non-success responses are ignored and not reported.
+ claimClient.delete(resourceId).close();
+ }
+
+ //
+ // Delete Person resource(s) (before PersonAuthority resources).
+ PersonAuthorityClient personAuthClient = new PersonAuthorityClient();
+ for (String resourceId : personIdsCreated) {
+ // Note: Any non-success responses are ignored and not reported.
+ personAuthClient.deleteItem(personAuthCSID, resourceId).close();
+ }
+ if (personAuthCSID != null) {
+ personAuthClient.delete(personAuthCSID).close();
+ }
+ }
+
+ // ---------------------------------------------------------------
+ // Utility methods used by tests above
+ // ---------------------------------------------------------------
+
+ @Override
+ protected String getServiceName() {
+ return ClaimClient.SERVICE_NAME;
+ }
+
+ @Override
+ public String getServicePathComponent() {
+ return ClaimClient.SERVICE_PATH_COMPONENT;
+ }
+
+ private PoxPayloadOut createClaimInstance(String claimNumber,
+ String claimFiler,
+ String claimFiledOnBehalfOf) throws Exception {
+ ClaimsCommon claimCommon = new ClaimsCommon();
+ claimCommon.setClaimNumber(claimNumber);
+ ClaimClaimantGroupList claimClaimantGroupList = new ClaimClaimantGroupList();
+ ClaimClaimantGroup claimClaimantGroup = new ClaimClaimantGroup();
+ claimClaimantGroup.setFiledBy(claimFiler);
+ claimClaimantGroup.setFiledOnBehalfOf(claimFiledOnBehalfOf);
+ claimClaimantGroupList.getClaimClaimantGroup().add(claimClaimantGroup);
+ claimCommon.setClaimClaimantGroupList(claimClaimantGroupList);
+
+ PoxPayloadOut multipart = new PoxPayloadOut(this.getServicePathComponent());
+ PayloadOutputPart commonPart =
+ multipart.addPart(claimCommon, MediaType.APPLICATION_XML_TYPE);
+ commonPart.setLabel(new ClaimClient().getCommonPartName());
+
+ if(logger.isDebugEnabled()){
+ logger.debug("to be created, claim common");
+ logger.debug(objectAsXmlString(claimCommon, ClaimsCommon.class));
+ }
+
+ return multipart;
+ }
+
+ @Override
+ protected CollectionSpaceClient getClientInstance(String clientPropertiesFilename) throws Exception {
+ throw new UnsupportedOperationException(); //method not supported (or needed) in this test class
+ }
+}
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+ *
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+ *
+ * Copyright © 2009 Regents of the University of California
+ *
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+ *
+ * You may obtain a copy of the ECL 2.0 License at
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.collectionspace.services.client.test;
+
+import java.util.List;
+import javax.ws.rs.core.MediaType;
+import org.collectionspace.services.common.api.GregorianCalendarDateTimeUtils;
+import org.collectionspace.services.client.CollectionSpaceClient;
+import org.collectionspace.services.client.ClaimClient;
+import org.collectionspace.services.jaxb.AbstractCommonList;
+import org.collectionspace.services.claim.ClaimsCommon;
+import org.collectionspace.services.claim.ResponsibleDepartmentsList;
+import org.collectionspace.services.claim.ClaimClaimantGroupList;
+import org.collectionspace.services.claim.ClaimClaimantGroup;
+import org.collectionspace.services.claim.ClaimReceivedGroupList;
+import org.collectionspace.services.claim.ClaimReceivedGroup;
+
+import org.collectionspace.services.client.PayloadOutputPart;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.testng.Assert;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * ClaimServiceTest, carries out tests against a
+ * deployed and running Claim Service.
+ *
+ * $LastChangedRevision: 5952 $
+ * $LastChangedDate: 2011-11-14 23:26:36 -0800 (Mon, 14 Nov 2011) $
+ */
+public class ClaimServiceTest extends AbstractPoxServiceTestImpl<AbstractCommonList, ClaimsCommon> {
+
+ /** The logger. */
+ private final String CLASS_NAME = ClaimServiceTest.class.getName();
+ private final Logger logger = LoggerFactory.getLogger(CLASS_NAME);
+
+ // Instance variables specific to this test.
+ private final static String CURRENT_DATE_UTC =
+ GregorianCalendarDateTimeUtils.timestampUTC();
+
    /**
     * Returns a new client for the service under test.
     *
     * @see org.collectionspace.services.client.test.BaseServiceTest#getClientInstance()
     */
    @Override
    protected CollectionSpaceClient getClientInstance() throws Exception {
        return new ClaimClient();
    }
+
+ // ---------------------------------------------------------------
+ // Utility methods used by tests above
+ // ---------------------------------------------------------------
+
    /** Returns the name of the service under test ("claims"). */
    @Override
    protected String getServiceName() {
        return ClaimClient.SERVICE_NAME;
    }
+
    /**
     * Returns the URI path component of the service under test.
     *
     * @see org.collectionspace.services.client.test.BaseServiceTest#getServicePathComponent()
     */
    @Override
    public String getServicePathComponent() {
        return ClaimClient.SERVICE_PATH_COMPONENT;
    }
+
+ /**
+ * Creates the claim instance.
+ *
+ * @param identifier the identifier
+ * @return the multipart output
+ * @throws Exception
+ */
+ private PoxPayloadOut createClaimInstance(String identifier) throws Exception {
+ return createInstance("claimNumber-" + identifier);
+ }
+
+ /**
+ * Creates an instance of a Claim record for testing.
+ *
+ * @param claimNumber A claim number.
+ * @return Multipart output suitable for use as a payload
+ * in a create or update request.
+ * @throws Exception
+ */
+ @Override
+ protected PoxPayloadOut createInstance(String claimNumber) throws Exception {
+ ClaimsCommon claimCommon = new ClaimsCommon();
+
+ ResponsibleDepartmentsList responsibleDepartmentsList = new ResponsibleDepartmentsList();
+ List<String> responsibleDepartments = responsibleDepartmentsList.getResponsibleDepartment();
+ String identifier = createIdentifier();
+ responsibleDepartments.add("First Responsible Department-" + identifier);
+ responsibleDepartments.add("Second Responsible Department-" + identifier);
+
+ ClaimClaimantGroupList claimClaimantGroupList = new ClaimClaimantGroupList();
+ ClaimClaimantGroup claimClaimantGroup = new ClaimClaimantGroup();
+ claimClaimantGroup.setFiledBy("urn:cspace:core.collectionspace.org:personauthorities:name(TestPersonAuth):item:name(carrieClaimFiler)'Carrie ClaimFiler'");
+ claimClaimantGroup.setFiledOnBehalfOf("urn:cspace:core.collectionspace.org:personauthorities:name(TestPersonAuth):item:name(benBehalfOf)'Ben BehalfOf'");
+ claimClaimantGroup.setClaimantNote(getUTF8DataFragment());
+ claimClaimantGroupList.getClaimClaimantGroup().add(claimClaimantGroup);
+
+ ClaimReceivedGroupList claimReceivedGroupList = new ClaimReceivedGroupList();
+ ClaimReceivedGroup claimReceivedGroup = new ClaimReceivedGroup();
+ claimReceivedGroup.setClaimReceivedDate(CURRENT_DATE_UTC);
+ claimReceivedGroup.setClaimReceivedNote(getUTF8DataFragment());
+ claimReceivedGroupList.getClaimReceivedGroup().add(claimReceivedGroup);
+
+ claimCommon.setResponsibleDepartments(responsibleDepartmentsList);
+ claimCommon.setClaimClaimantGroupList(claimClaimantGroupList);
+ claimCommon.setClaimReceivedGroupList(claimReceivedGroupList);
+ claimCommon.setClaimNumber(claimNumber);
+
+ PoxPayloadOut multipart = new PoxPayloadOut(this.getServicePathComponent());
+ PayloadOutputPart commonPart = multipart.addPart(claimCommon, MediaType.APPLICATION_XML_TYPE);
+ commonPart.setLabel(new ClaimClient().getCommonPartName());
+
+ if(logger.isDebugEnabled()){
+ logger.debug("to be created, claim common");
+ logger.debug(objectAsXmlString(claimCommon, ClaimsCommon.class));
+ }
+
+ return multipart;
+ }
+
+ /*
+ * For convenience and terseness, this test method is the base of the test execution dependency chain. Other test methods may
+ * refer to this method in their @Test annotation declarations.
+ */
+    @Override
+    public void CRUDTests(String testName) {
+        // Intentionally empty: this method exists only so other @Test methods can
+        // name it in dependsOnMethods, anchoring the TestNG dependency chain.
+    }
+
+    @Override
+    protected PoxPayloadOut createInstance(String commonPartName, String identifier) throws Exception {
+        // The claim test payload has a single common part, so the part name
+        // argument is not needed here.
+        return createClaimInstance(identifier);
+    }
+
+    @Override
+    protected ClaimsCommon updateInstance(ClaimsCommon claimCommon) {
+        // Exercise deletion of an existing scalar string value.
+        claimCommon.setClaimNumber("");
+
+        // Exercise an update of a string field inside a repeatable group.
+        ClaimClaimantGroup claimantGroup =
+                claimCommon.getClaimClaimantGroupList().getClaimClaimantGroup().get(0);
+        claimantGroup.setClaimantNote("updated claim note-" + claimantGroup.getClaimantNote());
+
+        // Exercise removing a value from a repeatable scalar list.
+        claimCommon.getResponsibleDepartments().getResponsibleDepartment().remove(0);
+
+        // Exercise replacing a date value and deleting a string within a group.
+        ClaimReceivedGroup receivedGroup =
+                claimCommon.getClaimReceivedGroupList().getClaimReceivedGroup().get(0);
+        receivedGroup.setClaimReceivedDate(GregorianCalendarDateTimeUtils.timestampUTC());
+        receivedGroup.setClaimReceivedNote("");
+
+        return claimCommon;
+    }
+
+    @Override
+    protected void compareReadInstances(ClaimsCommon original, ClaimsCommon fromRead) throws Exception {
+        // Check selected fields of the record returned by the read.
+
+        // The received date should round-trip unchanged.
+        String receivedDate = fromRead.getClaimReceivedGroupList().getClaimReceivedGroup().get(0).getClaimReceivedDate();
+
+        if (logger.isDebugEnabled()) {
+            logger.debug("receivedDate=" + receivedDate);
+            logger.debug("TIMESTAMP_UTC=" + CURRENT_DATE_UTC);
+        }
+        // assertEquals (rather than assertTrue on equals()) reports both values on
+        // failure and does not throw NPE if the returned date is null.
+        Assert.assertEquals(receivedDate, CURRENT_DATE_UTC);
+
+        // Fields containing Unicode UTF-8 (non-Latin-1) characters should round-trip unchanged.
+        String claimNote = fromRead.getClaimClaimantGroupList().getClaimClaimantGroup().get(0).getClaimantNote();
+
+        if (logger.isDebugEnabled()) {
+            logger.debug("UTF-8 data sent=" + getUTF8DataFragment() + "\n"
+                    + "UTF-8 data received=" + claimNote);
+        }
+        Assert.assertEquals(claimNote, getUTF8DataFragment(),
+                "UTF-8 data retrieved '" + claimNote
+                + "' does not match expected data '" + getUTF8DataFragment() + "'");
+    }
+
+    @Override
+    protected void compareUpdatedInstances(ClaimsCommon claimCommon, ClaimsCommon updatedClaimCommon) throws Exception {
+        String originalClaimNote = claimCommon.getClaimClaimantGroupList().getClaimClaimantGroup().get(0).getClaimantNote();
+        String updatedClaimNote = updatedClaimCommon.getClaimClaimantGroupList().getClaimClaimantGroup().get(0).getClaimantNote();
+
+        Assert.assertEquals(updatedClaimNote, originalClaimNote,
+                "Data in updated object did not match submitted data.");
+
+        // updateInstance() removed one department, so exactly one should remain.
+        // Note: TestNG's assertEquals signature is (actual, expected, message).
+        List<String> updatedResponsibleDepartments = updatedClaimCommon.getResponsibleDepartments().getResponsibleDepartment();
+        Assert.assertEquals(updatedResponsibleDepartments.size(), 1,
+                "Data in updated object did not match submitted data.");
+
+        Assert.assertEquals(updatedResponsibleDepartments.get(0),
+                claimCommon.getResponsibleDepartments().getResponsibleDepartment().get(0),
+                "Data in updated object did not match submitted data.");
+
+        // updateInstance() replaced the received date with a fresh timestamp, so the
+        // values must differ. assertNotEquals compares string values; assertNotSame
+        // only checks object identity, which two equal-but-distinct String instances
+        // would wrongly satisfy.
+        Assert.assertNotEquals(updatedClaimCommon.getClaimReceivedGroupList().getClaimReceivedGroup().get(0).getClaimReceivedDate(),
+                claimCommon.getClaimReceivedGroupList().getClaimReceivedGroup().get(0).getClaimReceivedDate(),
+                "Data in updated object did not match submitted data.");
+
+        if (logger.isDebugEnabled()) {
+            logger.debug("UTF-8 data sent=" + originalClaimNote + "\n"
+                    + "UTF-8 data received=" + updatedClaimNote);
+        }
+        Assert.assertTrue(updatedClaimNote.contains(getUTF8DataFragment()),
+                "UTF-8 data retrieved '" + updatedClaimNote
+                + "' does not contain expected data '" + getUTF8DataFragment() + "'");
+        Assert.assertEquals(updatedClaimNote, originalClaimNote,
+                "Data in updated object did not match submitted data.");
+    }
+
+    @Override
+    protected CollectionSpaceClient getClientInstance(String clientPropertiesFilename) throws Exception {
+        // The properties filename is unused here: the no-arg ClaimClient
+        // constructor reads the default client properties.
+        return new ClaimClient();
+    }
+}
--- /dev/null
+log4j.rootLogger=debug, stdout, R
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+
+# Pattern to output the caller's file name and line number.
+log4j.appender.stdout.layout.ConversionPattern=%d %-5p [%t] [%c:%L] %m%n
+
+log4j.appender.R=org.apache.log4j.RollingFileAppender
+log4j.appender.R.File=target/test-client.log
+
+log4j.appender.R.MaxFileSize=100KB
+# Keep one backup file
+log4j.appender.R.MaxBackupIndex=1
+
+log4j.appender.R.layout=org.apache.log4j.PatternLayout
+log4j.appender.R.layout.ConversionPattern=%d %-5p [%t] [%c:%L] %m%n
+
+#packages
+log4j.logger.org.collectionspace=DEBUG
+log4j.logger.org.apache=INFO
+log4j.logger.httpclient=INFO
+log4j.logger.org.jboss.resteasy=INFO
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <artifactId>org.collectionspace.services.claim</artifactId>
+ <groupId>org.collectionspace.services</groupId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.claim.jaxb</artifactId>
+ <name>services.claim.jaxb</name>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>collectionspace-services-claim-jaxb</finalName>
+ <defaultGoal>install</defaultGoal>
+ <plugins>
+ <plugin>
+ <groupId>org.jvnet.jaxb2.maven2</groupId>
+ <artifactId>maven-jaxb2-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+</project>
+
--- /dev/null
+/**
+ *
+ */
+package org.collectionspace.services;
+
+public interface ClaimJAXBSchema {
+
+    // Interface fields are implicitly public static final.
+    /** Schema field name of the claim number element (see claims_common XSD). */
+    String CLAIM_NUMBER = "claimNumber";
+}
--- /dev/null
+package org.collectionspace.services;
+
+public interface ClaimListItemJAXBSchema {
+
+    // Interface fields are implicitly public static final.
+    /** Field name of the claim number in list results. */
+    String CLAIM_NUMBER = "claimNumber";
+    /** Field name of the record's CSID. */
+    String CSID = "csid";
+    /** Field name of the record's URI. NOTE(review): the value is "url", not "uri" — confirm this matches the list schema. */
+    String URI = "url";
+}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+
+<!--
+ Claim schema (XSD)
+
+ Entity : Claim
+ Part : Common
+ Used for: JAXB binding between XML and Java objects
+
+ $LastChangedRevision: 5284 $
+ $LastChangedDate: 2011-07-22 12:44:36 -0700 (Fri, 22 Jul 2011) $
+-->
+
+<xs:schema
+ xmlns:xs="http://www.w3.org/2001/XMLSchema"
+ xmlns:jaxb="http://java.sun.com/xml/ns/jaxb"
+ jaxb:version="1.0" elementFormDefault="unqualified"
+ xmlns:ns="http://collectionspace.org/services/claim"
+ xmlns="http://collectionspace.org/services/claim"
+ targetNamespace="http://collectionspace.org/services/claim"
+ version="0.1"
+>
+
+<!--
+ Avoid XmlRootElement nightmare:
+ See http://weblogs.java.net/blog/kohsuke/archive/2006/03/why_does_jaxb_p.html
+-->
+
+ <xs:element name="claims_common">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element name="claimNumber" type="xs:string"/>
+ <xs:element name="responsibleDepartments" type="responsibleDepartmentsList"/>
+ <xs:element name="claimClaimantGroupList" type="claimClaimantGroupList"/>
+ <xs:element name="claimReceivedGroupList" type="claimReceivedGroupList"/>
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:complexType name="responsibleDepartmentsList">
+ <xs:sequence>
+ <xs:element name="responsibleDepartment" type="xs:string"
+ minOccurs="0" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="claimClaimantGroupList">
+ <xs:sequence>
+ <xs:element name="claimClaimantGroup" type="claimClaimantGroup" minOccurs="0" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+ <xs:complexType name="claimClaimantGroup">
+ <xs:sequence>
+ <xs:element name="filedBy" type="xs:string"/>
+ <xs:element name="filedOnBehalfOf" type="xs:string"/>
+ <xs:element name="claimantNote" type="xs:string"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="claimReceivedGroupList">
+ <xs:sequence>
+ <xs:element name="claimReceivedGroup" type="claimReceivedGroup" minOccurs="0" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+ <xs:complexType name="claimReceivedGroup">
+ <xs:sequence>
+ <xs:element name="claimReceivedDate" type="xs:string"/>
+ <xs:element name="claimReceivedNote" type="xs:string"/>
+ </xs:sequence>
+ </xs:complexType>
+</xs:schema>
+
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- A comment. -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.main</artifactId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.claim</artifactId>
+ <name>services.claim</name>
+ <packaging>pom</packaging>
+
+ <modules>
+ <module>jaxb</module>
+ <module>service</module>
+ <module>3rdparty</module>
+ <module>client</module>
+ </modules>
+
+</project>
+
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <parent>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.claim</artifactId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.claim.service</artifactId>
+ <name>services.claim.service</name>
+ <packaging>jar</packaging>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.claim.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.claim.client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.collectionobject.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- External dependencies -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.testng</groupId>
+ <artifactId>testng</artifactId>
+ <version>5.6</version>
+ </dependency>
+
+ <!-- javax -->
+
+ <dependency>
+ <groupId>javax.security</groupId>
+ <artifactId>jaas</artifactId>
+ <version>1.0.01</version>
+ <scope>provided</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>dom4j</groupId>
+ <artifactId>dom4j</artifactId>
+ <version>1.6.1</version>
+ <scope>provided</scope>
+ </dependency>
+
+ <!-- jboss -->
+
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-jaxrs</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>tjws</groupId>
+ <artifactId>webserver</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-jaxb-provider</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-multipart-provider</artifactId>
+ </dependency>
+
+ <!-- nuxeo -->
+
+ <dependency>
+ <groupId>org.nuxeo.ecm.core</groupId>
+ <artifactId>nuxeo-core-api</artifactId>
+ <version>${nuxeo.core.version}</version>
+ <exclusions>
+ <exclusion>
+ <artifactId>jboss-remoting</artifactId>
+ <groupId>jboss</groupId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>collectionspace-services-claim</finalName>
+ </build>
+</project>
+
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<profilesXml xmlns="http://maven.apache.org/PROFILES/1.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/PROFILES/1.0.0 http://maven.apache.org/xsd/profiles-1.0.0.xsd">
+</profilesXml>
\ No newline at end of file
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+ *
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+ *
+ * Copyright © 2009 Regents of the University of California
+ *
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+ *
+ * You may obtain a copy of the ECL 2.0 License at
+ *
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.collectionspace.services.claim;
+
+import org.collectionspace.services.client.ClaimClient;
+import org.collectionspace.services.common.NuxeoBasedResource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+
+/**
+ * ClaimResource.java
+ *
+ * Handles requests to the Claim service, orchestrates the retrieval
+ * of relevant resources, and returns responses to the client. CRUD
+ * behavior is inherited from NuxeoBasedResource.
+ */
+@Path(ClaimClient.SERVICE_PATH)
+@Consumes("application/xml")
+@Produces("application/xml")
+public class ClaimResource extends NuxeoBasedResource {
+
+    // SLF4J loggers are thread-safe; use one private static instance per class
+    // rather than a package-visible per-instance field.
+    private static final Logger logger = LoggerFactory.getLogger(ClaimResource.class);
+
+    /** Returns the source-control revision string for this resource. */
+    @Override
+    protected String getVersionString() {
+        final String lastChangeRevision = "$LastChangedRevision: 4763 $";
+        return lastChangeRevision;
+    }
+
+    /** Returns the service name used for configuration and security bindings. */
+    @Override
+    public String getServiceName() {
+        return ClaimClient.SERVICE_NAME;
+    }
+
+    /** Returns the JAXB class bound to the claims_common schema part. */
+    @Override
+    public Class<ClaimsCommon> getCommonPartClass() {
+        return ClaimsCommon.class;
+    }
+
+}
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+
+ * Copyright 2009 University of California at Berkeley
+
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+
+ * You may obtain a copy of the ECL 2.0 License at
+
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.collectionspace.services.claim.nuxeo;
+
+/**
+ * ClaimConstants specifies constants for the Claim service
+ *
+ */
+public class ClaimConstants {
+
+ public final static String NUXEO_DOCTYPE = "Claim";
+ public final static String NUXEO_SCHEMA_NAME = "claim";
+ public final static String NUXEO_DC_TITLE = "CollectionSpace-Claim";
+}
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+
+ * Copyright 2009 University of California at Berkeley
+
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+
+ * You may obtain a copy of the ECL 2.0 License at
+
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.collectionspace.services.claim.nuxeo;
+
+import org.collectionspace.services.claim.ClaimsCommon;
+import org.collectionspace.services.nuxeo.client.java.NuxeoDocumentModelHandler;
+
+/**
+ * Document model handler for Claim records. All handling behavior is
+ * inherited unchanged from NuxeoDocumentModelHandler, bound to the
+ * ClaimsCommon JAXB type; no claim-specific overrides are currently needed.
+ *
+ * $LastChangedRevision: 5284 $
+ * $LastChangedDate: 2011-07-22 12:44:36 -0700 (Fri, 22 Jul 2011) $
+ */
+public class ClaimDocumentModelHandler
+        extends NuxeoDocumentModelHandler<ClaimsCommon> {
+
+}
+
--- /dev/null
+package org.collectionspace.services.claim.nuxeo;
+
+import org.collectionspace.services.common.context.ServiceContext;
+import org.collectionspace.services.common.document.InvalidDocumentException;
+import org.collectionspace.services.common.document.ValidatorHandler;
+import org.collectionspace.services.common.document.DocumentHandler.Action;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Validator handler invoked for Claim document actions (e.g. CREATE, UPDATE).
+ * Currently performs no checks — it only logs the action — and serves as the
+ * hook point where claim-specific validation rules would be added.
+ */
+public class ClaimValidatorHandler implements ValidatorHandler {
+
+    private static final Logger logger = LoggerFactory.getLogger(ClaimValidatorHandler.class);
+
+    @Override
+    public void validate(Action action, ServiceContext ctx)
+            throws InvalidDocumentException {
+        // Parameterized logging defers message construction until DEBUG is
+        // enabled, so no isDebugEnabled() guard is needed.
+        logger.debug("validate() action={}", action.name());
+    }
+
+}
--- /dev/null
+package org.collectionspace.services.test;
+
+/**
+ * Placeholder for server-side testing of Claim service code. Kept so the
+ * service module has a test compilation unit; add server-side unit tests here.
+ *
+ * @version $Revision: 2107 $
+ */
+public class ClaimServiceTest {
+    // Intentionally empty.
+}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" ?>
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
+
+ <appender name="console" class="org.apache.log4j.ConsoleAppender">
+ <param name="Target" value="System.out" />
+ <layout class="org.apache.log4j.TTCCLayout">
+ <param name="DateFormat" value="ISO8601" />
+ </layout>
+ </appender>
+
+
+ <appender name="unit-tests"
+ class="org.apache.log4j.RollingFileAppender">
+ <param name="File" value="./target/unit-tests.log" />
+ <param name="MaxFileSize" value="10240KB" />
+ <param name="MaxBackupIndex" value="6" />
+ <layout class="org.apache.log4j.TTCCLayout">
+ <param name="DateFormat" value="ISO8601" />
+ </layout>
+ </appender>
+
+ <logger name="org.apache.commons.httpclient" additivity="false">
+ <level value="warn" />
+ <appender-ref ref="console" />
+ <appender-ref ref="unit-tests" />
+ </logger>
+
+ <logger name="httpclient.wire" additivity="false">
+ <level value="info" />
+ <appender-ref ref="console" />
+ <appender-ref ref="unit-tests" />
+ </logger>
+
+ <root>
+ <priority value="debug" />
+ <appender-ref ref="console" />
+ <appender-ref ref="unit-tests" />
+ </root>
+
+</log4j:configuration>
+
+
+
+
import java.net.URL;
import java.util.Properties;
-import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.ClientRequestContext;
import javax.ws.rs.client.ClientRequestFilter;
import javax.ws.rs.core.MultivaluedMap;
// FIXME: Deprecated classes that need to be updated
import org.jboss.resteasy.client.ProxyFactory;
import org.apache.http.impl.client.DefaultHttpClient;
-import org.apache.http.conn.ssl.SSLContexts;
-
-import java.security.SecureRandom;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
-import javax.net.ssl.SSLContext;
import javax.net.ssl.X509TrustManager;
/**
/** The logger. */
static protected final Logger logger = LoggerFactory.getLogger(AbstractServiceClientImpl.class);
- /**
- * The character used to separate the words in a part label
- */
- public static final String PART_LABEL_SEPARATOR = "_";
- /** The Constant PART_COMMON_LABEL. */
- public static final String PART_COMMON_LABEL = "common";
+
/** The properties. */
private Properties properties = new Properties();
/** The url. */
/** The http client. */
private HttpClient httpClient;
private org.apache.http.client.HttpClient httpClient4;
-
+
/** The RESTEasy proxy */
private P proxy;
// JAX-RS path for getting service description meta information
public static final String SERVICE_DESCRIPTION_PATH = "description";
+
+ /**
+ * The character used to separate the words in a part label
+ */
+ public static final String PART_LABEL_SEPARATOR = "_";
+ /** The Constant PART_COMMON_LABEL. */
+ public static final String PART_COMMON_LABEL = "common";
+ //
+ // Profile schema name suffixes and extension suffixes
+ //
+ public static final String BOTGARDEN_PROFILE_NAME = "botgarden";
+ public static final String ANTHROPOLOGY_PROFILE_NAME = "anthro";
+ public final static String LHMC_PROFILE_NAME = "lhmc";
+ public final static String FINEART_PROFILE_NAME = "fcart";
+ public final static String HERBARIUM_PROFILE_NAME = "herbarium";
+ //
+ // Profile schema name suffixes and extension suffixes
+ //
+ public final static String NATURALHISTORY_EXTENSION_NAME = "naturalhistory";
+ public final static String NATURALHISTORY_EXT_EXTENSION_NAME = "naturalhistory_extension";
+ public final static String VARIABLEMEDIA_EXTENSION_NAME = "variablemedia";
/**
* Gets the proxy.
/** The Constant SUBJECT. */
static public final String SUBJECT = "subjectCsid";
+ static public final String SUBJECT_DOCTYPE = "subjectDocumentType";
+
static public final String SUBJECT_REFNAME = "subjectRefName";
static public final String SUBJECT_QP = "sbj";
// static public final String SUBJECT_TYPE = "subjectType";
/** The Constant PREDICATE. */
static public final String PREDICATE = "predicate";
static public final String PREDICATE_QP = "prd";
+ static public final String RELATIONSHIP_TYPE = "relationshipType";
+
/** The Constant OBJECT. */
static public final String OBJECT = "objectCsid";
+ static public final String OBJECT_DOCTYPE = "objectDocumentType";
static public final String OBJECT_REFNAME = "objectRefName";
static public final String OBJECT_QP = "obj";
// static public final String OBJECT_TYPE = "objectType";
import javax.ws.rs.core.Response;
-import org.collectionspace.services.client.AuthorityClient;
import org.collectionspace.services.client.CollectionSpaceClient;
import org.collectionspace.services.client.PayloadInputPart;
import org.collectionspace.services.client.PayloadOutputPart;
import org.collectionspace.services.client.workflow.WorkflowClient;
import org.collectionspace.services.jaxb.AbstractCommonList;
import org.dom4j.Document;
-import org.jboss.resteasy.client.ClientResponse;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.testng.Assert;
/*
public abstract class AbstractPoxServiceTestImpl<CLT extends AbstractCommonList, CPT>
extends AbstractServiceTestImpl<CLT, CPT, PoxPayloadOut, String> {
+ private final String CLASS_NAME = AbstractPoxServiceTestImpl.class.getName();
+ private final Logger logger = LoggerFactory.getLogger(CLASS_NAME);
+
@Override
public CPT extractCommonPartValue(Response res) throws Exception {
CPT result = null;
"Part " + partLabel + " was unexpectedly null.");
return payloadInputPart;
}
+
}
import org.testng.Assert;
import org.testng.annotations.Test;
+import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
/**
return result;
}
- /* (non-Javadoc)
- * @see org.collectionspace.services.client.test.ServiceTest#createWithEmptyEntityBody(java.lang.String)
- */
- @Override
- public void createWithEmptyEntityBody(String testName) throws Exception {
- //FIXME: Should this test really be empty? If so, please comment accordingly.
- }
-
- /* (non-Javadoc)
- * @see org.collectionspace.services.client.test.AbstractServiceTestImpl#createWithMalformedXml(java.lang.String)
- */
- @Override
- public void createWithMalformedXml(String testName) throws Exception {
- //FIXME: Should this test really be empty? If so, please comment accordingly.
- }
-
- /* (non-Javadoc)
- * @see org.collectionspace.services.client.test.AbstractServiceTestImpl#createWithWrongXmlSchema(java.lang.String)
- */
- @Override
- public void createWithWrongXmlSchema(String testName) throws Exception {
- //FIXME: Should this test really be empty? If so, please comment accordingly.
- }
-
/* (non-Javadoc)
* @see org.collectionspace.services.client.test.ServiceTest#updateNonExistent(java.lang.String)
*/
Assert.assertEquals(updatedWorkflowCommons.getCurrentLifeCycleState(), expectedLifeCycleState);
return updatedWorkflowCommons.getCurrentLifeCycleState();
}
+
+ //
+ // Generic tests applicable to all services.
+ //
+
+    @Override
+    @Test(dataProvider="testName", dataProviderClass=AbstractPoxServiceTestImpl.class, dependsOnMethods = {"create", "testSubmitRequest"})
+    public void createWithEmptyEntityBody(String testName) throws Exception {
+        // Perform setup.
+        setupCreateWithEmptyEntityBody();
+
+        // Submit a create request with an empty entity body; the service
+        // should reject it with the status configured by the setup call.
+        String method = testRequestType.httpMethodName();
+        String url = getServiceRootURL();
+        String mediaType = MediaType.APPLICATION_XML;
+        final String entity = "";
+        int statusCode = submitRequest(method, url, mediaType, entity);
+
+        // Check the status code of the response: does it match
+        // the expected response(s)? Log via the testName parameter for
+        // consistency with the sibling createWith* tests.
+        if (logger.isDebugEnabled()) {
+            logger.debug(testName + ": url=" + url + " status=" + statusCode);
+        }
+        Assert.assertTrue(testRequestType.isValidStatusCode(statusCode),
+                invalidStatusCodeMessage(testRequestType, statusCode));
+        Assert.assertEquals(statusCode, testExpectedStatusCode);
+    }
+
+    @Override
+    @Test(dataProvider="testName", dataProviderClass=AbstractPoxServiceTestImpl.class, dependsOnMethods = {"create", "testSubmitRequest"})
+    public void createWithMalformedXml(String testName) throws Exception {
+        setupCreateWithMalformedXml();
+
+        // POST a payload that is not well-formed XML; the service should
+        // reject it with the status configured by the setup call.
+        final String entity = MALFORMED_XML_DATA; // Constant from base class.
+        String url = getServiceRootURL();
+        int statusCode = submitRequest(testRequestType.httpMethodName(), url,
+                MediaType.APPLICATION_XML, entity);
+
+        if (logger.isDebugEnabled()) {
+            logger.debug(testName + ": url=" + url + " status=" + statusCode);
+        }
+
+        // The status must be both valid for this request type and equal to
+        // the expected code.
+        Assert.assertTrue(testRequestType.isValidStatusCode(statusCode),
+                invalidStatusCodeMessage(testRequestType, statusCode));
+        Assert.assertEquals(statusCode, testExpectedStatusCode);
+    }
+
+    @Override
+    @Test(dataProvider="testName", dataProviderClass=AbstractPoxServiceTestImpl.class, dependsOnMethods = {"create", "testSubmitRequest"})
+    public void createWithWrongXmlSchema(String testName) throws Exception {
+        setupCreateWithWrongXmlSchema();
+
+        // POST well-formed XML that does not conform to the service schema;
+        // the service should reject it with the configured status.
+        final String entity = WRONG_XML_SCHEMA_DATA;
+        String url = getServiceRootURL();
+        int statusCode = submitRequest(testRequestType.httpMethodName(), url,
+                MediaType.APPLICATION_XML, entity);
+
+        if (logger.isDebugEnabled()) {
+            logger.debug(testName + ": url=" + url + " status=" + statusCode);
+        }
+
+        // The status must be both valid for this request type and equal to
+        // the expected code.
+        Assert.assertTrue(testRequestType.isValidStatusCode(statusCode),
+                invalidStatusCodeMessage(testRequestType, statusCode));
+        Assert.assertEquals(statusCode, testExpectedStatusCode);
+    }
}
public BaseServiceTest() {
super();
}
+
+    /** Returns the HTTP status code expected for the currently configured test request. */
+    protected int getExpectedStatusCode() {
+        return this.testExpectedStatusCode;
+    }
+
+    /** Returns the service request type (e.g. CREATE, READ) currently under test. */
+    protected ServiceRequestType getRequestType() {
+        return testRequestType;
+    }
//
// Decide if cleanup should happen
return result;
}
-
- /** The number of authority references expected. */
- private final int NUM_AUTH_REFS_EXPECTED = 7;
// ---------------------------------------------------------------
// CRUD tests : CREATE tests
//
CollectionObjectClient collectionObjectClient = new CollectionObjectClient();
Response res = collectionObjectClient.read(knownResourceId);
- CollectionobjectsCommon collectionObject = null;
try {
assertStatusCode(res, testName);
PoxPayloadIn input = new PoxPayloadIn((String)res.readEntity(String.class));
- collectionObject = (CollectionobjectsCommon) extractPart(input,
+ CollectionobjectsCommon collectionObject = (CollectionobjectsCommon) extractPart(input,
collectionObjectClient.getCommonPartName(), CollectionobjectsCommon.class);
Assert.assertNotNull(collectionObject);
+ // Check a sample of one or more person authority ref fields
+ Assert.assertEquals(collectionObject.getOwners().getOwner().get(0), ownerRefName);
+ Assert.assertEquals(collectionObject.getFieldCollectionSources().getFieldCollectionSource().get(0),
+ fieldCollectionSourceRefName);
+
+ // Check a sample of one or more organization authority ref fields
+ Assert.assertEquals(collectionObject.getContentOrganizations().getContentOrganization().get(0),
+ contentOrganizationRefName);
+ Assert.assertEquals(collectionObject.getAssocEventOrganizations().getAssocEventOrganization().get(0),
+ assocEventOrganizationRefName);
} finally {
if (res != null) {
res.close();
}
}
+ int expectAuthRefs = personIdsCreated.size() + orgIdsCreated.size();
List<AuthorityRefList.AuthorityRefItem> items = list.getAuthorityRefItem();
int numAuthRefsFound = items.size();
- if(logger.isDebugEnabled()){
- logger.debug("Expected " + NUM_AUTH_REFS_EXPECTED +
- " authority references, found " + numAuthRefsFound);
- }
- Assert.assertEquals(numAuthRefsFound, NUM_AUTH_REFS_EXPECTED,
- "Did not find all expected authority references! " +
- "Expected " + NUM_AUTH_REFS_EXPECTED + ", found " + numAuthRefsFound);
-
- // Check a sample of one or more person authority ref fields
- // Assert.assertEquals(collectionObject.getAssocPersons().getAssocPerson().get(0), assocPersonRefName);
- Assert.assertEquals(collectionObject.getOwners().getOwner().get(0), ownerRefName);
- Assert.assertEquals(collectionObject.getFieldCollectionSources().getFieldCollectionSource().get(0), fieldCollectionSourceRefName);
-
- // Check a sample of one or more organization authority ref fields
- Assert.assertEquals(collectionObject.getContentOrganizations().getContentOrganization().get(0), contentOrganizationRefName);
- Assert.assertEquals(collectionObject.getAssocEventOrganizations().getAssocEventOrganization().get(0), assocEventOrganizationRefName);
+ if (logger.isDebugEnabled()) {
+ logger.debug("Expected " + expectAuthRefs + " authority references, found " + numAuthRefsFound);
+ }
// Optionally output additional data about list members for debugging.
logger.info(this.toString());
boolean iterateThroughList = true;
- if(iterateThroughList && logger.isDebugEnabled()){;
+ if (iterateThroughList && logger.isDebugEnabled()) {
int i = 0;
- for(AuthorityRefList.AuthorityRefItem item : items){
+ for (AuthorityRefList.AuthorityRefItem item : items) {
logger.debug(testName + ": list-item[" + i + "] Field:" +
item.getSourceField() + " =" +
" item display name = " + item.getAuthDisplayName() +
" auth display name = " + item.getItemDisplayName());
- logger.debug(testName + ": list-item[" + i + "] refName=" +
- item.getRefName());
- logger.debug(testName + ": list-item[" + i + "] URI=" +
- item.getUri());
+ logger.debug(testName + ": list-item[" + i + "] refName=" + item.getRefName());
+ logger.debug(testName + ": list-item[" + i + "] URI=" + item.getUri());
i++;
}
}
+
+ //
+ // Make sure we saw the correct number of authRefs
+ Assert.assertEquals(numAuthRefsFound, expectAuthRefs,
+ "Did not find all expected authority references! " + "Expected " + expectAuthRefs + ", found " + numAuthRefsFound);
}
// Note: Any non-success responses are ignored and not reported.
collectionObjectClient.delete(resourceId).close();
}
- // Note: Any non-success response is ignored and not reported.
- PersonAuthorityClient personAuthClient = new PersonAuthorityClient();
+
+ //
// Delete persons before PersonAuth
+ PersonAuthorityClient personAuthClient = new PersonAuthorityClient();
for (String resourceId : personIdsCreated) {
// Note: Any non-success responses are ignored and not reported.
personAuthClient.deleteItem(personAuthCSID, resourceId).close();
}
personAuthClient.delete(personAuthCSID).close();
- // Note: Any non-success response is ignored and not reported.
- OrgAuthorityClient orgAuthClient = new OrgAuthorityClient();
+
+ //
// Delete organizations before OrgAuth
+ OrgAuthorityClient orgAuthClient = new OrgAuthorityClient();
for (String resourceId : orgIdsCreated) {
// Note: Any non-success responses are ignored and not reported.
orgAuthClient.deleteItem(orgAuthCSID, resourceId).close();
--- /dev/null
+package org.collectionspace.services.collectionobject.nuxeo;
+
+import org.collectionspace.services.client.CollectionObjectClient;
+import org.collectionspace.services.client.CollectionSpaceClient;
+
+/**
+ * Schema and field-name constants for CollectionObject documents under the
+ * botgarden profile.  Schema names are assembled from the service name plus a
+ * part label, matching the part naming used by CollectionObjectClient; field
+ * names are slash-separated paths into the corresponding document part
+ * (e.g. "comments/comment").
+ */
+public class CollectionObjectBotGardenConstants {
+ // Core (collectionspace_core) and common service part names.
+ public final static String CORE_SCHEMA_NAME = CollectionObjectClient.COLLECTIONSPACE_CORE_SCHEMA;
+ public static final String COMMON_SCHEMA_NAME = CollectionObjectClient.SERVICE_NAME + CollectionObjectClient.PART_LABEL_SEPARATOR + CollectionObjectClient.PART_COMMON_LABEL;
+
+ // Part names for the botgarden profile extension and the natural history extension.
+ public static final String BOTGARDEN_PROFILE_NAME = CollectionSpaceClient.BOTGARDEN_PROFILE_NAME;
+ public static final String BOTGARDEN_SCHEMA_NAME = CollectionObjectClient.SERVICE_NAME + CollectionObjectClient.PART_LABEL_SEPARATOR + BOTGARDEN_PROFILE_NAME;
+ public final static String NATURALHISTORY_SCHEMA_NAME = CollectionObjectClient.SERVICE_NAME + CollectionObjectClient.PART_LABEL_SEPARATOR + CollectionSpaceClient.NATURALHISTORY_EXTENSION_NAME;
+
+ // The document URI and workflow state both live in the core part.
+ public final static String URI_SCHEMA_NAME = CORE_SCHEMA_NAME;
+ public final static String URI_FIELD_NAME = CollectionSpaceClient.COLLECTIONSPACE_CORE_URI; //"uri";
+
+ public final static String WORKFLOW_STATE_SCHEMA_NAME = CORE_SCHEMA_NAME;
+ public final static String WORKFLOW_STATE_FIELD_NAME = CollectionSpaceClient.COLLECTIONSPACE_CORE_WORKFLOWSTATE;
+
+ // Locality fields (natural history extension).
+ public final static String FIELD_COLLECTION_PLACE_SCHEMA_NAME = NATURALHISTORY_SCHEMA_NAME;
+ public final static String FIELD_COLLECTION_PLACE_FIELD_NAME = "localityGroupList/localityGroup/fieldLocPlace";
+
+ public final static String TAXONOMIC_RANGE_SCHEMA_NAME = NATURALHISTORY_SCHEMA_NAME;
+ public final static String TAXONOMIC_RANGE_FIELD_NAME = "localityGroupList/localityGroup/taxonomicRange";
+
+ public final static String COMMENT_SCHEMA_NAME = COMMON_SCHEMA_NAME;
+ public final static String COMMENT_FIELD_NAME = "comments/comment";
+
+ // Dead flag/date (botgarden profile part).
+ public final static String DEAD_FLAG_SCHEMA_NAME = BOTGARDEN_SCHEMA_NAME;
+ public final static String DEAD_FLAG_FIELD_NAME = "deadFlag";
+
+ public final static String DEAD_DATE_SCHEMA_NAME = BOTGARDEN_SCHEMA_NAME;
+ public final static String DEAD_DATE_FIELD_NAME = "deadDate";
+
+ // Taxonomic identification group fields (natural history extension).
+ // PRIMARY_TAXON_FIELD_NAME addresses the first ([0]) repeating group instance.
+ public final static String TAXON_SCHEMA_NAME = NATURALHISTORY_SCHEMA_NAME;
+ public final static String TAXON_FIELD_NAME = "taxonomicIdentGroupList/taxonomicIdentGroup/taxon";
+ public final static String PRIMARY_TAXON_FIELD_NAME = "taxonomicIdentGroupList/taxonomicIdentGroup[0]/taxon";
+
+ public final static String DETERMINATION_BY_SCHEMA_NAME = NATURALHISTORY_SCHEMA_NAME;
+ public final static String DETERMINATION_BY_FIELD_NAME = "taxonomicIdentGroupList/taxonomicIdentGroup/identBy";
+
+ public final static String DETERMINATION_DATE_SCHEMA_NAME = NATURALHISTORY_SCHEMA_NAME;
+ public final static String DETERMINATION_DATE_FIELD_NAME = "taxonomicIdentGroupList/taxonomicIdentGroup/identDateGroup/dateDisplayDate";
+
+ public final static String DETERMINATION_INSTITUTION_SCHEMA_NAME = NATURALHISTORY_SCHEMA_NAME;
+ public final static String DETERMINATION_INSTITUTION_FIELD_NAME = "taxonomicIdentGroupList/taxonomicIdentGroup/institution";
+
+ // DETERMINATION_KIND_DETERMINATION_VALUE is the identKind value that marks a determination.
+ public final static String DETERMINATION_KIND_SCHEMA_NAME = NATURALHISTORY_SCHEMA_NAME;
+ public final static String DETERMINATION_KIND_FIELD_NAME = "taxonomicIdentGroupList/taxonomicIdentGroup/identKind";
+ public final static String DETERMINATION_KIND_DETERMINATION_VALUE = "determination";
+
+ public final static String HYBRID_FLAG_SCHEMA_NAME = NATURALHISTORY_SCHEMA_NAME;
+ public final static String HYBRID_FLAG_FIELD_NAME = "taxonomicIdentGroupList/taxonomicIdentGroup/hybridFlag";
+
+ public final static String RARE_FLAG_SCHEMA_NAME = NATURALHISTORY_SCHEMA_NAME;
+ public final static String RARE_FLAG_FIELD_NAME = "rare";
+
+ // Hybrid parent fields; the qualifier distinguishes the female/male parent.
+ public final static String HYBRID_PARENT_SCHEMA_NAME = NATURALHISTORY_SCHEMA_NAME;
+ public final static String HYBRID_PARENT_FIELD_NAME = "taxonomicIdentGroupList/taxonomicIdentGroup/taxonomicIdentHybridParentGroupList/taxonomicIdentHybridParentGroup/taxonomicIdentHybridParent";
+
+ public final static String HYBRID_QUALIFIER_SCHEMA_NAME = NATURALHISTORY_SCHEMA_NAME;
+ public final static String HYBRID_QUALIFIER_FIELD_NAME = "taxonomicIdentGroupList/taxonomicIdentGroup/taxonomicIdentHybridParentGroupList/taxonomicIdentHybridParentGroup/taxonomicIdentHybridParentQualifier";
+
+ public final static String HYBRID_QUALIFIER_FEMALE_VALUE = "female";
+ public final static String HYBRID_QUALIFIER_MALE_VALUE = "male";
+}
*/
package org.collectionspace.services.collectionobject.nuxeo;
+import org.collectionspace.services.client.CollectionSpaceClient;
+
/**
* CollectionObjectConstants processes CollectionObject document
*
public final static String NUXEO_SCHEMA_NAME = "collectionobject";
public final static String NUXEO_DC_TITLE = "CollectionSpace-CollectionObject";
+ public static final String WORKFLOW_STATE_SCHEMA_NAME = CollectionSpaceClient.COLLECTIONSPACE_CORE_SCHEMA;
+ public static final String WORKFLOW_STATE_FIELD_NAME = CollectionSpaceClient.COLLECTIONSPACE_CORE_WORKFLOWSTATE; //"workflowState";
}
<name>services.common-api</name>
<packaging>jar</packaging>
<dependencies>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-lang3</artifactId>
+ <version>3.1</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.gbif</groupId>
+ <artifactId>name-parser</artifactId>
+ <version>2.0</version>
+ </dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<artifactId>jaxen</artifactId>
<version>1.1.6</version>
</dependency>
-
</dependencies>
+
+ <!-- Global Biodiversity Information Facility - http://www.gbif.org/ -->
+ <repositories>
+ <repository>
+ <id>gbif-all</id>
+ <name>gbif-all</name>
+ <url>http://repository.gbif.org/content/groups/gbif</url>
+ </repository>
+ </repositories>
</project>
--- /dev/null
+package org.collectionspace.services.common.api;
+
+import java.util.regex.MatchResult;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.lang3.StringUtils;
+import org.gbif.api.model.checklistbank.ParsedName;
+import org.gbif.nameparser.NameParser;
+import org.gbif.nameparser.UnparsableException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Formats a taxonomic name as HTML, italicizing the latin (scientific)
+ * portions -- the genus (or above), specific epithet, and infraspecific
+ * epithet -- as identified by the GBIF name parser.  Hybrid formulas
+ * ("A x B") are handled by recursively formatting each parent name.
+ * Several normalizations are applied before parsing to cope with
+ * non-standard names seen in SAGE data (qualified subspecies, leading
+ * infraspecific ranks, missing genus, stray hybrid signs).
+ */
+public class TaxonFormatter {
+ private static final Logger logger = LoggerFactory.getLogger(TaxonFormatter.class);
+ private static final Pattern HYBRID_FORMULA_PATTERN = Pattern.compile("^(.*?)(\\s[×xX]\\s)(.*)$");
+ private static final Pattern BROKEN_HYBRID_FORMULA_PATTERN = Pattern.compile("^×\\s*|\\s*×$");
+ private static final Pattern ADJACENT_ITALIC_TAG_PATTERN = Pattern.compile("</i>(\\s*)<i>");
+ private static final Pattern STARTS_WITH_INFRASPECIFIC_RANK_PATTERN = Pattern.compile("^\\s*(var|subsp|cv|aff)\\.");
+ private static final String SUBSPECIES_QUALIFIER_MARKER_REGEXP = "(section|subsection|ser\\.|sser\\.)";
+ private static final Pattern SUBSPECIES_WITH_QUALIFIER_PATTERN = Pattern.compile("(\\s|^)(subsp\\.\\s+)" + SUBSPECIES_QUALIFIER_MARKER_REGEXP + "(\\s)(.*?)(\\s|$)", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE);
+ private static final Pattern PARENTHESIZED_SUBSPECIES_WITH_QUALIFIER_PATTERN = Pattern.compile("(\\s|^)(subsp\\.\\s+)\\(" + SUBSPECIES_QUALIFIER_MARKER_REGEXP + "(.*?)\\)", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE);
+ private static final Pattern FAMILY_NAME_PATTERN = Pattern.compile("^[A-Z]+$");
+
+ private NameParser nameParser;
+
+ public TaxonFormatter() {
+  this.nameParser = new NameParser();
+ }
+
+ /**
+  * Formats the given taxonomic name as HTML with the latin parts wrapped
+  * in &lt;i&gt; tags.
+  *
+  * @param name the display name of a taxon; may be null or blank
+  * @return the formatted name; the input is returned unchanged if it is
+  *         blank, is a family name (all upper-case letters -- per Barbara
+  *         Keller, family names are never italicized), or cannot be parsed
+  */
+ public String format(String name) {
+  if (StringUtils.isBlank(name)) {
+   return name;
+  }
+
+  if (FAMILY_NAME_PATTERN.matcher(name).matches()) {
+   // Per Barbara Keller, family names are never italicized.
+   return name;
+  }
+
+  Matcher hybridMatcher = HYBRID_FORMULA_PATTERN.matcher(name);
+
+  if (hybridMatcher.matches()) {
+   // A hybrid formula ("parent1 x parent2"): format each parent independently.
+   String parentName1 = hybridMatcher.group(1);
+   String separator = hybridMatcher.group(2);
+   String parentName2 = hybridMatcher.group(3);
+
+   logger.info("hybrid formula: parentName1=" + parentName1 + " parentName2=" + parentName2);
+
+   return (format(parentName1) + separator + format(parentName2));
+  }
+
+  String normalizedName = name;
+
+  // A leading or trailing hybrid sign with no second parent: drop the sign.
+  // Note: Matcher.replaceAll/replaceFirst reset the matcher first, so reusing
+  // the matcher after find() is safe and avoids re-compiling the match.
+  Matcher brokenHybridMatcher = BROKEN_HYBRID_FORMULA_PATTERN.matcher(normalizedName);
+
+  if (brokenHybridMatcher.find()) {
+   logger.info("broken hybrid: name=" + name + " normalizedName=" + normalizedName);
+
+   normalizedName = brokenHybridMatcher.replaceAll("");
+   logger.info("normalized to:" + normalizedName);
+  }
+
+  // "subsp. (section Foo)" -> "subsp. section Foo": remove the parentheses so
+  // the qualifier can be stripped by the SUBSPECIES_WITH_QUALIFIER step below.
+  Matcher parenthesizedQualifierMatcher = PARENTHESIZED_SUBSPECIES_WITH_QUALIFIER_PATTERN.matcher(normalizedName);
+
+  if (parenthesizedQualifierMatcher.find()) {
+   logger.info("parenthesized qualified subspecies: name=" + name + " normalizedName=" + normalizedName);
+
+   normalizedName = parenthesizedQualifierMatcher.replaceFirst("$1$2$3$4");
+   logger.info("normalized to:" + normalizedName);
+  }
+
+  Matcher subspeciesWithQualifierMatcher = SUBSPECIES_WITH_QUALIFIER_PATTERN.matcher(normalizedName);
+
+  if (subspeciesWithQualifierMatcher.find()) {
+   logger.info("qualified subspecies: name=" + name + " normalizedName=" + normalizedName);
+
+   MatchResult matchResult = subspeciesWithQualifierMatcher.toMatchResult();
+
+   // Remove the qualifier (e.g. section, ser., sser.). In some data from SAGE, the latin name
+   // following the qualifier is capitalized, which the GBIF parser won't handle, so lowercase it.
+   String replacement = matchResult.group(1) + matchResult.group(2) + matchResult.group(5).toLowerCase() + matchResult.group(6);
+   normalizedName = normalizedName.substring(0, matchResult.start()) + replacement + normalizedName.substring(matchResult.end());
+   logger.info("normalized to:" + normalizedName);
+  }
+
+  if (STARTS_WITH_INFRASPECIFIC_RANK_PATTERN.matcher(normalizedName).find()) {
+   /*
+    * There are some non-standard taxon names in SAGE data, where there is an infraspecific rank/epithet, but no genus/species, e.g.
+    * subsp. occidentalis (J.T. Howell) C.B. Wolf
+    *
+    * Since the GBIF parser can't handle this, we'll temporarily prepend an arbitrary genus and species for parsing purposes.
+    */
+   logger.info("name starts with infraspecific rank: name=" + name + " normalizedName=" + normalizedName);
+
+   normalizedName = "Tempgenus tempspecies " + normalizedName;
+   logger.info("normalized to:" + normalizedName);
+  }
+
+  ParsedName parsedName = null;
+
+  try {
+   parsedName = nameParser.parse(normalizedName);
+  }
+  catch (UnparsableException e) {
+   /*
+    * Some non-standard taxon names in SAGE data have a species, but no genus. Try to account for these by
+    * temporarily prepending an arbitrary genus.
+    */
+
+   logger.info("Unparsable name, trying with a temp genus: name=" + name + " normalizedName=" + normalizedName);
+
+   normalizedName = "Tempgenus " + normalizedName;
+
+   try {
+    parsedName = nameParser.parse(normalizedName);
+   }
+   catch (UnparsableException ex) {
+    // Fix: report the retry's failure (ex), not the first attempt's (e),
+    // so the logged message matches the name actually being parsed.
+    logger.error("error parsing name: name=" + name + " normalizedName=" + normalizedName + " message=" + ex.getMessage());
+   }
+  }
+
+  if (parsedName != null) {
+   String genusOrAbove = parsedName.getGenusOrAbove();
+   String specificEpithet = parsedName.getSpecificEpithet();
+   String infraSpecificEpithet = parsedName.getInfraSpecificEpithet();
+
+   logger.debug("parsed name: genusOrAbove=" + genusOrAbove + " specificEpithet=" + specificEpithet + " infraSpecificEpithet=" + infraSpecificEpithet);
+
+   // Italicize each recognized latin part as it appears in the ORIGINAL name;
+   // the temporary "Tempgenus"/"tempspecies" prefixes are absent there and so
+   // are never italicized in the output.
+   if (StringUtils.isNotBlank(genusOrAbove)) {
+    name = italicize(name, genusOrAbove);
+   }
+
+   if (StringUtils.isNotBlank(specificEpithet)) {
+    name = italicize(name, specificEpithet);
+   }
+
+   if (StringUtils.isNotBlank(infraSpecificEpithet)) {
+    name = italicize(name, infraSpecificEpithet);
+   }
+
+   name = compressTags(name);
+  }
+
+  return name;
+ }
+
+ /**
+  * Wraps whole-word, case-insensitive occurrences of substring within string
+  * in &lt;i&gt; tags.  Word boundaries are whitespace, parentheses, or the
+  * start/end of the string.
+  */
+ private String italicize(String string, String substring) {
+  return Pattern.compile("(\\s|\\(|^)(" + Pattern.quote(substring) + ")(\\s|\\)|$)", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE).matcher(string).replaceAll("$1<i>$2</i>$3");
+ }
+
+ /**
+  * Merges adjacent italic runs ("&lt;/i&gt; &lt;i&gt;" becomes a single run),
+  * preserving the whitespace between them.
+  */
+ private String compressTags(String html) {
+  html = ADJACENT_ITALIC_TAG_PATTERN.matcher(html).replaceAll("$1");
+
+  return html;
+ }
+}
import javax.ws.rs.core.UriInfo;
import org.collectionspace.services.client.CollectionSpaceClient;
-import org.collectionspace.services.client.PoxPayloadIn;
-import org.collectionspace.services.client.PoxPayloadOut;
import org.collectionspace.services.common.CSWebApplicationException;
import org.collectionspace.services.common.api.Tools;
import org.collectionspace.services.common.config.ServiceConfigUtils;
import org.collectionspace.services.config.service.ServiceBindingType;
import org.collectionspace.services.config.service.DocHandlerParams.Params;
import org.collectionspace.services.description.ServiceDescription;
+
import org.jboss.resteasy.spi.HttpRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
package org.collectionspace.services.common;
+import org.collectionspace.services.client.PoxPayloadIn;
+import org.collectionspace.services.client.PoxPayloadOut;
+
public interface ResourceMapHolder {
- public ResourceMap getResourceMap();
+ public ResourceMap<PoxPayloadIn, PoxPayloadOut> getResourceMap();
}
import org.collectionspace.services.common.invocable.InvocationContext;
import java.util.List;
+
+import org.collectionspace.services.client.PoxPayloadIn;
+import org.collectionspace.services.client.PoxPayloadOut;
import org.collectionspace.services.common.api.Tools;
import org.collectionspace.services.common.context.ServiceContext;
/*
* Save a handle to the JAX-RS related service context
*/
- void setServiceContext(ServiceContext context);
+ void setServiceContext(ServiceContext<PoxPayloadIn, PoxPayloadOut> context);
- ServiceContext getServiceContext();
+ ServiceContext<PoxPayloadIn, PoxPayloadOut> getServiceContext();
InvocationContext getInvocationContext();
}
*/
package org.collectionspace.services.common.relation.nuxeo;
+import org.collectionspace.services.client.IRelationsManager;
+import org.collectionspace.services.relation.RelationshipType;
/**
* RelationConstants processes Relation document
/** The Constant REL_NUXEO_SCHEMA_ROOT_ELEMENT. */
final public static String NUXEO_SCHEMA_ROOT_ELEMENT = "relationtype";
+ public final static String COMMON_SCHEMA_NAME = IRelationsManager.SERVICE_COMMONPART_NAME;
+
+ public final static String SUBJECT_CSID_SCHEMA_NAME = COMMON_SCHEMA_NAME;
+ public final static String SUBJECT_CSID_FIELD_NAME = IRelationsManager.SUBJECT;
+
+ public final static String SUBJECT_DOCTYPE_SCHEMA_NAME = COMMON_SCHEMA_NAME;
+ public final static String SUBJECT_DOCTYPE_FIELD_NAME = IRelationsManager.SUBJECT_DOCTYPE;
+
+ public final static String OBJECT_CSID_SCHEMA_NAME = COMMON_SCHEMA_NAME;
+ public final static String OBJECT_CSID_FIELD_NAME = IRelationsManager.OBJECT;
+
+ public final static String OBJECT_DOCTYPE_SCHEMA_NAME = COMMON_SCHEMA_NAME;
+ public final static String OBJECT_DOCTYPE_FIELD_NAME = IRelationsManager.OBJECT_DOCTYPE;
+
+ public final static String TYPE_SCHEMA_NAME = COMMON_SCHEMA_NAME;
+ public final static String TYPE_FIELD_NAME = IRelationsManager.RELATIONSHIP_TYPE;
+ public final static String AFFECTS_TYPE = RelationshipType.AFFECTS.toString();
+ public final static String BROADER_TYPE = RelationshipType.HAS_BROADER.toString();
}
session.save();
}
- protected void runAsyncBatch(Set<String> asyncIds)
- {
+ protected void runAsyncBatch(Set<String> asyncIds) {
if (asyncIds.isEmpty()) {
return;
}
FROM id_generators
);
+-- CLAIM_NUMBER
+
+INSERT INTO id_generators
+ (csid, displayname, description, priority, last_generated_id, id_generator_state)
+ SELECT
+ 'a253d167-4f1a-4be3-a477-a2bd8a30cd7f',
+ 'Claim Number',
+ 'Identifies a Claim.',
+ '9',
+ '',
+'<org.collectionspace.services.id.SettableIDGenerator>
+ <parts>
+ <org.collectionspace.services.id.StringIDGeneratorPart>
+ <initialValue>CL</initialValue>
+ <currentValue>CL</currentValue>
+ </org.collectionspace.services.id.StringIDGeneratorPart>
+ <org.collectionspace.services.id.YearIDGeneratorPart>
+ <currentValue></currentValue>
+ </org.collectionspace.services.id.YearIDGeneratorPart>
+ <org.collectionspace.services.id.StringIDGeneratorPart>
+ <initialValue>.</initialValue>
+ <currentValue>.</currentValue>
+ </org.collectionspace.services.id.StringIDGeneratorPart>
+ <org.collectionspace.services.id.NumericIDGeneratorPart>
+ <maxLength>6</maxLength>
+ <initialValue>1</initialValue>
+ <currentValue>-1</currentValue>
+ </org.collectionspace.services.id.NumericIDGeneratorPart>
+ <org.collectionspace.services.id.StringIDGeneratorPart>
+ <initialValue>.</initialValue>
+ <currentValue>.</currentValue>
+ </org.collectionspace.services.id.StringIDGeneratorPart>
+ <org.collectionspace.services.id.NumericIDGeneratorPart>
+ <maxLength>6</maxLength>
+ <initialValue>1</initialValue>
+ <currentValue>-1</currentValue>
+ </org.collectionspace.services.id.NumericIDGeneratorPart>
+ </parts>
+</org.collectionspace.services.id.SettableIDGenerator>'
+ WHERE 'a253d167-4f1a-4be3-a477-a2bd8a30cd7f' NOT IN
+ (
+ SELECT csid
+ FROM id_generators
+ );
+
-- CONDITIONCHECK_NUMBER
INSERT INTO id_generators
try {
String parentcsid = lookupParentCSID(parentspecifier, "getContact(parent)", "GET_ITEM_CONTACT", null);
- ServiceContext itemCtx = createServiceContext(getItemServiceName());
+ ServiceContext<PoxPayloadIn, PoxPayloadOut> itemCtx = createServiceContext(getItemServiceName());
String itemcsid = lookupItemCSID(itemCtx, itemspecifier, parentcsid, "getContact(item)", "GET_ITEM_CONTACT");
// Note that we have to create the service context and document handler for the Contact service, not the main service.
ServiceContext<PoxPayloadIn, PoxPayloadOut> ctx = createServiceContext(getContactServiceName());
DocumentHandler handler = createContactDocumentHandler(ctx, parentcsid, itemcsid);
getRepositoryClient(ctx).get(ctx, csid, handler);
- result = ctx.getOutput();
+ result = (PoxPayloadOut) ctx.getOutput();
} catch (Exception e) {
throw bigReThrow(e, "Get failed, the requested Contact CSID:" + csid
+ ": or one of the specifiers for authority:" + parentspecifier
PoxPayloadIn theUpdate = new PoxPayloadIn(xmlPayload);
String parentcsid = lookupParentCSID(parentspecifier, "updateContact(authority)", "UPDATE_CONTACT", null);
- ServiceContext itemCtx = createServiceContext(getItemServiceName());
+ ServiceContext<PoxPayloadIn, PoxPayloadOut> itemCtx = createServiceContext(getItemServiceName());
String itemcsid = lookupItemCSID(itemCtx, itemspecifier, parentcsid, "updateContact(item)", "UPDATE_CONTACT");
ServiceContext<PoxPayloadIn, PoxPayloadOut> ctx = null;
ctx = createServiceContext(getContactServiceName(), theUpdate);
DocumentHandler handler = createContactDocumentHandler(ctx, parentcsid, itemcsid);
getRepositoryClient(ctx).update(ctx, csid, handler);
- result = ctx.getOutput();
+ result = (PoxPayloadOut) ctx.getOutput();
} catch (Exception e) {
throw bigReThrow(e, "Update failed, the requested Contact CSID:" + csid
+ ": or one of the specifiers for authority:" + parentspecifier
try {
String parentcsid = lookupParentCSID(parentspecifier, "deleteContact(authority)", "DELETE_CONTACT", null);
- ServiceContext itemCtx = createServiceContext(getItemServiceName());
+ ServiceContext<PoxPayloadIn, PoxPayloadOut> itemCtx = createServiceContext(getItemServiceName());
String itemcsid = lookupItemCSID(itemCtx, itemspecifier, parentcsid, "deleteContact(item)", "DELETE_CONTACT");
//NOTE: itemcsid is not used below. Leaving the above call in for possible side effects??? CSPACE-3175
private String conditionCheckerOrAssessorRefName = null;
private String insurerRefName = null;
private String valuerRefName = null;
- private final int NUM_AUTH_REFS_EXPECTED = 5;
- private final static String CURRENT_DATE_UTC =
- GregorianCalendarDateTimeUtils.currentDateUTC();
+ private final static String CURRENT_DATE_UTC = GregorianCalendarDateTimeUtils.currentDateUTC();
@Override
protected String getServiceName() {
}
// Success outcomes
- @Test(dataProvider="testName",
- dependsOnMethods = {"createWithAuthRefs"})
+ @Test(dataProvider="testName", dependsOnMethods = {"createWithAuthRefs"})
public void readAndCheckAuthRefs(String testName) throws Exception {
// Perform setup.
testSetup(STATUS_OK, ServiceRequestType.READ);
try {
assertStatusCode(res, testName);
PoxPayloadIn input = new PoxPayloadIn(res.readEntity(String.class));
- IntakesCommon intake = (IntakesCommon) extractPart(input,
- intakeClient.getCommonPartName(), IntakesCommon.class);
+ IntakesCommon intake = (IntakesCommon) extractPart(input, intakeClient.getCommonPartName(), IntakesCommon.class);
Assert.assertNotNull(intake);
// Check a couple of fields
Assert.assertEquals(intake.getCurrentOwner(), currentOwnerRefName);
List<AuthorityRefList.AuthorityRefItem> items = list.getAuthorityRefItem();
int numAuthRefsFound = items.size();
- if(logger.isDebugEnabled()){
- logger.debug("Expected " + NUM_AUTH_REFS_EXPECTED +
- " authority references, found " + numAuthRefsFound);
+ if (logger.isDebugEnabled()) {
+ logger.debug("Expected " + personIdsCreated.size() + " authority references, found " + numAuthRefsFound);
}
- Assert.assertEquals(numAuthRefsFound, NUM_AUTH_REFS_EXPECTED,
- "Did not find all expected authority references! " +
- "Expected " + NUM_AUTH_REFS_EXPECTED + ", found " + numAuthRefsFound);
// Optionally output additional data about list members for debugging.
boolean iterateThroughList = true;
- if(iterateThroughList && logger.isDebugEnabled()){
+ if (iterateThroughList && logger.isDebugEnabled()) {
int i = 0;
for(AuthorityRefList.AuthorityRefItem item : items){
logger.debug(testName + ": list-item[" + i + "] Field:" +
item.getSourceField() + "= " +
item.getAuthDisplayName() +
item.getItemDisplayName());
- logger.debug(testName + ": list-item[" + i + "] refName=" +
- item.getRefName());
- logger.debug(testName + ": list-item[" + i + "] URI=" +
- item.getUri());
+ logger.debug(testName + ": list-item[" + i + "] refName=" + item.getRefName());
+ logger.debug(testName + ": list-item[" + i + "] URI=" + item.getUri());
i++;
}
}
+ //
+ // Ensure we got the correct number of authRefs
+ Assert.assertEquals(numAuthRefsFound, personIdsCreated.size(),
+ "Did not find all expected authority references! " + "Expected " + personIdsCreated.size() + ", found " + numAuthRefsFound);
}
@AfterClass(alwaysRun=true)
public void cleanUp() throws Exception {
String noTest = System.getProperty("noTestCleanup");
- if(Boolean.TRUE.toString().equalsIgnoreCase(noTest)) {
+ if (Boolean.TRUE.toString().equalsIgnoreCase(noTest)) {
if (logger.isDebugEnabled()) {
logger.debug("Skipping Cleanup phase ...");
}
for (String resourceId : intakeIdsCreated) {
intakeClient.delete(resourceId).close();
}
+ //
+ // Delete all the person records then the parent resource
PersonAuthorityClient personAuthClient = new PersonAuthorityClient();
- // Delete persons before PersonAuth
for (String resourceId : personIdsCreated) {
personAuthClient.deleteItem(personAuthCSID, resourceId).close();
}
protected String getServiceName() {
return IntakeClient.SERVICE_NAME;
}
-
- // ---------------------------------------------------------------
- // CRUD tests : CREATE tests
- // ---------------------------------------------------------------
-
- // See Issue CSPACE-401.
- /* (non-Javadoc)
- * @see org.collectionspace.services.client.test.AbstractServiceTestImpl#createWithEmptyEntityBody(java.lang.String)
- */
- @Override
- public void createWithEmptyEntityBody(String testName) throws Exception {
- //Should this really be empty?
- }
-
- /* (non-Javadoc)
- * @see org.collectionspace.services.client.test.AbstractServiceTestImpl#createWithMalformedXml(java.lang.String)
- */
- @Override
- public void createWithMalformedXml(String testName) throws Exception {
- //Should this really be empty?
- }
-
- /* (non-Javadoc)
- * @see org.collectionspace.services.client.test.AbstractServiceTestImpl#createWithWrongXmlSchema(java.lang.String)
- */
- @Override
- public void createWithWrongXmlSchema(String testName) throws Exception {
- //Should this really be empty?
- }
-
- /*
- @Override
- @Test(dataProvider="testName", dataProviderClass=AbstractServiceTest.class,
- dependsOnMethods = {"create", "testSubmitRequest"})
- public void createWithEmptyEntityBody(String testName) throws Exception {
-
- if (logger.isDebugEnabled()) {
- logger.debug(testBanner(testName, CLASS_NAME));
- }
- // Perform setup.
- setupCreateWithEmptyEntityBody();
-
- // Submit the request to the service and store the response.
- String method = REQUEST_TYPE.httpMethodName();
- String url = getServiceRootURL();
- String mediaType = MediaType.APPLICATION_XML;
- final String entity = "";
- int statusCode = submitRequest(method, url, mediaType, entity);
-
- // Check the status code of the response: does it match
- // the expected response(s)?
- if(logger.isDebugEnabled()){
- logger.debug("createWithEmptyEntityBody url=" + url +
- " status=" + statusCode);
- }
- Assert.assertTrue(REQUEST_TYPE.isValidStatusCode(statusCode),
- invalidStatusCodeMessage(REQUEST_TYPE, statusCode));
- Assert.assertEquals(statusCode, EXPECTED_STATUS_CODE);
- }
-
- @Override
- @Test(dataProvider="testName", dataProviderClass=AbstractServiceTest.class,
- dependsOnMethods = {"create", "testSubmitRequest"})
- public void createWithMalformedXml(String testName) throws Exception {
-
- if (logger.isDebugEnabled()) {
- logger.debug(testBanner(testName, CLASS_NAME));
- }
- // Perform setup.
- setupCreateWithMalformedXml();
-
- // Submit the request to the service and store the response.
- String method = REQUEST_TYPE.httpMethodName();
- String url = getServiceRootURL();
- String mediaType = MediaType.APPLICATION_XML;
- final String entity = MALFORMED_XML_DATA; // Constant from base class.
- int statusCode = submitRequest(method, url, mediaType, entity);
-
- // Check the status code of the response: does it match
- // the expected response(s)?
- if(logger.isDebugEnabled()){
- logger.debug(testName + ": url=" + url +
- " status=" + statusCode);
- }
- Assert.assertTrue(REQUEST_TYPE.isValidStatusCode(statusCode),
- invalidStatusCodeMessage(REQUEST_TYPE, statusCode));
- Assert.assertEquals(statusCode, EXPECTED_STATUS_CODE);
- }
-
- @Override
- @Test(dataProvider="testName", dataProviderClass=AbstractServiceTest.class,
- dependsOnMethods = {"create", "testSubmitRequest"})
- public void createWithWrongXmlSchema(String testName) throws Exception {
-
- if (logger.isDebugEnabled()) {
- logger.debug(testBanner(testName, CLASS_NAME));
- }
- // Perform setup.
- setupCreateWithWrongXmlSchema(testName, logger);
-
- // Submit the request to the service and store the response.
- String method = REQUEST_TYPE.httpMethodName();
- String url = getServiceRootURL();
- String mediaType = MediaType.APPLICATION_XML;
- final String entity = WRONG_XML_SCHEMA_DATA;
- int statusCode = submitRequest(method, url, mediaType, entity);
-
- // Check the status code of the response: does it match
- // the expected response(s)?
- if(logger.isDebugEnabled()){
- logger.debug(testName + ": url=" + url +
- " status=" + statusCode);
- }
- Assert.assertTrue(REQUEST_TYPE.isValidStatusCode(statusCode),
- invalidStatusCodeMessage(REQUEST_TYPE, statusCode));
- Assert.assertEquals(statusCode, EXPECTED_STATUS_CODE);
- }
- */
// ---------------------------------------------------------------
// CRUD tests : READ tests
// ---------------------------------------------------------------
-
+ @Override
protected void compareReadInstances(IntakesCommon original, IntakesCommon fromRead) throws Exception {
// Verify the number and contents of values in repeatable fields,
// as created in the instance record used for testing.
+ "' does not match expected data '" + getUTF8DataFragment());
}
- // Failure outcomes
-
@Override
public void delete(String testName) throws Exception {
// Do nothing because this test is not ready to delete the "knownResourceId".
}
- // Failure outcomes
- // Placeholders until the three tests below can be uncommented.
-
- // See Issue CSPACE-401.
- /* (non-Javadoc)
- * @see org.collectionspace.services.client.test.AbstractServiceTestImpl#updateWithEmptyEntityBody(java.lang.String)
- */
- @Override
- public void updateWithEmptyEntityBody(String testName) throws Exception {
- //Should this really be empty?
- }
-
- /* (non-Javadoc)
- * @see org.collectionspace.services.client.test.AbstractServiceTestImpl#updateWithMalformedXml(java.lang.String)
- */
- @Override
- public void updateWithMalformedXml(String testName) throws Exception {
- //Should this really be empty?
- }
-
- /* (non-Javadoc)
- * @see org.collectionspace.services.client.test.AbstractServiceTestImpl#updateWithWrongXmlSchema(java.lang.String)
- */
- @Override
- public void updateWithWrongXmlSchema(String testName) throws Exception {
- //Should this really be empty?
- }
-
- /*
- @Override
- @Test(dataProvider="testName", dataProviderClass=AbstractServiceTest.class,
- dependsOnMethods = {"create", "update", "testSubmitRequest"})
- public void updateWithEmptyEntityBody(String testName) throws Exception {
-
- if (logger.isDebugEnabled()) {
- logger.debug(testBanner(testName, CLASS_NAME));
- }
- // Perform setup.
- setupUpdateWithEmptyEntityBody();
-
- // Submit the request to the service and store the response.
- String method = REQUEST_TYPE.httpMethodName();
- String url = getResourceURL(knownResourceId);
- String mediaType = MediaType.APPLICATION_XML;
- final String entity = "";
- int statusCode = submitRequest(method, url, mediaType, entity);
-
- // Check the status code of the response: does it match
- // the expected response(s)?
- if(logger.isDebugEnabled()){
- logger.debug(testName + ": url=" + url +
- " status=" + statusCode);
- }
- Assert.assertTrue(REQUEST_TYPE.isValidStatusCode(statusCode),
- invalidStatusCodeMessage(REQUEST_TYPE, statusCode));
- Assert.assertEquals(statusCode, EXPECTED_STATUS_CODE);
- }
-
- @Override
- @Test(dataProvider="testName", dataProviderClass=AbstractServiceTest.class,
- dependsOnMethods = {"create", "update", "testSubmitRequest"})
- public void updateWithMalformedXml(String testName) throws Exception {
-
- if (logger.isDebugEnabled()) {
- logger.debug(testBanner(testName, CLASS_NAME));
- }
- // Perform setup.
- setupUpdateWithMalformedXml();
-
- // Submit the request to the service and store the response.
- String method = REQUEST_TYPE.httpMethodName();
- String url = getResourceURL(knownResourceId);
- String mediaType = MediaType.APPLICATION_XML;
- final String entity = MALFORMED_XML_DATA;
- int statusCode = submitRequest(method, url, mediaType, entity);
-
- // Check the status code of the response: does it match
- // the expected response(s)?
- if(logger.isDebugEnabled()){
- logger.debug(testName + ": url=" + url +
- " status=" + statusCode);
- }
- Assert.assertTrue(REQUEST_TYPE.isValidStatusCode(statusCode),
- invalidStatusCodeMessage(REQUEST_TYPE, statusCode));
- Assert.assertEquals(statusCode, EXPECTED_STATUS_CODE);
- }
-
- @Override
- @Test(dataProvider="testName", dataProviderClass=AbstractServiceTest.class,
- dependsOnMethods = {"create", "update", "testSubmitRequest"})
- public void updateWithWrongXmlSchema(String testName) throws Exception {
-
- if (logger.isDebugEnabled()) {
- logger.debug(testBanner(testName, CLASS_NAME));
- }
- // Perform setup.
- setupUpdateWithWrongXmlSchema();
-
- // Submit the request to the service and store the response.
- String method = REQUEST_TYPE.httpMethodName();
- String url = getResourceURL(knownResourceId);
- String mediaType = MediaType.APPLICATION_XML;
- final String entity = WRONG_XML_SCHEMA_DATA;
- int statusCode = submitRequest(method, url, mediaType, entity);
-
- // Check the status code of the response: does it match
- // the expected response(s)?
- if(logger.isDebugEnabled()){
- logger.debug(testName + ": url=" + url +
- " status=" + statusCode);
- }
- Assert.assertTrue(REQUEST_TYPE.isValidStatusCode(statusCode),
- invalidStatusCodeMessage(REQUEST_TYPE, statusCode));
- Assert.assertEquals(statusCode, EXPECTED_STATUS_CODE);
- }
- */
-
// ---------------------------------------------------------------
// Utility tests : tests of code used in tests above
// ---------------------------------------------------------------
CurrentLocationGroupList currentLocationGroupList = intakesCommon.getCurrentLocationGroupList();
Assert.assertNotNull(currentLocationGroupList);
+
List<CurrentLocationGroup> currentLocationGroups = currentLocationGroupList.getCurrentLocationGroup();
- Assert.assertNotNull(currentLocationGroups);
+ Assert.assertNotNull(currentLocationGroups);
Assert.assertTrue(currentLocationGroups.size() > 0);
+
CurrentLocationGroup currentLocationGroup = currentLocationGroups.get(0);
Assert.assertNotNull(currentLocationGroup);
+
String currentLocationNote = currentLocationGroup.getCurrentLocationNote();
Assert.assertNotNull(currentLocationNote);
+
String updatedCurrentLocationNote = "updated-" + currentLocationNote;
currentLocationGroups.get(0).setCurrentLocationNote(updatedCurrentLocationNote);
result.setCurrentLocationGroupList(currentLocationGroupList);
dependsOnMethods = {
"org.collectionspace.services.client.test.AbstractServiceTestImpl.baseCRUDTests"})
public void CRUDTests(String testName) {
- // TODO Auto-generated method stub
+ // Needed for TestNG dependency chain.
}
}
<artifactId>org.collectionspace.services.loanout.jaxb</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.loanout.client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<dependency>
<groupId>org.collectionspace.services</groupId>
<artifactId>org.collectionspace.services.collectionobject.jaxb</artifactId>
</exclusion>
</exclusions>
</dependency>
-
</dependencies>
<build>
--- /dev/null
+package org.collectionspace.services.loanout.nuxeo;
+
+import org.collectionspace.services.client.CollectionSpaceClient;
+import org.collectionspace.services.client.LoanoutClient;
+
+/**
+ * Constants for the botgarden (Botanical Garden) profile extension of the
+ * Loanout service: the profile-specific schema part name and the names/values
+ * of fields defined in that part.
+ */
+public class LoanoutBotGardenConstants {
+ public static final String BOTGARDEN_PROFILE_NAME = CollectionSpaceClient.BOTGARDEN_PROFILE_NAME;
+ // Schema part name, e.g. "<service>_<profile>" built from the service name and profile name.
+ public static final String BOTGARDEN_SCHEMA_NAME = LoanoutClient.SERVICE_NAME + CollectionSpaceClient.PART_LABEL_SEPARATOR + BOTGARDEN_PROFILE_NAME;
+
+ // "labelRequested" field in the botgarden part, with its expected Yes/No string values.
+ public static final String LABEL_REQUESTED_SCHEMA_NAME = BOTGARDEN_SCHEMA_NAME;
+ public static final String LABEL_REQUESTED_FIELD_NAME = "labelRequested";
+ public static final String LABEL_REQUESTED_YES_VALUE = "Yes";
+ public static final String LABEL_REQUESTED_NO_VALUE = "No";
+
+ // "styledName" field in the botgarden part.
+ public static final String STYLED_NAME_SCHEMA_NAME = BOTGARDEN_SCHEMA_NAME;
+ public static final String STYLED_NAME_FIELD_NAME = "styledName";
+}
--- /dev/null
+package org.collectionspace.services.movement.nuxeo;
+
+import org.collectionspace.services.client.MovementClient;
+import org.collectionspace.services.client.CollectionSpaceClient;
+
+/**
+ * Constants related to the official Botanical Garden profile.
+ *
+ * Defines the botgarden profile's schema part name for the Movement service,
+ * plus the schema/field names used by botgarden event handling.
+ *
+ * @author remillet
+ *
+ */
+public class MovementBotGardenConstants {
+ public static final String BOTGARDEN_PROFILE_NAME = MovementClient.BOTGARDEN_PROFILE_NAME;
+ // Schema part name built as "<service>_<profile>".
+ public static final String BOTGARDEN_SCHEMA_NAME = MovementClient.SERVICE_NAME + CollectionSpaceClient.PART_LABEL_SEPARATOR + BOTGARDEN_PROFILE_NAME;
+
+ // The botgarden "action code" is read from the common schema's reasonForMove field.
+ public static final String ACTION_CODE_SCHEMA_NAME = MovementConstants.COMMON_SCHEMA_NAME;
+ public static final String ACTION_CODE_FIELD_NAME = "reasonForMove";
+
+ // The botgarden "action date" is read from the common schema's locationDate field.
+ public static final String ACTION_DATE_SCHEMA_NAME = MovementConstants.COMMON_SCHEMA_NAME;
+ public static final String ACTION_DATE_FIELD_NAME = "locationDate";
+
+ // Recognized action-code values. NOTE(review): presumably these match refname
+ // display names in the action-code vocabulary — confirm against tenant config.
+ public static final String DEAD_ACTION_CODE = "Dead";
+ public static final String REVIVED_ACTION_CODE = "Revived";
+ public static final String OTHER_ACTION_CODE = "Other";
+
+ // "labelRequested" field in the botgarden part, with its expected Yes/No string values.
+ public static final String LABEL_REQUESTED_SCHEMA_NAME = BOTGARDEN_SCHEMA_NAME;
+ public static final String LABEL_REQUESTED_FIELD_NAME = "labelRequested";
+ public static final String LABEL_REQUESTED_YES_VALUE = "Yes";
+ public static final String LABEL_REQUESTED_NO_VALUE = "No";
+}
*/
package org.collectionspace.services.movement.nuxeo;
+import org.collectionspace.services.client.CollectionSpaceClient;
+import org.collectionspace.services.client.MovementClient;
+
/**
* MovementConstants specifies constants for the Movement service
*
public final static String NUXEO_DOCTYPE = "Movement";
public final static String NUXEO_SCHEMA_NAME = "movement";
public final static String NUXEO_DC_TITLE = "CollectionSpace-Movement";
+
+ public final static String CORE_SCHEMA_NAME = CollectionSpaceClient.COLLECTIONSPACE_CORE_SCHEMA;
+ public static final String COMMON_SCHEMA_NAME = MovementClient.SERVICE_NAME + CollectionSpaceClient.PART_LABEL_SEPARATOR + CollectionSpaceClient.PART_COMMON_LABEL;
+
+ public static final String CURRENT_LOCATION_SCHEMA_NAME = COMMON_SCHEMA_NAME;
+ public static final String CURRENT_LOCATION_FIELD_NAME = "currentLocation";
+
+ public static final String PREVIOUS_LOCATION_SCHEMA_NAME = COMMON_SCHEMA_NAME;
+ public static final String PREVIOUS_LOCATION_FIELD_NAME = "previousLocation";
+
+ public static final String WORKFLOW_STATE_SCHEMA_NAME = MovementConstants.CORE_SCHEMA_NAME;
+ public static final String WORKFLOW_STATE_FIELD_NAME = CollectionSpaceClient.COLLECTIONSPACE_CORE_WORKFLOWSTATE;
+
+ public static final String NONE_LOCATION = null;
}
--- /dev/null
+package org.collectionspace.services.place.nuxeo;
+
+/**
+ * Constants for the botgarden (Botanical Garden) profile extension of the
+ * Place service.
+ */
+public class PlaceBotGardenConstants {
+ // Term-type value identifying a reverse qualified name term on a place record.
+ public final static String REVERSE_TERM_TYPE = "Reverse Qualified Name";
+}
*/
package org.collectionspace.services.place.nuxeo;
+import org.collectionspace.services.client.PlaceAuthorityClient;
+
/**
* PlaceConstants processes CollectionObject document
*
public final static String NUXEO_DOCTYPE = "Place";
public final static String NUXEO_SCHEMA_NAME = "place";
public final static String NUXEO_DC_TITLE = "CollectionSpace-Place";
+
+ public static final String COMMON_SCHEMA_NAME = PlaceAuthorityClient.SERVICE_COMMON_PART_NAME;
+ public final static String COMMON_ITEM_SCHEMA_NAME = PlaceAuthorityClient.SERVICE_ITEM_COMMON_PART_NAME;
+
+ public final static String DISPLAY_NAME_SCHEMA_NAME = COMMON_ITEM_SCHEMA_NAME;
+ public final static String DISPLAY_NAME_FIELD_NAME = "placeTermGroupList/placeTermGroup/termDisplayName";
+
+ public final static String TERM_TYPE_SCHEMA_NAME = COMMON_ITEM_SCHEMA_NAME;
+ public final static String TERM_TYPE_FIELD_NAME = "placeTermGroupList/placeTermGroup/termType";
+
}
<properties>
<annox.version>0.5.0</annox.version>
<jaxb2-basics.version>0.6.2</jaxb2-basics.version>
- <maven-jaxb2-plugin.version>0.12.3</maven-jaxb2-plugin.version>
+ <maven-jaxb2-plugin.version>0.13.1</maven-jaxb2-plugin.version>
<jaxb.version>2.2.11</jaxb.version>
<resteasy.version>3.0.19.Final</resteasy.version>
<mysql.driver.version>5.1.8</mysql.driver.version>
<module>jaxb</module>
<module>hyperjaxb</module>
<module>common</module>
+ <module>structureddate</module>
<module>authority</module>
<module>authorization-mgt</module> <!-- relies on authorization -->
<module>common-test</module>
<module>intake</module>
<module>loanin</module>
<module>loanout</module>
+ <module>claim</module>
<module>exhibition</module>
<module>conditioncheck</module>
<module>conservation</module>
<module>valuationcontrol</module>
<module>objectexit</module>
+ <module>propagation</module>
+ <module>pottag</module>
<module>batch</module>
<module>imports</module>
<module>location</module>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<launchConfiguration type="org.eclipse.ant.AntBuilderLaunchConfigurationType">
+<booleanAttribute key="org.eclipse.ui.externaltools.ATTR_BUILDER_ENABLED" value="false"/>
+<stringAttribute key="org.eclipse.ui.externaltools.ATTR_DISABLED_BUILDER" value="org.eclipse.jdt.core.javabuilder"/>
+<mapAttribute key="org.eclipse.ui.externaltools.ATTR_TOOL_ARGUMENTS"/>
+<booleanAttribute key="org.eclipse.ui.externaltools.ATTR_TRIGGERS_CONFIGURED" value="true"/>
+</launchConfiguration>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<launchConfiguration type="org.eclipse.ant.AntBuilderLaunchConfigurationType">
+<booleanAttribute key="org.eclipse.ui.externaltools.ATTR_BUILDER_ENABLED" value="false"/>
+<stringAttribute key="org.eclipse.ui.externaltools.ATTR_DISABLED_BUILDER" value="org.maven.ide.eclipse.maven2Builder"/>
+<mapAttribute key="org.eclipse.ui.externaltools.ATTR_TOOL_ARGUMENTS"/>
+<booleanAttribute key="org.eclipse.ui.externaltools.ATTR_TRIGGERS_CONFIGURED" value="true"/>
+</launchConfiguration>
--- /dev/null
+
+<project name="pottag.3rdparty" default="package" basedir=".">
+ <description>
+ pottag service 3rdparty
+ </description>
+ <!-- set global properties for this build -->
+ <property name="services.trunk" value="../../.."/>
+ <!-- environment should be declared before reading build.properties -->
+ <property environment="env" />
+ <property file="${services.trunk}/build.properties" />
+ <property name="mvn.opts" value="-V" />
+ <property name="src" location="src"/>
+
+ <condition property="osfamily-unix">
+ <os family="unix" />
+ </condition>
+ <condition property="osfamily-windows">
+ <os family="windows" />
+ </condition>
+
+ <target name="init" >
+ <!-- Create the time stamp -->
+ <tstamp/>
+ </target>
+
+ <target name="package" depends="package-unix,package-windows"
+ description="Package CollectionSpace Services" />
+ <target name="package-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="package-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="install" depends="install-unix,install-windows"
+ description="Install" />
+ <target name="install-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="install-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="clean" depends="clean-unix,clean-windows"
+ description="Delete target directories" >
+ <delete dir="${build}"/>
+ </target>
+ <target name="clean-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="clean" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="clean-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="clean" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="test" depends="test-unix,test-windows" description="Run tests" />
+ <target name="test-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="test" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="test-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="test" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="deploy" depends="install"
+ description="deploy pottag in ${jee.server.nuxeo}">
+ <!-- This target is obsolete. The Nuxeo artifacts are now created and deployed using the "csmake" tool
+ <ant antfile="nuxeo-platform-cs-pottag/build.xml" target="deploy" inheritall="false"/>
+ -->
+ </target>
+
+ <target name="undeploy"
+ description="undeploy pottag from ${jee.server.nuxeo}">
+ <!-- This target is obsolete. The Nuxeo artifacts are now created and deployed using the "csmake" tool
+ leaving this only for backwards compatibility reasons. -->
+ <ant antfile="nuxeo-platform-cs-pottag/build.xml" target="undeploy" inheritall="false"/>
+ </target>
+
+ <target name="dist"
+ description="generate distribution for pottag" depends="package">
+ <ant antfile="nuxeo-platform-cs-pottag/build.xml" target="dist" inheritall="false"/>
+ </target>
+
+
+</project>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="nuxeo-platform-cs-pottag" default="package" basedir=".">
+ <description>
+ pottag nuxeo document type
+ </description>
+ <!-- set global properties for this build -->
+ <property name="services.trunk" value="../../../.."/>
+ <!-- environment should be declared before reading build.properties -->
+ <property environment="env" />
+ <property file="${services.trunk}/build.properties" />
+ <property name="mvn.opts" value="-V" />
+ <property name="src" location="src"/>
+
+ <!-- JAR files used by CollectionSpace 4.0 and later -->
+ <property name="nuxeo.pottag.doctype.jars.all"
+ value="collectionspace.pottag.doctype.*.jar"/>
+ <property name="nuxeo.pottag.schema.jars.all"
+ value="collectionspace.pottag.schema.*.jar"/>
+ <!-- Legacy JAR files used by CollectionSpace 3.3 and earlier -->
+ <property name="nuxeo.pottag.legacy.jars.all"
+ value="org.collectionspace.services.pottag.3rdparty.nuxeo-*.jar"/>
+ <property name="nuxeo.pottag.legacy.jar"
+ value="org.collectionspace.services.pottag.3rdparty.nuxeo-${cspace.release}.jar"/>
+
+ <condition property="osfamily-unix">
+ <os family="unix" />
+ </condition>
+ <condition property="osfamily-windows">
+ <os family="windows" />
+ </condition>
+
+ <target name="init" >
+ <!-- Create the time stamp -->
+ <tstamp/>
+ </target>
+
+ <target name="package" depends="package-unix,package-windows"
+ description="Package CollectionSpace Services" />
+ <target name="package-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="package-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="install" depends="install-unix,install-windows"
+ description="Install" />
+ <target name="install-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="install-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="clean" depends="clean-unix,clean-windows"
+ description="Delete target directories" >
+ <delete dir="${build}"/>
+ </target>
+ <target name="clean-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="clean" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="clean-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="clean" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="test" depends="test-unix,test-windows" description="Run tests" />
+ <target name="test-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="test" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="test-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="test" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="deploy" depends="install"
+ description="deploy pottag doctype in ${jee.server.nuxeo}">
+ <!-- This target is obsolete. The Nuxeo artifacts are now created and deployed using the "csmake" tool
+ leaving this only for backwards compatibility reasons. -->
+ <copy file="${basedir}/target/${nuxeo.pottag.legacy.jar}"
+ todir="${jee.deploy.nuxeo.plugins}"/>
+ </target>
+
+ <target name="undeploy"
+ description="undeploy pottag doctype from ${jee.server.nuxeo}">
+ <delete>
+ <!-- Undeploy doctype and schema artifacts -->
+ <fileset dir="${jee.deploy.nuxeo.plugins}">
+ <include name="${nuxeo.pottag.doctype.jars.all}"/>
+ </fileset>
+ <fileset dir="${jee.deploy.nuxeo.plugins}">
+ <include name="${nuxeo.pottag.schema.jars.all}"/>
+ </fileset>
+ <!-- Undeploy legacy artifacts -->
+ <fileset dir="${jee.deploy.nuxeo.plugins}">
+ <include name="${nuxeo.pottag.legacy.jars.all}"/>
+ </fileset>
+ </delete>
+ <!-- Undeploy legacy artifacts from old deployment location through release 0.6 -->
+ <delete quiet="true">
+ <fileset dir="${jee.deploy.nuxeo.system}">
+ <include name="${nuxeo.pottag.legacy.jars.all}"/>
+ </fileset>
+ </delete>
+ </target>
+
+ <target name="dist"
+ description="generate distribution for pottag doctype" depends="package">
+ <copy todir="${services.trunk}/${dist.deploy.nuxeo.plugins}">
+ <fileset file="${basedir}/target/${nuxeo.pottag.legacy.jar}"/>
+ </copy>
+ </target>
+
+</project>
+
--- /dev/null
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <parent>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.pottag.3rdparty</artifactId>
+ <version>4.1.1</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.pottag.3rdparty.nuxeo</artifactId>
+ <name>services.pottag.3rdparty.nuxeo</name>
+ <packaging>jar</packaging>
+ <description>
+ Pottag Nuxeo Document Type
+ </description>
+
+ <properties>
+ <ServiceName>pottag</ServiceName>
+ <NuxeoDocTypeName>Pottag</NuxeoDocTypeName>
+ <CommonSchemaName>pottags_common</CommonSchemaName>
+ <Lifecycle>cs_default</Lifecycle>
+ </properties>
+
+ <build>
+ <resources>
+ <resource>
+ <directory>src/main/resources</directory>
+ <filtering>true</filtering>
+ </resource>
+ <resource>
+ <directory>../../../../3rdparty/nuxeo/nuxeo-doctype/src/main/resources</directory>
+ <filtering>true</filtering>
+ </resource>
+ </resources>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <configuration>
+ <archive>
+ <manifestFile>target/classes/META-INF/MANIFEST.MF</manifestFile>
+ <manifestEntries>
+ <Bundle-Version>${eclipseVersion}</Bundle-Version>
+ <Bundle-ManifestVersion>2</Bundle-ManifestVersion>
+ </manifestEntries>
+ </archive>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
--- /dev/null
+<?xml version="1.0"?>
+
+<!--
+layouts-contrib.xml
+
+Layout file for configuring screen layouts in the
+user interface of Nuxeo EP's web application, for
+viewing or editing CollectionSpace records stored
+in the Nuxeo repository.
+
+See the "Nuxeo Book" for an introductory description
+of how to edit this file. For instance, for Nuxeo EP 5.3:
+http://doc.nuxeo.org/5.3/books/nuxeo-book/html/
+
+$LastChangedRevision: $
+$LastChangedDate: $
+-->
+
+<component name="org.collectionspace.pottag.layouts.webapp">
+
+ <extension target="org.nuxeo.ecm.platform.forms.layout.WebLayoutManager"
+ point="layouts">
+
+ <layout name="pottag">
+ <templates>
+ <template mode="any">/layouts/layout_default_template.xhtml</template>
+ </templates>
+ <rows>
+ <row><widget>family</widget></row>
+ <row><widget>commonName</widget></row>
+ <row><widget>locale</widget></row>
+ <row><widget>taxonName</widget></row>
+ <row><widget>labelData</widget></row>
+ <row><widget>numberOfLabels</widget></row>
+
+ </rows>
+
+ <widget name="family" type="text">
+ <labels>
+ <label mode="any">family</label>
+ </labels>
+ <translated>true</translated>
+ <fields>
+ <field schema="pottags_common">family</field>
+ </fields>
+ <properties widgetMode="edit">
+ <property name="styleClass">dataInputText</property>
+ </properties>
+ </widget>
+
+ <widget name="commonName" type="text">
+ <labels>
+ <label mode="any">commonName</label>
+ </labels>
+ <translated>true</translated>
+ <fields>
+ <field schema="pottags_common">commonName</field>
+ </fields>
+ <properties widgetMode="edit">
+ <property name="styleClass">dataInputText</property>
+ </properties>
+ </widget>
+
+ <widget name="locale" type="text">
+ <labels>
+ <label mode="any">locale</label>
+ </labels>
+ <translated>true</translated>
+ <fields>
+ <field schema="pottags_common">locale</field>
+ </fields>
+ <properties widgetMode="edit">
+ <property name="styleClass">dataInputText</property>
+ </properties>
+ </widget>
+
+ <widget name="taxonName" type="text">
+ <labels>
+ <label mode="any">taxonName</label>
+ </labels>
+ <translated>true</translated>
+ <fields>
+ <field schema="pottags_common">taxonName</field>
+ </fields>
+ <properties widgetMode="edit">
+ <property name="styleClass">dataInputText</property>
+ </properties>
+ </widget>
+
+ <widget name="numberOfLabels" type="text">
+ <labels>
+ <label mode="any">numberOfLabels</label>
+ </labels>
+ <translated>true</translated>
+ <fields>
+ <field schema="pottags_common">numberOfLabels</field>
+ </fields>
+ <properties widgetMode="edit">
+ <property name="styleClass">dataInputText</property>
+ </properties>
+ </widget>
+
+ <widget name="labelData" type="text">
+ <labels>
+ <label mode="any">labelData</label>
+ </labels>
+ <translated>true</translated>
+ <fields>
+ <field schema="pottags_common">labelData</field>
+ </fields>
+ <properties widgetMode="edit">
+ <property name="styleClass">dataInputText</property>
+ </properties>
+ </widget>
+
+ </layout>
+ </extension>
+</component>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+
+<!--
+ Plant Label schema (XSD)
+
+ Entity : Pottag
+ Part : Common
+ Used for: Nuxeo EP core document type
+
+ $LastChangedRevision$
+ $LastChangedDate$
+-->
+
+<xs:schema
+ xmlns:xs="http://www.w3.org/2001/XMLSchema"
+ xmlns:ns="http://collectionspace.org/pottag/"
+ xmlns="http://collectionspace.org/pottag/"
+ targetNamespace="http://collectionspace.org/pottag/"
+ version="0.1">
+
+ <!-- See the CollectionSpace wiki for documentation of the Pot Tag schema. -->
+
+ <!-- Pot tag (plant label) fields -->
+ <xs:element name="family" type="xs:string"/>
+ <xs:element name="commonName" type="xs:string"/>
+ <xs:element name="locale" type="xs:string"/>
+ <xs:element name="taxonName" type="xs:string"/>
+ <xs:element name="labelData" type="xs:string"/>
+ <xs:element name="numberOfLabels" type="xs:integer"/>
+ <xs:element name="printLabels" type="xs:string"/>
+</xs:schema>
--- /dev/null
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <artifactId>org.collectionspace.services.pottag</artifactId>
+ <groupId>org.collectionspace.services</groupId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>org.collectionspace.services.pottag.3rdparty</artifactId>
+ <name>services.pottag.3rdparty</name>
+ <packaging>pom</packaging>
+
+ <description>
+ 3rd party build for pottag service
+ </description>
+
+ <properties>
+ <ServiceName>pottag</ServiceName>
+ <NuxeoDocTypeName>Pottag</NuxeoDocTypeName>
+ <CommonSchemaName>pottags_common</CommonSchemaName>
+ <TenantPrefix>Tenant</TenantPrefix>
+ </properties>
+
+ <modules>
+ <!-- This module is obsolete. The Nuxeo artifacts are now created and deployed using the "csmake" tool
+ <module>nuxeo-platform-cs-pottag</module>
+ -->
+ </modules>
+</project>
--- /dev/null
+
+<project name="pottag" default="package" basedir=".">
+ <description>
+ pottag service
+ </description>
+ <!-- set global properties for this build -->
+ <property name="services.trunk" value="../.."/>
+ <!-- environment should be declared before reading build.properties -->
+ <property environment="env" />
+ <property file="${services.trunk}/build.properties" />
+ <property name="mvn.opts" value="-V" />
+ <property name="src" location="src"/>
+
+ <condition property="osfamily-unix">
+ <os family="unix" />
+ </condition>
+ <condition property="osfamily-windows">
+ <os family="windows" />
+ </condition>
+
+ <target name="package" depends="package-unix,package-windows"
+ description="Package CollectionSpace Services" />
+
+ <target name="package-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="package-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+
+ <target name="install" depends="install-unix,install-windows"
+ description="Install" />
+ <target name="install-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="install-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="clean" depends="clean-unix,clean-windows"
+ description="Delete target directories" >
+ <delete dir="${build}"/>
+ </target>
+ <target name="clean-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="clean" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="clean-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="clean" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="test" depends="test-unix,test-windows" description="Run tests" />
+ <target name="test-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="test" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="test-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="test" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="deploy" depends="install"
+ description="deploy pottag service">
+ <ant antfile="3rdparty/build.xml" target="deploy" inheritall="false"/>
+ </target>
+
+ <target name="undeploy"
+ description="undeploy pottag service">
+ <ant antfile="3rdparty/build.xml" target="undeploy" inheritall="false"/>
+ </target>
+
+ <target name="dist" depends="package"
+ description="distribute pottag service">
+ <ant antfile="3rdparty/build.xml" target="dist" inheritall="false"/>
+ </target>
+
+</project>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.pottag</artifactId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.pottag.client</artifactId>
+ <name>services.pottag.client</name>
+
+ <dependencies>
+ <!-- keep slf4j dependencies on the top -->
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ <scope>test</scope>
+ </dependency>
+<!-- CollectionSpace dependencies -->
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.common</artifactId>
+ <optional>true</optional>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.pottag.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.person.client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.authority.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+<!-- External dependencies -->
+ <dependency>
+ <groupId>org.testng</groupId>
+ <artifactId>testng</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-jaxrs</artifactId>
+ <!-- filter out unwanted jars -->
+ <exclusions>
+ <exclusion>
+ <groupId>tjws</groupId>
+ <artifactId>webserver</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-jaxb-provider</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-multipart-provider</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>commons-httpclient</groupId>
+ <artifactId>commons-httpclient</artifactId>
+ <version>3.1</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>collectionspace-services-pottag-client</finalName>
+ </build>
+</project>
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+ *
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+ *
+ * Copyright (c) 2009 Regents of the University of California
+ *
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+ *
+ * You may obtain a copy of the ECL 2.0 License at
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+ */
+package org.collectionspace.services.client;
+
+import org.collectionspace.services.pottag.PottagsCommon;
+
+/**
+ * PottagClient.java
+ *
+ * $LastChangedRevision$
+ * $LastChangedDate$
+ *
+ */
public class PottagClient extends AbstractCommonListPoxServiceClientImpl<PottagProxy, PottagsCommon> {

    /** Service name; also used as the payload name and the URL path component. */
    public static final String SERVICE_NAME = "pottags";
    public static final String SERVICE_PATH_COMPONENT = SERVICE_NAME;
    /** Absolute service path, e.g. "/pottags". */
    public static final String SERVICE_PATH = "/" + SERVICE_PATH_COMPONENT;
    /** Path with a trailing slash, used by the RESTEasy proxy's @Path binding. */
    public static final String SERVICE_PATH_PROXY = SERVICE_PATH + "/";
    public static final String SERVICE_PAYLOAD_NAME = SERVICE_NAME;

    /**
     * Creates a client for the Pottag (pot tag / plant label) service.
     *
     * @throws Exception if the underlying REST client cannot be initialized
     */
    public PottagClient() throws Exception {
        super();
    }

    /* (non-Javadoc)
     * @see org.collectionspace.services.client.AbstractServiceClientImpl#getServicePathComponent()
     */
    @Override
    public String getServicePathComponent() {
        return SERVICE_PATH_COMPONENT;
    }

    @Override
    public String getServiceName() {
        return SERVICE_NAME;
    }

    @Override
    public Class<PottagProxy> getProxyClass() {
        return PottagProxy.class;
    }

}
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+ *
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+ *
+ * Copyright (c) 2009 Regents of the University of California
+ *
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+ *
+ * You may obtain a copy of the ECL 2.0 License at
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+ */
+package org.collectionspace.services.client;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+
+/**
+ * PottagProxy.java
+ *
+ * $LastChangedRevision$
+ * $LastChangedDate$
+ *
+ */
@Path(PottagClient.SERVICE_PATH_PROXY)
@Produces({"application/xml"})
@Consumes({"application/xml"})
public interface PottagProxy extends CollectionSpaceCommonListPoxProxy {
    // Marker interface: all CRUD and list operations are inherited from
    // CollectionSpaceCommonListPoxProxy; no pot-tag-specific operations are needed.
}
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+ *
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+ *
+ * Copyright © 2009 Regents of the University of California
+ *
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+ *
+ * You may obtain a copy of the ECL 2.0 License at
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.collectionspace.services.client.test;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.ws.rs.core.Response;
+
+import org.collectionspace.services.PersonJAXBSchema;
+import org.collectionspace.services.client.CollectionSpaceClient;
+import org.collectionspace.services.client.PottagClient;
+import org.collectionspace.services.client.PersonAuthorityClient;
+import org.collectionspace.services.client.PersonAuthorityClientUtils;
+import org.collectionspace.services.client.PayloadOutputPart;
+import org.collectionspace.services.client.PoxPayloadIn;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.common.authorityref.AuthorityRefList;
+import org.collectionspace.services.jaxb.AbstractCommonList;
+import org.collectionspace.services.pottag.PottagsCommon;
+import org.collectionspace.services.person.PersonTermGroup;
+
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.Test;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * PottagAuthRefsTest, carries out Authority References tests against a
+ * deployed and running Pottag (aka Pot Tag) Service.
+ *
+ * $LastChangedRevision$
+ * $LastChangedDate$
+ */
+public class PottagAuthRefsTest extends BaseServiceTest<AbstractCommonList> {
+
+ private final String CLASS_NAME = PottagAuthRefsTest.class.getName();
+ private final Logger logger = LoggerFactory.getLogger(CLASS_NAME);
+
+ // Instance variables specific to this test.
+ final String PERSON_AUTHORITY_NAME = "TestPersonAuth";
+ private List<String> pottagIdsCreated = new ArrayList<String>();
+ private List<String> personIdsCreated = new ArrayList<String>();
+ private String personAuthCSID = null;
+ private String taggedByAuthRef;
+
+ /* (non-Javadoc)
+ * @see org.collectionspace.services.client.test.BaseServiceTest#getClientInstance()
+ */
+ @Override
+ protected CollectionSpaceClient getClientInstance() {
+ throw new UnsupportedOperationException(); //method not supported (or needed) in this test class
+ }
+
+ /* (non-Javadoc)
+ * @see org.collectionspace.services.client.test.BaseServiceTest#getAbstractCommonList(org.jboss.resteasy.client.ClientResponse)
+ */
+ @Override
+ protected AbstractCommonList getCommonList(Response response) {
+ throw new UnsupportedOperationException(); //method not supported (or needed) in this test class
+ }
+
+ // ---------------------------------------------------------------
+ // CRUD tests : CREATE tests
+ // ---------------------------------------------------------------
+ // Success outcomes
+ @Test(dataProvider="testName", dataProviderClass=AbstractServiceTestImpl.class)
+ public void createWithAuthRefs(String testName) throws Exception {
+ testSetup(STATUS_CREATED, ServiceRequestType.CREATE);
+
+ // Submit the request to the service and store the response.
+ String identifier = createIdentifier();
+
+ // Create all the person refs and entities
+ createPersonRefs();
+
+ // Create a new Loans In resource.
+ //
+ // One or more fields in this resource will be PersonAuthority
+ // references, and will refer to Person resources by their refNames.
+ PottagClient pottagClient = new PottagClient();
+ PoxPayloadOut pottageInstance = createPottagInstance("familyName-" + identifier, this.taggedByAuthRef,
+ "commonName-" + identifier);
+ Response response = pottagClient.create(pottageInstance);
+ try {
+ assertStatusCode(response, testName);
+ // Store the ID returned from the first resource created for additional tests below.
+ if (knownResourceId == null) {
+ knownResourceId = extractId(response);
+ }
+
+ // Store the IDs from every resource created by tests,
+ // so they can be deleted after tests have been run.
+ pottagIdsCreated.add(extractId(response));
+ } finally {
+ response.close();
+ }
+ }
+
+ protected void createPersonRefs() throws Exception {
+ PersonAuthorityClient personAuthClient = new PersonAuthorityClient();
+ // Create a temporary PersonAuthority resource, and its corresponding
+ // refName by which it can be identified.
+ PoxPayloadOut multipart = PersonAuthorityClientUtils.createPersonAuthorityInstance(
+ PERSON_AUTHORITY_NAME, PERSON_AUTHORITY_NAME, personAuthClient.getCommonPartName());
+ Response res = personAuthClient.create(multipart);
+ try {
+ int statusCode = res.getStatus();
+ Assert.assertTrue(testRequestType.isValidStatusCode(statusCode), invalidStatusCodeMessage(testRequestType, statusCode));
+ Assert.assertEquals(statusCode, STATUS_CREATED);
+ personAuthCSID = extractId(res);
+ } finally {
+ res.close();
+ }
+ String authRefName = PersonAuthorityClientUtils.getAuthorityRefName(personAuthCSID, personAuthClient);
+
+ // Create temporary Person resource, and a corresponding refName from which it can be identified.
+ String csid = createPerson("Harry", "Potter", "harryPotter", authRefName);
+ this.personIdsCreated.add(csid);
+ this.taggedByAuthRef = PersonAuthorityClientUtils.getPersonRefName(personAuthCSID, csid, personAuthClient);
+ }
+
+ protected String createPerson(String firstName, String surName, String shortId, String authRefName ) throws Exception {
+ PersonAuthorityClient personAuthClient = new PersonAuthorityClient();
+ Map<String, String> personInfo = new HashMap<String,String>();
+ personInfo.put(PersonJAXBSchema.FORE_NAME, firstName);
+ personInfo.put(PersonJAXBSchema.SUR_NAME, surName);
+ personInfo.put(PersonJAXBSchema.SHORT_IDENTIFIER, shortId);
+ List<PersonTermGroup> personTerms = new ArrayList<PersonTermGroup>();
+ PersonTermGroup term = new PersonTermGroup();
+ String termName = firstName + " " + surName;
+ term.setTermDisplayName(termName);
+ term.setTermName(termName);
+ personTerms.add(term);
+ PoxPayloadOut multipart =
+ PersonAuthorityClientUtils.createPersonInstance(personAuthCSID,
+ authRefName, personInfo, personTerms, personAuthClient.getItemCommonPartName());
+
+ Response res = personAuthClient.createItem(personAuthCSID, multipart);
+ try {
+ int statusCode = res.getStatus();
+ Assert.assertTrue(testRequestType.isValidStatusCode(statusCode),
+ invalidStatusCodeMessage(testRequestType, statusCode));
+ Assert.assertEquals(statusCode, STATUS_CREATED);
+ return extractId(res);
+ } finally {
+ res.close();
+ }
+ }
+
+ // Success outcomes
+ @Test(dataProvider="testName", dataProviderClass=AbstractServiceTestImpl.class,
+ dependsOnMethods = {"createWithAuthRefs"})
+ public void readAndCheckAuthRefs(String testName) throws Exception {
+ // Perform setup.
+ testSetup(STATUS_OK, ServiceRequestType.READ);
+
+ // Submit the request to the service and store the response.
+ PottagClient pottagClient = new PottagClient();
+ Response res = pottagClient.read(knownResourceId);
+ try {
+ assertStatusCode(res, testName);
+ // Extract the common part from the response.
+ PoxPayloadIn input = new PoxPayloadIn(res.readEntity(String.class));
+ PottagsCommon pottagCommon = (PottagsCommon) extractPart(input, pottagClient.getCommonPartName(),
+ PottagsCommon.class);
+ Assert.assertNotNull(pottagCommon);
+ } finally {
+ if (res != null) {
+ res.close();
+ }
+ }
+
+ // Get the auth refs and check them
+ res = pottagClient.getAuthorityRefs(knownResourceId);
+ AuthorityRefList list = null;
+ try {
+ assertStatusCode(res, testName);
+ list = res.readEntity(AuthorityRefList.class);
+ Assert.assertNotNull(list);
+ } finally {
+ if (res != null) {
+ res.close();
+ }
+ }
+
+ int expectedAuthRefs = personIdsCreated.size();
+ List<AuthorityRefList.AuthorityRefItem> items = list.getAuthorityRefItem();
+ int numAuthRefsFound = items.size();
+ if (logger.isDebugEnabled()) {
+ logger.debug("Expected " + expectedAuthRefs + " authority references, found " + numAuthRefsFound);
+ }
+
+ // Optionally output additional data about list members for debugging.
+ boolean iterateThroughList = true;
+ if (iterateThroughList && logger.isDebugEnabled()) {
+ int i = 0;
+ for(AuthorityRefList.AuthorityRefItem item : items){
+ logger.debug(testName + ": list-item[" + i + "] Field:" +
+ item.getSourceField() + "= " +
+ item.getAuthDisplayName() +
+ item.getItemDisplayName());
+ logger.debug(testName + ": list-item[" + i + "] refName=" +
+ item.getRefName());
+ logger.debug(testName + ": list-item[" + i + "] URI=" +
+ item.getUri());
+ i++;
+ }
+ }
+
+ Assert.assertEquals(numAuthRefsFound, expectedAuthRefs,
+ "Did not find all expected authority references! " + "Expected " + expectedAuthRefs + ", found " + numAuthRefsFound);
+ }
+
+
+ // ---------------------------------------------------------------
+ // Cleanup of resources created during testing
+ // ---------------------------------------------------------------
+
+ /**
+ * Deletes all resources created by tests, after all tests have been run.
+ *
+ * This cleanup method will always be run, even if one or more tests fail.
+ * For this reason, it attempts to remove all resources created
+ * at any point during testing, even if some of those resources
+ * may be expected to be deleted by certain tests.
+ * @throws Exception
+ */
+ @AfterClass(alwaysRun=true)
+ public void cleanUp() throws Exception {
+ String noTest = System.getProperty("noTestCleanup");
+ if (Boolean.TRUE.toString().equalsIgnoreCase(noTest)) {
+ if (logger.isDebugEnabled()) {
+ logger.debug("Skipping Cleanup phase ...");
+ }
+ return;
+ }
+ if (logger.isDebugEnabled()) {
+ logger.debug("Cleaning up temporary resources created for testing ...");
+ }
+
+ //
+ // Delete all the pottag records we created
+ PottagClient pottagClient = new PottagClient();
+ for (String resourceId : pottagIdsCreated) {
+ // Note: Any non-success responses are ignored and not reported.
+ pottagClient.delete(resourceId).close(); // alternative to pottagClient.delete(resourceId).releaseConnection();
+ }
+
+ //
+ // Delete Person resource(s) (before PersonAuthority resources).
+ PersonAuthorityClient personAuthClient = new PersonAuthorityClient();
+ for (String resourceId : personIdsCreated) {
+ // Note: Any non-success responses are ignored and not reported.
+ personAuthClient.deleteItem(personAuthCSID, resourceId).close();
+ }
+ if (personAuthCSID != null) {
+ personAuthClient.delete(personAuthCSID).close();
+ }
+ }
+
+ // ---------------------------------------------------------------
+ // Utility methods used by tests above
+ // ---------------------------------------------------------------
+ public String getServiceName() {
+ return PottagClient.SERVICE_NAME;
+ }
+
+ @Override
+ public String getServicePathComponent() {
+ return PottagClient.SERVICE_PATH_COMPONENT;
+ }
+
+ private PoxPayloadOut createPottagInstance(String familyName,
+ String taggedBy,
+ String commonName) throws Exception {
+ PottagsCommon pottagCommon = new PottagsCommon();
+ pottagCommon.setFamily(familyName);
+ pottagCommon.setTaggedBy(taggedBy);
+ pottagCommon.setCommonName(commonName);
+
+ PoxPayloadOut multipart = new PoxPayloadOut(this.getServicePathComponent());
+ PayloadOutputPart commonPart =
+ multipart.addPart(new PottagClient().getCommonPartName(), pottagCommon);
+
+ if(logger.isDebugEnabled()){
+ logger.debug("to be created, pottag common");
+ logger.debug(objectAsXmlString(pottagCommon, PottagsCommon.class));
+ }
+
+ return multipart;
+ }
+
+ @Override
+ protected Class<AbstractCommonList> getCommonListType() {
+ return AbstractCommonList.class;
+ }
+
+ @Override
+ protected CollectionSpaceClient getClientInstance(String clientPropertiesFilename) throws Exception {
+ // TODO Auto-generated method stub
+ return null;
+ }
+}
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+ *
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+ *
+ * Copyright © 2009 Regents of the University of California
+ *
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+ *
+ * You may obtain a copy of the ECL 2.0 License at
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.collectionspace.services.client.test;
+
+import org.collectionspace.services.client.CollectionSpaceClient;
+import org.collectionspace.services.client.PottagClient;
+import org.collectionspace.services.client.PayloadOutputPart;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.common.api.GregorianCalendarDateTimeUtils;
+import org.collectionspace.services.jaxb.AbstractCommonList;
+import org.collectionspace.services.pottag.PottagsCommon;
+
+import org.testng.Assert;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * PottagServiceTest, carries out tests against a
+ * deployed and running Pottag (aka Pot Tag) Service.
+ *
+ * $LastChangedRevision$
+ * $LastChangedDate$
+ */
+public class PottagServiceTest extends AbstractPoxServiceTestImpl<AbstractCommonList, PottagsCommon> {
+
+ /** The logger. */
+ private final String CLASS_NAME = PottagServiceTest.class.getName();
+ private final Logger logger = LoggerFactory.getLogger(CLASS_NAME);
+ // Instance variables specific to this test.
+ private final static String CURRENT_DATE_UTC = GregorianCalendarDateTimeUtils.currentDateUTC();
+
+ /* (non-Javadoc)
+ * @see org.collectionspace.services.client.test.BaseServiceTest#getClientInstance()
+ */
+ @Override
+ protected CollectionSpaceClient getClientInstance() throws Exception {
+ return new PottagClient();
+ }
+
+ @Override
+ protected void compareReadInstances(PottagsCommon original, PottagsCommon pottagCommon) throws Exception {
+ if (logger.isDebugEnabled()) {
+ logger.debug("UTF-8 data sent=" + getUTF8DataFragment() + "\n"
+ + "UTF-8 data received=" + pottagCommon.getLabelData());
+ }
+
+ Assert.assertEquals(pottagCommon.getLabelData(), getUTF8DataFragment(),
+ "UTF-8 data retrieved '" + pottagCommon.getLabelData()
+ + "' does not match expected data '" + getUTF8DataFragment());
+ }
+
+ @Override
+ protected void compareUpdatedInstances(PottagsCommon pottagCommon,
+ PottagsCommon updatedPottagCommon) throws Exception {
+ // Check selected fields in the updated common part.
+ Assert.assertEquals(updatedPottagCommon.getFamily(), pottagCommon.getFamily(),
+ "Data in updated object did not match submitted data.");
+
+ if (logger.isDebugEnabled()) {
+ logger.debug("UTF-8 data sent=" + pottagCommon.getLabelData() + "\n"
+ + "UTF-8 data received=" + updatedPottagCommon.getLabelData());
+ }
+ Assert.assertTrue(updatedPottagCommon.getLabelData().contains(getUTF8DataFragment()),
+ "UTF-8 data retrieved '" + updatedPottagCommon.getLabelData() + "' does not contain expected data '" + getUTF8DataFragment());
+ Assert.assertEquals(updatedPottagCommon.getLabelData(),
+ pottagCommon.getLabelData(), "Data in updated object did not match submitted data.");
+ }
+
+ // ---------------------------------------------------------------
+ // Utility methods used by tests above
+ // ---------------------------------------------------------------
+
+ @Override
+ public String getServiceName() {
+ return PottagClient.SERVICE_NAME;
+ }
+
+ /* (non-Javadoc)
+ * @see org.collectionspace.services.client.test.BaseServiceTest#getServicePathComponent()
+ */
+ @Override
+ public String getServicePathComponent() {
+ return PottagClient.SERVICE_PATH_COMPONENT;
+ }
+
+ @Override
+ protected PoxPayloadOut createInstance(String identifier) throws Exception {
+ return createPottagInstance(identifier);
+ }
+
+ /**
+ * Creates the pottag instance.
+ *
+ * @param identifier the identifier
+ * @return the multipart output
+ * @throws Exception
+ */
+ private PoxPayloadOut createPottagInstance(String identifier) throws Exception {
+ return createPottagInstance(
+ "family-" + identifier,
+ "returnDate-" + identifier);
+ }
+
+ /**
+ * Creates the pottag instance.
+ *
+ * @param familyName the pottag family
+ * @param returnDate the return date
+ * @return the multipart output
+ * @throws Exception
+ */
+ private PoxPayloadOut createPottagInstance(String familyName,
+ String returnDate) throws Exception {
+
+ PottagsCommon pottagCommon = new PottagsCommon();
+ pottagCommon.setFamily(familyName);
+ pottagCommon.setLocale("Mexico");
+ pottagCommon.setLabelData(getUTF8DataFragment());
+
+ PoxPayloadOut multipart = new PoxPayloadOut(this.getServicePathComponent());
+ PayloadOutputPart commonPart =
+ multipart.addPart(new PottagClient().getCommonPartName(), pottagCommon);
+
+ if (logger.isDebugEnabled()) {
+ logger.debug("to be created, pottag common");
+ logger.debug(objectAsXmlString(pottagCommon, PottagsCommon.class));
+ }
+
+ return multipart;
+ }
+
+ @Override
+ public void CRUDTests(String testName) {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ protected PoxPayloadOut createInstance(String commonPartName,
+ String identifier) throws Exception {
+ PoxPayloadOut result = createPottagInstance(identifier);
+ return result;
+ }
+
+ @Override
+ protected PottagsCommon updateInstance(PottagsCommon commonPartObject) {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ protected CollectionSpaceClient getClientInstance(String clientPropertiesFilename) throws Exception {
+ // TODO Auto-generated method stub
+ return null;
+ }
+}
--- /dev/null
+log4j.rootLogger=debug, stdout, R
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+
+# Pattern to output the caller's file name and line number.
+log4j.appender.stdout.layout.ConversionPattern=%d %-5p [%t] [%c:%L] %m%n
+
+log4j.appender.R=org.apache.log4j.RollingFileAppender
+log4j.appender.R.File=target/test-client.log
+
+log4j.appender.R.MaxFileSize=100KB
+# Keep one backup file
+log4j.appender.R.MaxBackupIndex=1
+
+log4j.appender.R.layout=org.apache.log4j.PatternLayout
+log4j.appender.R.layout.ConversionPattern=%d %-5p [%t] [%c:%L] %m%n
+
+#packages
+log4j.logger.org.collectionspace=DEBUG
+log4j.logger.org.apache=INFO
+log4j.logger.httpclient=INFO
+log4j.logger.org.jboss.resteasy=INFO
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <artifactId>org.collectionspace.services.pottag</artifactId>
+ <groupId>org.collectionspace.services</groupId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.pottag.jaxb</artifactId>
+ <name>services.pottag.jaxb</name>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>collectionspace-services-pottag-jaxb</finalName>
+ <defaultGoal>install</defaultGoal>
+ <plugins>
+ <plugin>
+ <groupId>org.jvnet.jaxb2.maven2</groupId>
+ <artifactId>maven-jaxb2-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+</project>
+
--- /dev/null
+/**
+ *
+ */
+package org.collectionspace.services;
+
/**
 * Field-name constants for the pot tag (plant label) common schema,
 * matching the element names declared in pottags_common.xsd.
 */
public interface PottagJAXBSchema {
    // Interface fields are implicitly public static final.
    String FAMILY = "family";
    String COMMON_NAME = "commonName";
    String LOCALE = "locale";
    String TAXON_NAME = "taxonName";
    String LABEL_DATA = "labelData";
    String NUMBER_OF_LABELS = "numberOfLabels";
}
+
+
--- /dev/null
+package org.collectionspace.services;
+
/**
 * Field-name constants for pot tag list items.
 */
public interface PottagListItemJAXBSchema {
    // Interface fields are implicitly public static final.
    String FAMILY = "family";
    String PRINT_LABELS = "printLabels";
    String NUMBER_OF_LABELS = "numberOfLabels";
    String CSID = "csid";
    // NOTE(review): constant is named URI but its value is "url" — looks like
    // this mirrors the list-item element name; confirm against the list schema.
    String URI = "url";
}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+
+<!--
+ Plant Label schema (XSD)
+
+ Entity : Pottag
+ Part : Common
+ Used for: JAXB binding between XML and Java objects
+
+ $LastChangedRevision$
+ $LastChangedDate$
+-->
+
+<xs:schema
+ xmlns:xs="http://www.w3.org/2001/XMLSchema"
+ xmlns:jaxb="http://java.sun.com/xml/ns/jaxb"
+ jaxb:version="1.0" elementFormDefault="unqualified"
+ xmlns:ns="http://collectionspace.org/services/pottag"
+ xmlns="http://collectionspace.org/services/pottag"
+ targetNamespace="http://collectionspace.org/services/pottag"
+ version="0.1"
+>
+
+<!--
+ Avoid XmlRootElement nightmare:
+ See http://weblogs.java.net/blog/kohsuke/archive/2006/03/why_does_jaxb_p.html
+-->
+<!-- See http://wiki.collectionspace.org/display/collectionspace/Plant+Label+Schema -->
+
+ <!-- Plant Label Information Group -->
+ <xs:element name="pottags_common">
+ <xs:complexType>
+ <xs:sequence>
+ <!-- Pot tag (plant label) descriptive fields -->
+ <xs:element name="family" type="xs:string"/>
+ <xs:element name="commonName" type="xs:string"/>
+ <xs:element name="locale" type="xs:string"/>
+ <xs:element name="taxonName" type="xs:string"/>
+ <xs:element name="labelData" type="xs:string"/>
+ <xs:element name="numberOfLabels" type="xs:integer"/>
+ <xs:element name="printLabels" type="xs:string"/>
+ <xs:element name="taggedBy" type="xs:string"/>
+ </xs:sequence>
+
+ </xs:complexType>
+ </xs:element>
+
+</xs:schema>
+
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Aggregator POM for the CollectionSpace pot tag (plant label) service modules. -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.main</artifactId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>org.collectionspace.services.pottag</artifactId>
+ <name>services.pottag</name>
+ <packaging>pom</packaging>
+
+ <dependencies>
+ </dependencies>
+
+ <modules>
+ <module>jaxb</module>
+ <module>service</module>
+ <module>3rdparty</module>
+ <module>client</module>
+ </modules>
+
+</project>
+
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <parent>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.pottag</artifactId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.pottag.service</artifactId>
+ <name>services.pottag.service</name>
+ <packaging>jar</packaging>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.pottag.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.pottag.client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.collectionobject.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- External dependencies -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.1</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.testng</groupId>
+ <artifactId>testng</artifactId>
+ </dependency>
+
+ <!-- javax -->
+
+ <dependency>
+ <groupId>javax.security</groupId>
+ <artifactId>jaas</artifactId>
+ <version>1.0.01</version>
+ <scope>provided</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>dom4j</groupId>
+ <artifactId>dom4j</artifactId>
+ <version>1.6.1</version>
+ <scope>provided</scope>
+ </dependency>
+
+ <!-- jboss -->
+
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-jaxrs</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>tjws</groupId>
+ <artifactId>webserver</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-jaxb-provider</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-multipart-provider</artifactId>
+ </dependency>
+
+ <!-- nuxeo -->
+
+ <dependency>
+ <groupId>org.nuxeo.ecm.core</groupId>
+ <artifactId>nuxeo-core-api</artifactId>
+ <version>${nuxeo.core.version}</version>
+ <exclusions>
+ <exclusion>
+ <artifactId>jboss-remoting</artifactId>
+ <groupId>jboss</groupId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ </dependencies>
+
+ <build>
+ <finalName>collectionspace-services-pottag</finalName>
+ </build>
+</project>
+
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<profilesXml xmlns="http://maven.apache.org/PROFILES/1.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/PROFILES/1.0.0 http://maven.apache.org/xsd/profiles-1.0.0.xsd">
+</profilesXml>
\ No newline at end of file
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+
+ * Copyright 2009 University of California at Berkeley
+
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+
+ * You may obtain a copy of the ECL 2.0 License at
+
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.collectionspace.services.pottag;
+
+import org.collectionspace.services.client.PottagClient;
+import org.collectionspace.services.common.NuxeoBasedResource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+
+@Path(PottagClient.SERVICE_PATH)
+@Consumes("application/xml")
+@Produces("application/xml")
+public class PottagResource extends NuxeoBasedResource {
+
+ final Logger logger = LoggerFactory.getLogger(PottagResource.class);
+
+ @Override
+ protected String getVersionString() {
+ final String lastChangeRevision = "$LastChangedRevision$";
+ return lastChangeRevision;
+ }
+
+ @Override
+ public String getServiceName() {
+ return PottagClient.SERVICE_NAME;
+ }
+
+ @Override
+ public Class<PottagsCommon> getCommonPartClass() {
+ return PottagsCommon.class;
+ }
+}
+
+
+
+
+
+
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+
+ * Copyright 2009 University of California at Berkeley
+
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+
+ * You may obtain a copy of the ECL 2.0 License at
+
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.collectionspace.services.pottag.nuxeo;
+
+/**
+ * PottagConstants specifies constants for the Pottag (pot tag / plant label) service.
+ *
+ */
public class PottagConstants {

    // Nuxeo document type, schema, and Dublin Core title for pot tag records.
    public static final String NUXEO_DOCTYPE = "Pottag";
    public static final String NUXEO_SCHEMA_NAME = "pottag";
    public static final String NUXEO_DC_TITLE = "CollectionSpace-Pottag";

    public static final String COMMON_SCHEMA_NAME = "pottags_common";

    // Schema/field/value names for the "labels requested" (printLabels) flag.
    public static final String LABEL_REQUESTED_SCHEMA_NAME = COMMON_SCHEMA_NAME;
    public static final String LABEL_REQUESTED_FIELD_NAME = "printLabels";
    public static final String LABEL_REQUESTED_YES_VALUE = "yes";
    public static final String LABEL_REQUESTED_NO_VALUE = "no";
}
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+
+ * Copyright 2009 University of California at Berkeley
+
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+
+ * You may obtain a copy of the ECL 2.0 License at
+
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.collectionspace.services.pottag.nuxeo;
+
+import org.collectionspace.services.pottag.PottagsCommon;
+import org.collectionspace.services.nuxeo.client.java.NuxeoDocumentModelHandler;
+
+/** PottagDocumentModelHandler: document-model handler for the Pottag service.
+ * All behavior is inherited from NuxeoDocumentModelHandler, typed to the
+ * PottagsCommon payload part; no Pottag-specific handling is defined here.
+ *
+ * $LastChangedRevision$
+ * $LastChangedDate$
+ */
+public class PottagDocumentModelHandler
+ extends NuxeoDocumentModelHandler<PottagsCommon> {
+}
+
--- /dev/null
+package org.collectionspace.services.pottag.nuxeo;
+
+import org.collectionspace.services.common.context.ServiceContext;
+import org.collectionspace.services.common.document.InvalidDocumentException;
+import org.collectionspace.services.common.document.ValidatorHandler;
+import org.collectionspace.services.common.document.DocumentHandler.Action;
+
+public class PottagValidatorHandler implements ValidatorHandler {
+
+    /**
+     * Validates a Pottag document for the given lifecycle action.
+     *
+     * Currently a no-op placeholder: no validation rules have been defined
+     * for the Pottag service yet. Leftover debug output (System.out.println)
+     * from the generated stub has been removed.
+     *
+     * @param action the document action being performed (e.g. create, update)
+     * @param ctx the service context for the current request
+     * @throws InvalidDocumentException if the document fails validation
+     */
+    @Override
+    public void validate(Action action, ServiceContext ctx)
+            throws InvalidDocumentException {
+        // TODO: implement validation rules for the Pottag service.
+    }
+
+}
--- /dev/null
+package org.collectionspace.services.test;
+
+//import org.collectionspace.services.pottag.Pottag;
+//import org.collectionspace.services.pottag.PottagList;
+
+/**
+ * Placeholder for server-side testing of Pottag service code.
+ * Currently empty.
+ *
+ * @version $Revision$
+ */
+public class PottagServiceTest {
+ //empty
+}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" ?>
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
+
+ <appender name="console" class="org.apache.log4j.ConsoleAppender">
+ <param name="Target" value="System.out" />
+ <layout class="org.apache.log4j.TTCCLayout">
+ <param name="DateFormat" value="ISO8601" />
+ </layout>
+ </appender>
+
+
+ <appender name="unit-tests"
+ class="org.apache.log4j.RollingFileAppender">
+ <param name="File" value="./target/unit-tests.log" />
+ <param name="MaxFileSize" value="10240KB" />
+ <param name="MaxBackupIndex" value="6" />
+ <layout class="org.apache.log4j.TTCCLayout">
+ <param name="DateFormat" value="ISO8601" />
+ </layout>
+ </appender>
+
+ <logger name="org.apache.commons.httpclient" additivity="false">
+ <level value="warn" />
+ <appender-ref ref="console" />
+ <appender-ref ref="unit-tests" />
+ </logger>
+
+ <logger name="httpclient.wire" additivity="false">
+ <level value="info" />
+ <appender-ref ref="console" />
+ <appender-ref ref="unit-tests" />
+ </logger>
+
+ <root>
+ <priority value="debug" />
+ <appender-ref ref="console" />
+ <appender-ref ref="unit-tests" />
+ </root>
+
+</log4j:configuration>
+
+
+
+
--- /dev/null
+
+<project name="propagation.3rdparty" default="package" basedir=".">
+ <description>
+ propagation service 3rdparty
+ </description>
+ <!-- set global properties for this build -->
+ <property name="services.trunk" value="../../.."/>
+ <!-- Environment should be declared before reading build.properties -->
+ <property environment="env" />
+ <property file="${services.trunk}/build.properties" />
+ <property name="mvn.opts" value="-V" />
+ <property name="src" location="src"/>
+
+ <condition property="osfamily-unix">
+ <os family="unix" />
+ </condition>
+ <condition property="osfamily-windows">
+ <os family="windows" />
+ </condition>
+
+ <target name="init" >
+ <!-- Create the time stamp -->
+ <tstamp/>
+ </target>
+
+ <target name="package" depends="package-unix,package-windows"
+ description="Package CollectionSpace Services" />
+ <target name="package-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="package-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="install" depends="install-unix,install-windows"
+ description="Install" />
+ <target name="install-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="install-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="clean" depends="clean-unix,clean-windows"
+ description="Delete target directories" >
+ <delete dir="${build}"/>
+ </target>
+ <target name="clean-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="clean" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="clean-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="clean" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="test" depends="test-unix,test-windows" description="Run tests" />
+ <target name="test-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="test" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="test-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="test" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="deploy" depends="install"
+ description="deploy propagation in ${jee.server.nuxeo}">
+ <!-- This target is obsolete. The Nuxeo artifacts are now created and deployed using the "csmake" tool
+ <ant antfile="nuxeo-platform-cs-propagation/build.xml" target="deploy" inheritall="false"/>
+ -->
+ </target>
+
+ <target name="undeploy"
+ description="undeploy propagation from ${jee.server.nuxeo}">
+ <!-- This target is obsolete. The Nuxeo artifacts are now created and deployed using the "csmake" tool
+ leaving this only for backwards compatibility reasons. -->
+ <ant antfile="nuxeo-platform-cs-propagation/build.xml" target="undeploy" inheritall="false"/>
+ </target>
+
+ <target name="dist"
+ description="generate distribution for propagation" depends="package">
+ <ant antfile="nuxeo-platform-cs-propagation/build.xml" target="dist" inheritall="false"/>
+ </target>
+
+
+</project>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="nuxeo-platform-cs-propagation" default="package" basedir=".">
+ <description>
+ propagation nuxeo document type
+ </description>
+ <!-- set global properties for this build -->
+ <property name="services.trunk" value="../../../.."/>
+ <!-- environment should be declared before reading build.properties -->
+ <property environment="env" />
+ <property file="${services.trunk}/build.properties" />
+ <property name="mvn.opts" value="-V" />
+ <property name="src" location="src"/>
+
+ <!-- JAR files used by CollectionSpace 4.0 and later -->
+ <property name="nuxeo.propagation.doctype.jars.all"
+ value="collectionspace.propagation.doctype.*.jar"/>
+ <property name="nuxeo.propagation.schema.jars.all"
+ value="collectionspace.propagation.schema.*.jar"/>
+ <!-- Legacy JAR files used by CollectionSpace 3.3 and earlier -->
+ <property name="nuxeo.propagation.legacy.jars.all"
+ value="org.collectionspace.services.propagation.3rdparty.nuxeo-*.jar"/>
+ <property name="nuxeo.propagation.legacy.jar"
+ value="org.collectionspace.services.propagation.3rdparty.nuxeo-${cspace.release}.jar"/>
+
+ <condition property="osfamily-unix">
+ <os family="unix" />
+ </condition>
+ <condition property="osfamily-windows">
+ <os family="windows" />
+ </condition>
+
+ <target name="init" >
+ <!-- Create the time stamp -->
+ <tstamp/>
+ </target>
+
+ <target name="package" depends="package-unix,package-windows"
+ description="Package CollectionSpace Services" />
+ <target name="package-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="package-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="install" depends="install-unix,install-windows"
+ description="Install" />
+ <target name="install-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="install-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="clean" depends="clean-unix,clean-windows"
+ description="Delete target directories" >
+ <delete dir="${build}"/>
+ </target>
+ <target name="clean-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="clean" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="clean-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="clean" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="test" depends="test-unix,test-windows" description="Run tests" />
+ <target name="test-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="test" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="test-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="test" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="deploy" depends="install"
+ description="deploy propagation doctype in ${jee.server.nuxeo}">
+ <!-- This target is obsolete. The Nuxeo artifacts are now created and deployed using the "csmake" tool
+ leaving this only for backwards compatibility reasons. -->
+ <copy file="${basedir}/target/${nuxeo.propagation.legacy.jar}"
+ todir="${jee.deploy.nuxeo.plugins}"/>
+ </target>
+
+ <target name="undeploy"
+ description="undeploy propagation doctype from ${jee.server.nuxeo}">
+ <delete>
+ <!-- Undeploy doctype and schema artifacts -->
+ <fileset dir="${jee.deploy.nuxeo.plugins}">
+ <include name="${nuxeo.propagation.doctype.jars.all}"/>
+ </fileset>
+ <fileset dir="${jee.deploy.nuxeo.plugins}">
+ <include name="${nuxeo.propagation.schema.jars.all}"/>
+ </fileset>
+ <!-- Undeploy legacy artifacts -->
+ <fileset dir="${jee.deploy.nuxeo.plugins}">
+ <include name="${nuxeo.propagation.legacy.jars.all}"/>
+ </fileset>
+ </delete>
+ <!-- Undeploy legacy artifacts from old deployment location through release 0.6 -->
+ <delete quiet="true">
+ <fileset dir="${jee.deploy.nuxeo.system}">
+ <include name="${nuxeo.propagation.legacy.jars.all}"/>
+ </fileset>
+ </delete>
+ </target>
+
+ <target name="dist"
+ description="generate distribution for propagation doctype" depends="package">
+ <copy todir="${services.trunk}/${dist.deploy.nuxeo.plugins}">
+ <fileset file="${basedir}/target/${nuxeo.propagation.legacy.jar}"/>
+ </copy>
+ </target>
+
+</project>
+
--- /dev/null
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <parent>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.propagation.3rdparty</artifactId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.propagation.3rdparty.nuxeo</artifactId>
+ <name>services.propagation.3rdparty.nuxeo</name>
+ <packaging>jar</packaging>
+ <description>
+ Propagation Nuxeo Document Type
+ </description>
+
+ <properties>
+ <ServiceName>propagation</ServiceName>
+ <NuxeoDocTypeName>Propagation</NuxeoDocTypeName>
+ <CommonSchemaName>propagations_common</CommonSchemaName>
+ <Lifecycle>cs_default</Lifecycle>
+ </properties>
+
+ <build>
+ <resources>
+ <resource>
+ <directory>src/main/resources</directory>
+ <filtering>true</filtering>
+ </resource>
+ <resource>
+ <directory>../../../../3rdparty/nuxeo/nuxeo-doctype/src/main/resources</directory>
+ <filtering>true</filtering>
+ </resource>
+ </resources>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <configuration>
+ <archive>
+ <manifestFile>target/classes/META-INF/MANIFEST.MF</manifestFile>
+ <manifestEntries>
+ <Bundle-Version>${eclipseVersion}</Bundle-Version>
+ <Bundle-ManifestVersion>2</Bundle-ManifestVersion>
+ </manifestEntries>
+ </archive>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
--- /dev/null
+<?xml version="1.0"?>
+
+<!--
+layouts-contrib.xml
+
+Layout file for configuring screen layouts in the
+user interface of Nuxeo EP's web application, for
+viewing or editing CollectionSpace records stored
+in the Nuxeo repository.
+
+See the "Nuxeo Book" for an introductory description
+of how to edit this file. For instance, for Nuxeo EP 5.3:
+http://doc.nuxeo.org/5.3/books/nuxeo-book/html/
+
+$LastChangedRevision: $
+$LastChangedDate: $
+-->
+
+<component name="org.collectionspace.propagation.layouts.webapp">
+
+ <extension target="org.nuxeo.ecm.platform.forms.layout.WebLayoutManager"
+ point="layouts">
+
+ <layout name="propagation">
+ <templates>
+ <template mode="any">/layouts/layout_default_template.xhtml</template>
+ </templates>
+ <rows>
+ <row><widget>propNumber</widget></row>
+ <row><widget>propType</widget></row>
+ <!-- <row><widget>propDate</widget></row> -->
+ <row><widget>germinationDate</widget></row>
+ <row><widget>propComments</widget></row>
+ <row><widget>propReason</widget></row>
+ </rows>
+
+ <widget name="propNumber" type="text">
+ <labels>
+ <label mode="any">propNumber</label>
+ </labels>
+ <translated>true</translated>
+ <fields>
+ <field schema="propagations_common">propNumber</field>
+ </fields>
+ <properties widgetMode="edit">
+ <property name="styleClass">dataInputText</property>
+ </properties>
+ </widget>
+
+ <widget name="propType" type="text">
+ <labels>
+ <label mode="any">propType</label>
+ </labels>
+ <translated>true</translated>
+ <fields>
+ <field schema="propagations_common">propType</field>
+ </fields>
+ <properties widgetMode="edit">
+ <property name="styleClass">dataInputText</property>
+ </properties>
+ </widget>
+
+ <!-- <widget name="propDate" type="text">
+ <labels>
+ <label mode="any">propDate</label>
+ </labels>
+ <translated>true</translated>
+ <fields>
+ <field schema="propagations_common">propDate</field>
+ </fields>
+ <properties widgetMode="edit">
+ <property name="styleClass">dataInputText</property>
+ </properties>
+ </widget> -->
+
+ <widget name="germinationDate" type="text">
+ <labels>
+ <label mode="any">germinationDate</label>
+ </labels>
+ <translated>true</translated>
+ <fields>
+ <field schema="propagations_common">germinationDate</field>
+ </fields>
+ <properties widgetMode="edit">
+ <property name="styleClass">dataInputText</property>
+ </properties>
+ </widget>
+
+ <widget name="propComments" type="text">
+ <labels>
+ <label mode="any">propComments</label>
+ </labels>
+ <translated>true</translated>
+ <fields>
+ <field schema="propagations_common">propComments</field>
+ </fields>
+ <properties widgetMode="edit">
+ <property name="styleClass">dataInputText</property>
+ </properties>
+ </widget>
+
+ <widget name="propReason" type="text">
+ <labels>
+ <label mode="any">propReason</label>
+ </labels>
+ <translated>true</translated>
+ <fields>
+ <field schema="propagations_common">propReason</field>
+ </fields>
+ <properties widgetMode="edit">
+ <property name="styleClass">dataInputText</property>
+ </properties>
+ </widget>
+
+ </layout>
+ </extension>
+</component>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+
+<!--
+ Propagation schema (XSD)
+
+ Entity : Propagation
+ Part : Common
+ Used for: Nuxeo EP core document type
+
+ $LastChangedRevision$
+ $LastChangedDate$
+-->
+
+<xs:schema
+ xmlns:xs="http://www.w3.org/2001/XMLSchema"
+ xmlns:ns="http://collectionspace.org/propagation/"
+ xmlns="http://collectionspace.org/propagation/"
+ targetNamespace="http://collectionspace.org/propagation/"
+ version="0.1">
+
+ <!-- Adapted from the Loans In schema; see http://wiki.collectionspace.org/display/collectionspace/Loans+In+Schema -->
+
+ <!-- Propagation Information -->
+ <xs:element name="propNumber" type="xs:string"/>
+ <xs:element name="propDate" type="structuredDateGroup"/>
+ <xs:element name="propReason" type="xs:string"/>
+ <xs:element name="propType" type="xs:string"/>
+ <xs:element name="propComments" type="xs:string"/>
+ <xs:element name="numStarted" type="xs:integer"/>
+
+ <!-- Seed propagation information -->
+ <xs:element name="scarStratGroupList" type="scarStratGroupList"/>
+ <xs:element name="extraSeeds" type="xs:boolean"/>
+ <xs:element name="spores" type="xs:boolean"/>
+
+ <!-- Cutting propagation information -->
+ <xs:element name="cuttingType" type="xs:string"/>
+ <xs:element name="hormone" type="xs:string"/>
+ <xs:element name="concentration" type="xs:string"/>
+ <xs:element name="wounded" type="xs:boolean"/>
+
+ <!-- Living plant material information -->
+ <xs:element name="plantType" type="xs:string"/>
+
+ <xs:element name="propActivityGroupList" type="propActivityGroupList"/>
+
+ <!-- Success of the propagation -->
+ <xs:element name="germinationDate" type="xs:date"/>
+ <xs:element name="successRate" type="xs:string"/>
+
+ <xs:complexType name="scarStratGroupList">
+ <xs:sequence>
+ <xs:element name="scarStratGroup" type="scarStratGroup" minOccurs="0" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+ <xs:complexType name="scarStratGroup">
+ <xs:sequence>
+ <xs:element name="scarStrat" type="xs:string"/>
+ <xs:element name="duration" type="xs:integer"/>
+ <xs:element name="durationUnit" type="xs:string"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="propActivityGroupList">
+ <xs:sequence>
+ <xs:element name="propActivityGroup" type="propActivityGroup" minOccurs="0"
+ maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+ <xs:complexType name="propActivityGroup">
+ <xs:sequence>
+ <xs:element name="order" type="xs:integer"/>
+ <xs:element name="activityDate" type="structuredDateGroup"/>
+ <xs:element name="activityType" type="xs:string"/>
+ <xs:element name="propCount" type="xs:integer"/>
+ <xs:element name="medium" type="xs:string"/>
+ <xs:element name="potSize" type="xs:string"/>
+ <xs:element name="nurseryLocation" type="xs:string"/>
+ <xs:element name="conditions" type="xs:string"/>
+ <xs:element name="chemicalApplied" type="xs:string"/>
+ <xs:element name="activityConcentration" type="xs:string"/>
+ <xs:element name="activityComments" type="xs:string"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="structuredDateGroup">
+ <xs:sequence>
+ <xs:element name="dateDisplayDate" type="xs:string"/>
+ <xs:element name="dateAssociation" type="xs:string"/>
+ <xs:element name="dateEarliestSingleYear" type="xs:integer"/>
+ <xs:element name="dateEarliestSingleMonth" type="xs:integer"/>
+ <xs:element name="dateEarliestSingleDay" type="xs:integer"/>
+ <xs:element name="dateEarliestSingleEra" type="xs:string"/>
+ <xs:element name="dateEarliestSingleCertainty" type="xs:string"/>
+ <xs:element name="dateEarliestSingleQualifier" type="xs:string"/>
+ <xs:element name="dateEarliestSingleQualifierValue" type="xs:integer"/>
+ <xs:element name="dateEarliestSingleQualifierUnit" type="xs:string"/>
+ <xs:element name="dateLatestYear" type="xs:integer"/>
+ <xs:element name="dateLatestMonth" type="xs:integer"/>
+ <xs:element name="dateLatestDay" type="xs:integer"/>
+ <xs:element name="dateLatestEra" type="xs:string"/>
+ <xs:element name="dateLatestCertainty" type="xs:string"/>
+ <xs:element name="dateLatestQualifier" type="xs:string"/>
+ <xs:element name="dateLatestQualifierValue" type="xs:integer"/>
+ <xs:element name="dateLatestQualifierUnit" type="xs:string"/>
+ <xs:element name="datePeriod" type="xs:string"/>
+ <xs:element name="dateNote" type="xs:string"/>
+ <xs:element name="dateEarliestScalarValue" type="xs:date"/>
+ <xs:element name="dateLatestScalarValue" type="xs:date"/>
+ <xs:element name="scalarValuesComputed" type="xs:boolean"/>
+ </xs:sequence>
+ </xs:complexType>
+</xs:schema>
--- /dev/null
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <artifactId>org.collectionspace.services.propagation</artifactId>
+ <groupId>org.collectionspace.services</groupId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>org.collectionspace.services.propagation.3rdparty</artifactId>
+ <name>services.propagation.3rdparty</name>
+ <packaging>pom</packaging>
+
+ <description>
+ 3rd party build for propagation service
+ </description>
+
+ <properties>
+ <ServiceName>propagation</ServiceName>
+ <NuxeoDocTypeName>Propagation</NuxeoDocTypeName>
+ <CommonSchemaName>propagations_common</CommonSchemaName>
+ <TenantPrefix>Tenant</TenantPrefix>
+ </properties>
+
+ <modules>
+ <!-- This module is obsolete. The Nuxeo artifacts are now created and deployed using the "csmake" tool
+ <module>nuxeo-platform-cs-propagation</module>
+ -->
+ </modules>
+</project>
--- /dev/null
+
+<project name="propagation" default="package" basedir=".">
+ <description>
+ propagation service
+ </description>
+ <!-- set global properties for this build -->
+ <property name="services.trunk" value="../.."/>
+ <!-- Environment should be declared before reading build.properties -->
+ <property environment="env" />
+ <property file="${services.trunk}/build.properties" />
+ <property name="mvn.opts" value="-V" />
+ <property name="src" location="src"/>
+
+ <condition property="osfamily-unix">
+ <os family="unix" />
+ </condition>
+ <condition property="osfamily-windows">
+ <os family="windows" />
+ </condition>
+
+ <target name="package" depends="package-unix,package-windows"
+ description="Package CollectionSpace Services" />
+
+ <target name="package-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="package-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="package" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+
+ <target name="install" depends="install-unix,install-windows"
+ description="Install" />
+ <target name="install-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="install-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="install" />
+ <arg value="-Dmaven.test.skip=true" />
+ <arg value="-f" />
+ <arg value="${basedir}/pom.xml" />
+ <arg value="-N" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="clean" depends="clean-unix,clean-windows"
+ description="Delete target directories" >
+ <delete dir="${build}"/>
+ </target>
+ <target name="clean-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="clean" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="clean-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="clean" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="test" depends="test-unix,test-windows" description="Run tests" />
+ <target name="test-unix" if="osfamily-unix">
+ <exec executable="mvn" failonerror="true">
+ <arg value="test" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+ <target name="test-windows" if="osfamily-windows">
+ <exec executable="cmd" failonerror="true">
+ <arg value="/c" />
+ <arg value="mvn.bat" />
+ <arg value="test" />
+ <arg value="${mvn.opts}" />
+ </exec>
+ </target>
+
+ <target name="deploy" depends="install"
+ description="deploy propagation service">
+ <ant antfile="3rdparty/build.xml" target="deploy" inheritall="false"/>
+ </target>
+
+ <target name="undeploy"
+ description="undeploy propagation service">
+ <ant antfile="3rdparty/build.xml" target="undeploy" inheritall="false"/>
+ </target>
+
+ <target name="dist" depends="package"
+ description="distribute propagation service">
+ <ant antfile="3rdparty/build.xml" target="dist" inheritall="false"/>
+ </target>
+
+</project>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.propagation</artifactId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>org.collectionspace.services.propagation.client</artifactId>
+ <name>services.propagation.client</name>
+
+ <dependencies>
+ <!-- keep slf4j dependencies on the top -->
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ <scope>test</scope>
+ </dependency>
+<!-- CollectionSpace dependencies -->
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.common</artifactId>
+ <optional>true</optional>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.propagation.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.person.client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.authority.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+<!-- External dependencies -->
+ <dependency>
+ <groupId>org.testng</groupId>
+ <artifactId>testng</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-jaxrs</artifactId>
+ <!-- filter out unwanted jars -->
+ <exclusions>
+ <exclusion>
+ <groupId>tjws</groupId>
+ <artifactId>webserver</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-jaxb-provider</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-multipart-provider</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>commons-httpclient</groupId>
+ <artifactId>commons-httpclient</artifactId>
+ <version>3.1</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>collectionspace-services-propagation-client</finalName>
+ </build>
+</project>
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+ *
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+ *
+ * Copyright (c) 2009 Regents of the University of California
+ *
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+ *
+ * You may obtain a copy of the ECL 2.0 License at
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+ */
+package org.collectionspace.services.client;
+
+/**
+ * PropagationClient.java
+ *
+ * Client for the Propagation service. Defines the service's name and URL
+ * path constants and supplies the proxy interface used for REST calls.
+ *
+ * $LastChangedRevision$
+ * $LastChangedDate$
+ *
+ */
+public class PropagationClient extends AbstractCommonListPoxServiceClientImpl<PropagationProxy, Object> {
+
+ // Service identifier; reused as the URL path component and payload name.
+ public static final String SERVICE_NAME = "propagations";
+ public static final String SERVICE_PATH_COMPONENT = SERVICE_NAME;
+ public static final String SERVICE_PATH = "/" + SERVICE_PATH_COMPONENT;
+ // Proxy path ends with a trailing slash, as required by the @Path on the proxy.
+ public static final String SERVICE_PATH_PROXY = SERVICE_PATH + "/";
+ public static final String SERVICE_PAYLOAD_NAME = SERVICE_NAME;
+
+ public PropagationClient() throws Exception {
+ super();
+ }
+
+ /* (non-Javadoc)
+ * @see org.collectionspace.services.client.AbstractServiceClientImpl#getServicePathComponent()
+ */
+ @Override
+ public String getServicePathComponent() {
+ return SERVICE_PATH_COMPONENT;
+ }
+
+ @Override
+ public String getServiceName() {
+ return SERVICE_NAME;
+ }
+
+ /** Returns the proxy interface class used for calls to this service. */
+ @Override
+ public Class<PropagationProxy> getProxyClass() {
+ return PropagationProxy.class;
+ }
+
+}
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+ *
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+ *
+ * Copyright (c) 2009 Regents of the University of California
+ *
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+ *
+ * You may obtain a copy of the ECL 2.0 License at
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+ */
+package org.collectionspace.services.client;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+
/**
 * PropagationProxy.java
 *
 * RESTEasy proxy interface for the Propagation service. It declares no
 * operations of its own: all standard CRUD and list operations are
 * inherited from CollectionSpaceCommonListPoxProxy. This interface only
 * binds the service's URL path and the XML request/response media types.
 *
 * $LastChangedRevision$
 * $LastChangedDate$
 */
@Path(PropagationClient.SERVICE_PATH_PROXY)
@Produces({"application/xml"})
@Consumes({"application/xml"})
public interface PropagationProxy extends CollectionSpaceCommonListPoxProxy {
}
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+ *
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+ *
+ * Copyright © 2009 Regents of the University of California
+ *
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+ *
+ * You may obtain a copy of the ECL 2.0 License at
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.collectionspace.services.client.test;
+
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.ws.rs.core.Response;
+
+import org.collectionspace.services.PersonJAXBSchema;
+import org.collectionspace.services.client.CollectionSpaceClient;
+import org.collectionspace.services.client.PropagationClient;
+import org.collectionspace.services.client.PersonAuthorityClient;
+import org.collectionspace.services.client.PersonAuthorityClientUtils;
+import org.collectionspace.services.client.PayloadOutputPart;
+import org.collectionspace.services.client.PoxPayloadIn;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.common.authorityref.AuthorityRefList;
+import org.collectionspace.services.common.api.GregorianCalendarDateTimeUtils;
+import org.collectionspace.services.jaxb.AbstractCommonList;
+import org.collectionspace.services.propagation.PropActivityGroup;
+import org.collectionspace.services.propagation.PropActivityGroupList;
+import org.collectionspace.services.propagation.PropagationsCommon;
+import org.collectionspace.services.person.PersonTermGroup;
+
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.Test;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * PropagationAuthRefsTest, carries out Authority References tests against a
+ * deployed and running Propagation (aka Loans In) Service.
+ *
+ * $LastChangedRevision$
+ * $LastChangedDate$
+ */
+public class PropagationAuthRefsTest extends BaseServiceTest<AbstractCommonList> {
+
+ private final String CLASS_NAME = PropagationAuthRefsTest.class.getName();
+ private final Logger logger = LoggerFactory.getLogger(CLASS_NAME);
+
+ // Instance variables specific to this test.
+ final String PERSON_AUTHORITY_NAME = "TestPersonAuth";
+ private String knownResourceId = null;
+ private List<String> propagationIdsCreated = new ArrayList<String>();
+ private List<String> personIdsCreated = new ArrayList<String>();
+ private String personAuthCSID = null;
+ private String propagatedByRefName = null; // an authRef field
+ private final static String CURRENT_DATE_UTC = GregorianCalendarDateTimeUtils.currentDateUTC();
+
+ /* (non-Javadoc)
+ * @see org.collectionspace.services.client.test.BaseServiceTest#getClientInstance()
+ */
+ @Override
+ protected CollectionSpaceClient getClientInstance() {
+ throw new UnsupportedOperationException(); //method not supported (or needed) in this test class
+ }
+
+ // ---------------------------------------------------------------
+ // CRUD tests : CREATE tests
+ // ---------------------------------------------------------------
+ // Success outcomes
+ @Test(dataProvider="testName", dataProviderClass=AbstractServiceTestImpl.class)
+ public void createWithAuthRefs(String testName) throws Exception {
+ testSetup(STATUS_CREATED, ServiceRequestType.CREATE);
+
+ // Submit the request to the service and store the response.
+ String identifier = createIdentifier();
+
+ // Create all the person refs and entities
+ createPersonRefs();
+
+ // Create a new Loans In resource.
+ //
+ // One or more fields in this resource will be PersonAuthority
+ // references, and will refer to Person resources by their refNames.
+ PropagationClient propagationClient = new PropagationClient();
+ PoxPayloadOut propagationInstance = createPropagationInstance(
+ "propagationNumber-" + identifier,
+ this.propagatedByRefName,
+ CURRENT_DATE_UTC);
+ Response response = propagationClient.create(propagationInstance);
+ try {
+ int statusCode = response.getStatus();
+ if (logger.isDebugEnabled()) {
+ logger.debug(testName + ": status = " + statusCode);
+ }
+ Assert.assertTrue(testRequestType.isValidStatusCode(statusCode), invalidStatusCodeMessage(testRequestType, statusCode));
+ Assert.assertEquals(statusCode, testExpectedStatusCode);
+
+ // Store the ID returned from the first resource created
+ // for additional tests below.
+ if (knownResourceId == null) {
+ knownResourceId = extractId(response);
+ }
+
+ // Store the IDs from every resource created by tests,
+ // so they can be deleted after tests have been run.
+ propagationIdsCreated.add(extractId(response));
+ } finally {
+ response.close();
+ }
+ }
+
+ /**
+ * Create one or more Person records that will be used to create refNames (referenced terms) in our
+ * test propagation records.
+ *
+ * @throws Exception
+ */
+ protected void createPersonRefs() throws Exception {
+ PersonAuthorityClient personAuthClient = new PersonAuthorityClient();
+ // Create a temporary PersonAuthority resource, and its corresponding
+ // refName by which it can be identified.
+ PoxPayloadOut multipart = PersonAuthorityClientUtils.createPersonAuthorityInstance(
+ PERSON_AUTHORITY_NAME, PERSON_AUTHORITY_NAME, personAuthClient.getCommonPartName());
+ Response res = personAuthClient.create(multipart);
+ try {
+ int statusCode = res.getStatus();
+ Assert.assertTrue(testRequestType.isValidStatusCode(statusCode),
+ invalidStatusCodeMessage(testRequestType, statusCode));
+ Assert.assertEquals(statusCode, STATUS_CREATED);
+ personAuthCSID = extractId(res);
+ } finally {
+ res.close();
+ }
+
+ // Create temporary Person resources, and their corresponding refNames
+ // by which they can be identified.
+ String authRefName = PersonAuthorityClientUtils.getAuthorityRefName(personAuthCSID, personAuthClient);
+ String csid = createPerson("Propye", "ThePropagator", "proppy", authRefName);
+ personIdsCreated.add(csid);
+
+ // Safe the refName for later use -see createWithAuthRefs() method
+ this.propagatedByRefName = PersonAuthorityClientUtils.getPersonRefName(personAuthCSID, csid, personAuthClient);
+ }
+
+ protected String createPerson(String firstName, String surName, String shortId, String authRefName ) throws Exception {
+ PersonAuthorityClient personAuthClient = new PersonAuthorityClient();
+ Map<String, String> personInfo = new HashMap<String,String>();
+ personInfo.put(PersonJAXBSchema.FORE_NAME, firstName);
+ personInfo.put(PersonJAXBSchema.SUR_NAME, surName);
+ personInfo.put(PersonJAXBSchema.SHORT_IDENTIFIER, shortId);
+ List<PersonTermGroup> personTerms = new ArrayList<PersonTermGroup>();
+ PersonTermGroup term = new PersonTermGroup();
+ String termName = firstName + " " + surName;
+ term.setTermDisplayName(termName);
+ term.setTermName(termName);
+ personTerms.add(term);
+ PoxPayloadOut multipart =
+ PersonAuthorityClientUtils.createPersonInstance(personAuthCSID,
+ authRefName, personInfo, personTerms, personAuthClient.getItemCommonPartName());
+
+ Response res = personAuthClient.createItem(personAuthCSID, multipart);
+ try {
+ int statusCode = res.getStatus();
+
+ Assert.assertTrue(testRequestType.isValidStatusCode(statusCode),
+ invalidStatusCodeMessage(testRequestType, statusCode));
+ Assert.assertEquals(statusCode, STATUS_CREATED);
+ return extractId(res);
+ } finally {
+ res.close();
+ }
+ }
+
+ // Success outcomes
+ @Test(dataProvider="testName", dataProviderClass=AbstractServiceTestImpl.class,
+ dependsOnMethods = {"createWithAuthRefs"})
+ public void readAndCheckAuthRefs(String testName) throws Exception {
+ // Perform setup.
+ testSetup(STATUS_OK, ServiceRequestType.READ);
+
+ PropagationClient propagationClient = new PropagationClient();
+ Response res = propagationClient.read(knownResourceId);
+ try {
+ assertStatusCode(res, testName);
+ // Extract the common part from the response.
+ PoxPayloadIn input = new PoxPayloadIn((String)res.readEntity(String.class));
+ PropagationsCommon propagationCommon = (PropagationsCommon) extractPart(input,
+ propagationClient.getCommonPartName(), PropagationsCommon.class);
+ Assert.assertNotNull(propagationCommon);
+ if (logger.isDebugEnabled()){
+ logger.debug(objectAsXmlString(propagationCommon, PropagationsCommon.class));
+ }
+ } finally {
+ if (res != null) {
+ res.close();
+ }
+ }
+
+ // Get the authority references
+ res = propagationClient.getAuthorityRefs(knownResourceId); // AuthorityRefList
+ AuthorityRefList list = null;
+ try {
+ assertStatusCode(res, testName);
+ list = (AuthorityRefList) res.getEntity();
+ Assert.assertNotNull(list);
+ } finally {
+ if (res != null) {
+ res.close();
+ }
+ }
+
+ int expectedAuthRefs = personIdsCreated.size();
+ List<AuthorityRefList.AuthorityRefItem> items = list.getAuthorityRefItem();
+ int numAuthRefsFound = items.size();
+ if(logger.isDebugEnabled()){
+ logger.debug("Expected " + expectedAuthRefs + " authority references, found " + numAuthRefsFound);
+ }
+
+ // Optionally output additional data about list members for debugging.
+ boolean iterateThroughList = true;
+ if(iterateThroughList && logger.isDebugEnabled()){
+ int i = 0;
+ for(AuthorityRefList.AuthorityRefItem item : items){
+ logger.debug(testName + ": list-item[" + i + "] Field:" +
+ item.getSourceField() + "= " +
+ item.getAuthDisplayName() +
+ item.getItemDisplayName());
+ logger.debug(testName + ": list-item[" + i + "] refName=" +
+ item.getRefName());
+ logger.debug(testName + ": list-item[" + i + "] URI=" +
+ item.getUri());
+ i++;
+ }
+ }
+
+ Assert.assertEquals(numAuthRefsFound, expectedAuthRefs,
+ "Did not find all expected authority references! " + "Expected " + expectedAuthRefs + ", found " + numAuthRefsFound);
+ }
+
+
+ // ---------------------------------------------------------------
+ // Cleanup of resources created during testing
+ // ---------------------------------------------------------------
+
+ /**
+ * Deletes all resources created by tests, after all tests have been run.
+ *
+ * This cleanup method will always be run, even if one or more tests fail.
+ * For this reason, it attempts to remove all resources created
+ * at any point during testing, even if some of those resources
+ * may be expected to be deleted by certain tests.
+ * @throws Exception
+ */
+ @AfterClass(alwaysRun=true)
+ public void cleanUp() throws Exception {
+ String noTest = System.getProperty("noTestCleanup");
+ if (Boolean.TRUE.toString().equalsIgnoreCase(noTest)) {
+ if (logger.isDebugEnabled()) {
+ logger.debug("Skipping Cleanup phase ...");
+ }
+ return;
+ }
+ if (logger.isDebugEnabled()) {
+ logger.debug("Cleaning up temporary resources created for testing ...");
+ }
+
+ //
+ // Delete all the propagation records we created
+ PropagationClient propagationClient = new PropagationClient();
+ for (String resourceId : propagationIdsCreated) {
+ // Note: Any non-success responses are ignored and not reported.
+ propagationClient.delete(resourceId).close(); // alternative to propagationClient.delete(resourceId).releaseConnection();
+ }
+
+ //
+ // Delete Person resource(s) (before PersonAuthority resources).
+ PersonAuthorityClient personAuthClient = new PersonAuthorityClient();
+ for (String resourceId : personIdsCreated) {
+ // Note: Any non-success responses are ignored and not reported.
+ personAuthClient.deleteItem(personAuthCSID, resourceId).close();
+ }
+ if (personAuthCSID != null) {
+ personAuthClient.delete(personAuthCSID).close();
+ }
+ }
+
+ // ---------------------------------------------------------------
+ // Utility methods used by tests above
+ // ---------------------------------------------------------------
+ public String getServiceName() {
+ return PropagationClient.SERVICE_NAME;
+ }
+
+ @Override
+ public String getServicePathComponent() {
+ return PropagationClient.SERVICE_PATH_COMPONENT;
+ }
+
+ private PoxPayloadOut createPropagationInstance(String propagationNumber,
+ String propagatedBy,
+ String returnDate) throws Exception {
+ PropagationsCommon propagationCommon = new PropagationsCommon();
+ propagationCommon.setPropNumber(propagationNumber);
+ propagationCommon.setPropBy(propagatedBy);
+ propagationCommon.setPropNumber(returnDate);
+
+ PropActivityGroupList propActivityGroupList = new PropActivityGroupList();
+ PropActivityGroup propActivityGroup = new PropActivityGroup();
+ propActivityGroup.setOrder(BigInteger.valueOf(42));
+ propActivityGroupList.getPropActivityGroup().add(propActivityGroup);
+ propagationCommon.setPropActivityGroupList(propActivityGroupList);
+
+ PoxPayloadOut multipart = new PoxPayloadOut(this.getServicePathComponent());
+ PayloadOutputPart commonPart = multipart.addPart(new PropagationClient().getCommonPartName(), propagationCommon);
+
+ if (logger.isDebugEnabled()) {
+ logger.debug("to be created, propagation common");
+ logger.debug(objectAsXmlString(propagationCommon, PropagationsCommon.class));
+ }
+
+ return multipart;
+ }
+
+ @Override
+ protected Class<AbstractCommonList> getCommonListType() {
+ return AbstractCommonList.class;
+ }
+
+ @Override
+ protected CollectionSpaceClient getClientInstance(String clientPropertiesFilename) throws Exception {
+ // TODO Auto-generated method stub
+ throw new UnsupportedOperationException(); //method not supported (or needed) in this test class
+ }
+}
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+ *
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+ *
+ * Copyright © 2009 Regents of the University of California
+ *
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+ *
+ * You may obtain a copy of the ECL 2.0 License at
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.collectionspace.services.client.test;
+
+//import java.util.ArrayList;
+import java.util.List;
+import javax.ws.rs.core.Response;
+
+import org.collectionspace.services.client.CollectionSpaceClient;
+import org.collectionspace.services.client.PropagationClient;
+import org.collectionspace.services.client.PayloadOutputPart;
+import org.collectionspace.services.client.PoxPayloadOut;
+import org.collectionspace.services.jaxb.AbstractCommonList;
+import org.collectionspace.services.propagation.PropActivityGroup;
+import org.collectionspace.services.propagation.PropActivityGroupList;
+import org.collectionspace.services.propagation.PropagationsCommon;
+
+import org.testng.Assert;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * PropagationServiceTest, carries out tests against a
+ * deployed and running Propagation (aka Loans In) Service.
+ *
+ * $LastChangedRevision$
+ * $LastChangedDate$
+ */
+public class PropagationServiceTest extends AbstractPoxServiceTestImpl<AbstractCommonList, PropagationsCommon> {
+
+ /** The logger. */
+ private final String CLASS_NAME = PropagationServiceTest.class.getName();
+ private final Logger logger = LoggerFactory.getLogger(CLASS_NAME);
+ // Instance variables specific to this test.
+ /** The service path component. */
+
+ /* (non-Javadoc)
+ * @see org.collectionspace.services.client.test.BaseServiceTest#getClientInstance()
+ */
+ @Override
+ protected CollectionSpaceClient getClientInstance() throws Exception {
+ return new PropagationClient();
+ }
+
+ // ---------------------------------------------------------------
+ // CRUD tests : CREATE tests
+ // ---------------------------------------------------------------
+
+ // Success outcomes
+
+ /* (non-Javadoc)
+ * @see org.collectionspace.services.client.test.ServiceTest#create(java.lang.String)
+ */
+ @Override
+// @Test(dataProvider = "testName", dataProviderClass = AbstractServiceTestImpl.class)
+ public void create(String testName) throws Exception {
+ // Perform setup, such as initializing the type of service request
+ // (e.g. CREATE, DELETE), its valid and expected status codes, and
+ // its associated HTTP method name (e.g. POST, DELETE).
+ setupCreate();
+
+ // Submit the request to the service and store the response.
+ PropagationClient client = new PropagationClient();
+ String identifier = createIdentifier();
+ PoxPayloadOut multipart = createPropagationInstance(identifier);
+ String newID = null;
+ Response res = client.create(multipart);
+ try {
+ int statusCode = res.getStatus();
+
+ // Check the status code of the response: does it match
+ // the expected response(s)?
+ //
+ // Specifically:
+ // Does it fall within the set of valid status codes?
+ // Does it exactly match the expected status code?
+ if (logger.isDebugEnabled()) {
+ logger.debug(testName + ": status = " + statusCode);
+ }
+ Assert.assertTrue(testRequestType.isValidStatusCode(statusCode),
+ invalidStatusCodeMessage(testRequestType, statusCode));
+ Assert.assertEquals(statusCode, testExpectedStatusCode);
+
+ newID = extractId(res);
+ } finally {
+ if (res != null) {
+ res.close();
+ }
+ }
+
+ // Store the ID returned from the first resource created
+ // for additional tests below.
+ if (knownResourceId == null) {
+ knownResourceId = newID;
+ if (logger.isDebugEnabled()) {
+ logger.debug(testName + ": knownResourceId=" + knownResourceId);
+ }
+ }
+
+ // Store the IDs from every resource created by tests,
+ // so they can be deleted after tests have been run.
+ allResourceIdsCreated.add(newID);
+ }
+
+ // ---------------------------------------------------------------
+ // CRUD tests : READ tests
+ // ---------------------------------------------------------------
+
+ @Override
+ protected void compareReadInstances(PropagationsCommon original, PropagationsCommon fromRead) throws Exception {
+ PropActivityGroupList propActivityGroupList = fromRead.getPropActivityGroupList();
+ Assert.assertNotNull(propActivityGroupList);
+
+ List<PropActivityGroup> propActivityGroups = propActivityGroupList.getPropActivityGroup();
+ Assert.assertNotNull(propActivityGroups);
+ Assert.assertTrue(propActivityGroups.size() > 0);
+
+ if (logger.isDebugEnabled()) {
+ logger.debug("UTF-8 data sent=" + getUTF8DataFragment() + "\n"
+ + "UTF-8 data received=" + fromRead.getPropComments());
+ }
+
+ Assert.assertEquals(fromRead.getPropComments(), getUTF8DataFragment(),
+ "UTF-8 data retrieved '" + fromRead.getPropComments() + "' does not match expected data '" + getUTF8DataFragment());
+ }
+
+ @Override
+ protected void compareUpdatedInstances(PropagationsCommon propagationCommon,
+ PropagationsCommon updatedPropagationCommon) throws Exception {
+ // Check selected fields in the updated common part.
+ Assert.assertEquals(updatedPropagationCommon.getPropNumber(),
+ propagationCommon.getPropNumber(),
+ "Data in updated object did not match submitted data.");
+
+ if (logger.isDebugEnabled()) {
+ logger.debug("UTF-8 data sent=" + propagationCommon.getPropComments() + "\n"
+ + "UTF-8 data received=" + updatedPropagationCommon.getPropComments());
+ }
+ Assert.assertTrue(updatedPropagationCommon.getPropComments().contains(getUTF8DataFragment()),
+ "UTF-8 data retrieved '" + updatedPropagationCommon.getPropComments()
+ + "' does not contain expected data '" + getUTF8DataFragment());
+ Assert.assertEquals(updatedPropagationCommon.getPropComments(),
+ propagationCommon.getPropComments(),
+ "Data in updated object did not match submitted data.");
+ }
+
+ // ---------------------------------------------------------------
+ // Utility methods used by tests above
+ // ---------------------------------------------------------------
+
+ @Override
+ public String getServiceName() {
+ return PropagationClient.SERVICE_NAME;
+ }
+
+ /* (non-Javadoc)
+ * @see org.collectionspace.services.client.test.BaseServiceTest#getServicePathComponent()
+ */
+ @Override
+ public String getServicePathComponent() {
+ return PropagationClient.SERVICE_PATH_COMPONENT;
+ }
+
+ @Override
+ protected PoxPayloadOut createInstance(String identifier) throws Exception {
+ return createPropagationInstance(identifier);
+ }
+
+ /**
+ * Creates the propagation instance.
+ *
+ * @param identifier the identifier
+ * @return the multipart output
+ * @throws Exception
+ */
+ private PoxPayloadOut createPropagationInstance(String identifier) throws Exception {
+ return createPropagationInstance(
+ "propNumber-" + identifier,
+ "returnDate-" + identifier);
+ }
+
+ /**
+ * Creates the propagation instance.
+ *
+ * @param propNumber the propagation number
+ * @param returnDate the return date
+ * @return the multipart output
+ * @throws Exception
+ */
+ private PoxPayloadOut createPropagationInstance(String propNumber, String returnDate) throws Exception {
+
+ PropagationsCommon propagationCommon = new PropagationsCommon();
+ propagationCommon.setPropNumber(propNumber);
+ PropActivityGroupList propActivityGroupList = new PropActivityGroupList();
+ PropActivityGroup propActivityGroup = new PropActivityGroup();
+ propActivityGroupList.getPropActivityGroup().add(propActivityGroup);
+ propagationCommon.setPropActivityGroupList(propActivityGroupList);
+ propagationCommon.setPropReason("For Surfboards of the 1960s exhibition.");
+ propagationCommon.setPropComments(getUTF8DataFragment());
+
+ PoxPayloadOut multipart = new PoxPayloadOut(this.getServicePathComponent());
+ PayloadOutputPart commonPart = multipart.addPart(new PropagationClient().getCommonPartName(), propagationCommon);
+
+ if (logger.isDebugEnabled()) {
+ logger.debug("to be created, propagation common");
+ logger.debug(objectAsXmlString(propagationCommon, PropagationsCommon.class));
+ }
+
+ return multipart;
+ }
+
+ /*
+ * For convenience and terseness, this test method is the base of the test execution dependency chain. Other test methods may
+ * refer to this method in their @Test annotation declarations.
+ */
+ @Override
+ public void CRUDTests(String testName) {
+ // // Needed for TestNG dependency chain.
+
+ }
+
+ @Override
+ protected PoxPayloadOut createInstance(String commonPartName, String identifier) throws Exception {
+ PoxPayloadOut result = createPropagationInstance(identifier);
+ return result;
+ }
+
+ @Override
+ protected PropagationsCommon updateInstance(PropagationsCommon propagationCommon) {
+ // Update the content of this resource.
+ propagationCommon.setPropNumber("updated-" + propagationCommon.getPropNumber());
+ propagationCommon.setPropComments("updated-" + propagationCommon.getPropComments());
+
+ return propagationCommon;
+ }
+
+ @Override
+ protected CollectionSpaceClient getClientInstance(String clientPropertiesFilename) throws Exception {
+ return new PropagationClient();
+ }
+}
--- /dev/null
+log4j.rootLogger=debug, stdout, R
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+
+# Pattern to output the caller's file name and line number.
+log4j.appender.stdout.layout.ConversionPattern=%d %-5p [%t] [%c:%L] %m%n
+
+log4j.appender.R=org.apache.log4j.RollingFileAppender
+log4j.appender.R.File=target/test-client.log
+
+log4j.appender.R.MaxFileSize=100KB
+# Keep one backup file
+log4j.appender.R.MaxBackupIndex=1
+
+log4j.appender.R.layout=org.apache.log4j.PatternLayout
+log4j.appender.R.layout.ConversionPattern=%d %-5p [%t] [%c:%L] %m%n
+
+#packages
+log4j.logger.org.collectionspace=DEBUG
+log4j.logger.org.apache=INFO
+log4j.logger.httpclient=INFO
+log4j.logger.org.jboss.resteasy=INFO
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<bindings xmlns="http://java.sun.com/xml/ns/jaxb" if-exists="true" version="2.1">
+ <!--
+
+This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.11
+See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
+Any modifications to this file will be lost upon recompilation of the source schema.
+Generated on: 2017.03.15 at 05:25:57 PM PDT
+
+ -->
+ <bindings xmlns:tns="http://collectionspace.org/services/propagation" if-exists="true" scd="x-schema::tns">
+ <schemaBindings map="false">
+ <package name="org.collectionspace.services.propagation"/>
+ </schemaBindings>
+ <bindings if-exists="true" scd="tns:propagations_common">
+ <class ref="org.collectionspace.services.propagation.PropagationsCommon"/>
+ </bindings>
+ <bindings if-exists="true" scd="~tns:structuredDateGroup">
+ <class ref="org.collectionspace.services.propagation.StructuredDateGroup"/>
+ </bindings>
+ <bindings if-exists="true" scd="~tns:scarStratGroupList">
+ <class ref="org.collectionspace.services.propagation.ScarStratGroupList"/>
+ </bindings>
+ <bindings if-exists="true" scd="~tns:propActivityGroupList">
+ <class ref="org.collectionspace.services.propagation.PropActivityGroupList"/>
+ </bindings>
+ <bindings if-exists="true" scd="~tns:scarStratGroup">
+ <class ref="org.collectionspace.services.propagation.ScarStratGroup"/>
+ </bindings>
+ <bindings if-exists="true" scd="~tns:propActivityGroup">
+ <class ref="org.collectionspace.services.propagation.PropActivityGroup"/>
+ </bindings>
+ </bindings>
+</bindings>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <artifactId>org.collectionspace.services.propagation</artifactId>
+ <groupId>org.collectionspace.services</groupId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>org.collectionspace.services.propagation.jaxb</artifactId>
+ <name>services.propagation.jaxb</name>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <finalName>collectionspace-services-propagation-jaxb</finalName>
+ <defaultGoal>install</defaultGoal>
+ <plugins>
+ <plugin>
+ <groupId>org.jvnet.jaxb2.maven2</groupId>
+ <artifactId>maven-jaxb2-plugin</artifactId>
+ </plugin>
+ </plugins>
+ </build>
+</project>
+
--- /dev/null
+/**
+ *
+ */
+package org.collectionspace.services;
+
/**
 * Field-name constants for the propagations_common schema part, for use
 * when referencing propagation fields programmatically.
 */
public interface PropagationJAXBSchema {
    // Interface fields are implicitly public static final.
    String PROP_NUMBER = "propNumber";
    String PROP_REASON = "propReason";
    String PROP_TYPE = "propType";
    String PROP_COMMENTS = "propComments";
    String GERMINATION_DATE = "germinationDate";
}
+
+
--- /dev/null
+package org.collectionspace.services;
+
/**
 * Field-name constants for propagation list-item entries, for use when
 * referencing list-item fields programmatically.
 */
public interface PropagationListItemJAXBSchema {
    // Interface fields are implicitly public static final.
    String PROP_NUMBER = "propNumber";
    String PROP_ACTIVITY_GROUP_LIST = "propActivityGroupList";
    String GERMINATION_DATE = "germinationDate";
    String CSID = "csid";
    // NOTE(review): constant is named URI but its value is "url" — looks
    // like a copy-paste slip; confirm against the list schema before
    // changing, since consumers may match on the literal value.
    String URI = "url";
}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+
+<!--
 Propagation schema (XSD)
+
+ Entity : Propagation
+ Part : Common
+ Used for: JAXB binding between XML and Java objects
+
+ $LastChangedRevision$
+ $LastChangedDate$
+-->
+
+<xs:schema
+ xmlns:xs="http://www.w3.org/2001/XMLSchema"
+ xmlns:jaxb="http://java.sun.com/xml/ns/jaxb"
+ jaxb:version="1.0" elementFormDefault="unqualified"
+ xmlns:ns="http://collectionspace.org/services/propagation"
+ xmlns="http://collectionspace.org/services/propagation"
+ targetNamespace="http://collectionspace.org/services/propagation"
+ version="0.1"
+>
+
+<!--
+ Avoid XmlRootElement nightmare:
+ See http://weblogs.java.net/blog/kohsuke/archive/2006/03/why_does_jaxb_p.html
+-->
+<!-- See http://wiki.collectionspace.org/display/collectionspace/Loans+In+Schema -->
+
 <!-- Propagation Information Group -->
+ <xs:element name="propagations_common">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element name="propNumber" type="xs:string"/>
+ <xs:element name="propDate" type="structuredDateGroup"/>
+ <xs:element name="propReason" type="xs:string"/>
+ <xs:element name="propType" type="xs:string"/>
+ <xs:element name="propBy" type="xs:string"/>
+ <xs:element name="propComments" type="xs:string"/>
+ <xs:element name="numStarted" type="xs:integer"/>
+
+ <!-- Seed propagation information -->
+ <xs:element name="scarStratGroupList" type="scarStratGroupList"/>
+ <xs:element name="extraSeeds" type="xs:boolean"/>
+ <xs:element name="spores" type="xs:boolean"/>
+
+ <!-- Cutting propagation information -->
+ <xs:element name="cuttingType" type="xs:string"/>
+ <xs:element name="hormone" type="xs:string"/>
+ <xs:element name="concentration" type="xs:string"/>
+ <xs:element name="wounded" type="xs:boolean"/>
+
+ <!-- Living plant material information -->
+ <xs:element name="plantType" type="xs:string"/>
+
+ <xs:element name="propActivityGroupList" type="propActivityGroupList"/>
+
+ <!-- Success of the propagation -->
+ <xs:element name="germinationDate" type="xs:date"/>
+ <xs:element name="successRate" type="xs:string"/>
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:complexType name="scarStratGroupList">
+ <xs:sequence>
+ <xs:element name="scarStratGroup" type="scarStratGroup" minOccurs="0" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="scarStratGroup">
+ <xs:sequence>
+ <xs:element name="scarStrat" type="xs:string"/>
+ <xs:element name="duration" type="xs:integer"/>
+ <xs:element name="durationUnit" type="xs:string"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="propActivityGroupList">
+ <xs:sequence>
+ <xs:element name="propActivityGroup" type="propActivityGroup" minOccurs="0"
+ maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="propActivityGroup">
+ <xs:sequence>
+ <xs:element name="order" type="xs:integer"/>
+ <xs:element name="activityDate" type="structuredDateGroup"/>
+ <xs:element name="activityType" type="xs:string"/>
+ <xs:element name="propCount" type="xs:integer"/>
+ <xs:element name="medium" type="xs:string"/>
+ <xs:element name="potSize" type="xs:string"/>
+ <xs:element name="nurseryLocation" type="xs:string"/>
+ <xs:element name="conditions" type="xs:string"/>
+ <xs:element name="chemicalApplied" type="xs:string"/>
+ <xs:element name="activityComments" type="xs:string"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="structuredDateGroup">
+ <xs:sequence>
+ <xs:element name="dateDisplayDate" type="xs:string"/>
+ <xs:element name="dateAssociation" type="xs:string"/>
+ <xs:element name="dateEarliestSingleYear" type="xs:integer"/>
+ <xs:element name="dateEarliestSingleMonth" type="xs:integer"/>
+ <xs:element name="dateEarliestSingleDay" type="xs:integer"/>
+ <xs:element name="dateEarliestSingleEra" type="xs:string"/>
+ <xs:element name="dateEarliestSingleCertainty" type="xs:string"/>
+ <xs:element name="dateEarliestSingleQualifier" type="xs:string"/>
+ <xs:element name="dateEarliestSingleQualifierValue" type="xs:integer"/>
+ <xs:element name="dateEarliestSingleQualifierUnit" type="xs:string"/>
+ <xs:element name="dateLatestYear" type="xs:integer"/>
+ <xs:element name="dateLatestMonth" type="xs:integer"/>
+ <xs:element name="dateLatestDay" type="xs:integer"/>
+ <xs:element name="dateLatestEra" type="xs:string"/>
+ <xs:element name="dateLatestCertainty" type="xs:string"/>
+ <xs:element name="dateLatestQualifier" type="xs:string"/>
+ <xs:element name="dateLatestQualifierValue" type="xs:integer"/>
+ <xs:element name="dateLatestQualifierUnit" type="xs:string"/>
+ <xs:element name="datePeriod" type="xs:string"/>
+ <xs:element name="dateNote" type="xs:string"/>
+ <xs:element name="dateEarliestScalarValue" type="xs:date"/>
+ <xs:element name="dateLatestScalarValue" type="xs:date"/>
+ <xs:element name="scalarValuesComputed" type="xs:boolean"/>
+ </xs:sequence>
+ </xs:complexType>
+</xs:schema>
+
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- A comment. -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.main</artifactId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.propagation</artifactId>
+ <name>services.propagation</name>
+ <packaging>pom</packaging>
+
+ <dependencies>
+ </dependencies>
+
+ <modules>
+ <module>jaxb</module>
+ <module>service</module>
+ <module>3rdparty</module>
+ <module>client</module>
+ </modules>
+
+</project>
+
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <parent>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.propagation</artifactId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>org.collectionspace.services.propagation.service</artifactId>
+ <name>services.propagation.service</name>
+ <packaging>jar</packaging>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.common</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.propagation.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.propagation.client</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.collectionobject.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <!-- External dependencies -->
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.testng</groupId>
+ <artifactId>testng</artifactId>
+ </dependency>
+
+ <!-- javax -->
+
+ <dependency>
+ <groupId>javax.security</groupId>
+ <artifactId>jaas</artifactId>
+ <version>1.0.01</version>
+ <scope>provided</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>dom4j</groupId>
+ <artifactId>dom4j</artifactId>
+ <scope>provided</scope>
+ </dependency>
+
+ <!-- jboss -->
+
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-jaxrs</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>tjws</groupId>
+ <artifactId>webserver</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-jaxb-provider</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.jboss.resteasy</groupId>
+ <artifactId>resteasy-multipart-provider</artifactId>
+ </dependency>
+
+ <!-- nuxeo -->
+
+ <dependency>
+ <groupId>org.nuxeo.ecm.core</groupId>
+ <artifactId>nuxeo-core-api</artifactId>
+ <exclusions>
+ <exclusion>
+ <artifactId>jboss-remoting</artifactId>
+ <groupId>jboss</groupId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ </dependencies>
+
+ <build>
+ <finalName>collectionspace-services-propagation</finalName>
+ </build>
+</project>
+
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<profilesXml xmlns="http://maven.apache.org/PROFILES/1.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/PROFILES/1.0.0 http://maven.apache.org/xsd/profiles-1.0.0.xsd">
+</profilesXml>
\ No newline at end of file
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+
+ * Copyright 2009 University of California at Berkeley
+
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+
+ * You may obtain a copy of the ECL 2.0 License at
+
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.collectionspace.services.propagation;
+
+import org.collectionspace.services.client.PropagationClient;
+import org.collectionspace.services.common.NuxeoBasedResource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+
+@Path(PropagationClient.SERVICE_PATH)
+@Consumes("application/xml")
+@Produces("application/xml")
+public class PropagationResource extends NuxeoBasedResource {
+
+ /** Class-wide logger; static and final so one instance is shared rather than created per request-scoped resource. */
+ private static final Logger logger = LoggerFactory.getLogger(PropagationResource.class);
+
+ /**
+ * Returns the source-control revision string for this resource.
+ *
+ * @return the revision identifier (expanded from the SCM keyword when enabled)
+ */
+ @Override
+ protected String getVersionString() {
+ final String lastChangeRevision = "$LastChangedRevision$";
+ return lastChangeRevision;
+ }
+
+ /**
+ * Returns the name under which this service is registered.
+ *
+ * @return the propagation service name
+ */
+ @Override
+ public String getServiceName() {
+ return PropagationClient.SERVICE_NAME;
+ }
+
+ /**
+ * Returns the JAXB class of the common part of a propagation record.
+ *
+ * @return {@code PropagationsCommon.class}
+ */
+ @Override
+ public Class<PropagationsCommon> getCommonPartClass() {
+ return PropagationsCommon.class;
+ }
+}
+
+
+
+
+
+
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+
+ * Copyright 2009 University of California at Berkeley
+
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+
+ * You may obtain a copy of the ECL 2.0 License at
+
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.collectionspace.services.propagation.nuxeo;
+
+/**
+ * PropagationConstants specifies constants for the Loans In service
+ *
+ */
+public class PropagationConstants {
+
+ public final static String NUXEO_DOCTYPE = "Propagation";
+ public final static String NUXEO_SCHEMA_NAME = "propagation";
+ public final static String NUXEO_DC_TITLE = "CollectionSpace-Propagation";
+}
--- /dev/null
+/**
+ * This document is a part of the source code and related artifacts
+ * for CollectionSpace, an open source collections management system
+ * for museums and related institutions:
+
+ * http://www.collectionspace.org
+ * http://wiki.collectionspace.org
+
+ * Copyright 2009 University of California at Berkeley
+
+ * Licensed under the Educational Community License (ECL), Version 2.0.
+ * You may not use this file except in compliance with this License.
+
+ * You may obtain a copy of the ECL 2.0 License at
+
+ * https://source.collectionspace.org/collection-space/LICENSE.txt
+
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.collectionspace.services.propagation.nuxeo;
+
+import org.collectionspace.services.propagation.PropagationsCommon;
+import org.collectionspace.services.nuxeo.client.java.NuxeoDocumentModelHandler;
+
+/**
+ * PropagationDocumentModelHandler: document model handler for the Propagation
+ * service. All mapping between {@code PropagationsCommon} and the underlying
+ * Nuxeo document model is inherited from {@code NuxeoDocumentModelHandler};
+ * no propagation-specific overrides are needed at present.
+ *
+ * $LastChangedRevision$
+ * $LastChangedDate$
+ */
+public class PropagationDocumentModelHandler
+ extends NuxeoDocumentModelHandler<PropagationsCommon> {
+}
+
--- /dev/null
+package org.collectionspace.services.propagation.nuxeo;
+
+import org.collectionspace.services.common.context.ServiceContext;
+import org.collectionspace.services.common.document.InvalidDocumentException;
+import org.collectionspace.services.common.document.ValidatorHandler;
+import org.collectionspace.services.common.document.DocumentHandler.Action;
+
+public class PropagationValidatorHandler implements ValidatorHandler {
+
+ /**
+ * Validates a propagation document for the given CRUD action.
+ * No validation rules are implemented yet: this handler accepts every
+ * document. Add rule checks here and throw InvalidDocumentException
+ * when a rule is violated.
+ */
+ @Override
+ public void validate(Action action, ServiceContext ctx)
+ throws InvalidDocumentException {
+ // Intentionally a no-op; the leftover System.out debug print was removed.
+ }
+
+}
--- /dev/null
+package org.collectionspace.services.test;
+
+//import org.collectionspace.services.propagation.Propagation;
+//import org.collectionspace.services.propagation.PropagationList;
+
+/**
+ * Placeholder for server-side testing of Propagation service code.
+ * (The previous comment said "Loan In", a copy/paste error.)
+ *
+ * @version $Revision$
+ */
+public class PropagationServiceTest {
+ // Intentionally empty: no server-side tests have been written yet.
+}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" ?>
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
+
+ <appender name="console" class="org.apache.log4j.ConsoleAppender">
+ <param name="Target" value="System.out" />
+ <layout class="org.apache.log4j.TTCCLayout">
+ <param name="DateFormat" value="ISO8601" />
+ </layout>
+ </appender>
+
+
+ <appender name="unit-tests"
+ class="org.apache.log4j.RollingFileAppender">
+ <param name="File" value="./target/unit-tests.log" />
+ <param name="MaxFileSize" value="10240KB" />
+ <param name="MaxBackupIndex" value="6" />
+ <layout class="org.apache.log4j.TTCCLayout">
+ <param name="DateFormat" value="ISO8601" />
+ </layout>
+ </appender>
+
+ <logger name="org.apache.commons.httpclient" additivity="false">
+ <level value="warn" />
+ <appender-ref ref="console" />
+ <appender-ref ref="unit-tests" />
+ </logger>
+
+ <logger name="httpclient.wire" additivity="false">
+ <level value="info" />
+ <appender-ref ref="console" />
+ <appender-ref ref="unit-tests" />
+ </logger>
+
+ <root>
+ <priority value="debug" />
+ <appender-ref ref="console" />
+ <appender-ref ref="unit-tests" />
+ </root>
+
+</log4j:configuration>
+
+
+
+
*/
package org.collectionspace.services.report.nuxeo;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
import java.io.InputStream;
+import java.nio.file.Files;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.HashMap;
// export report to pdf and build a response with the bytes
//JasperExportManager.exportReportToPdf(jasperprint);
- // Report will be to a byte output array.
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
-
JRExporter exporter = null;
// Strip extension from report filename.
String outputFilename = reportFileName;
// fill the report
JasperPrint jasperPrint = JasperFillManager.fillReport(fileStream, params,conn);
+ // Report will be to a temporary file.
+ File tempOutputFile = Files.createTempFile("report-", null).toFile();
+ FileOutputStream tempOutputStream = new FileOutputStream(tempOutputFile);
exporter.setParameter(JRExporterParameter.JASPER_PRINT, jasperPrint);
- exporter.setParameter(JRExporterParameter.OUTPUT_STREAM, baos);
+ exporter.setParameter(JRExporterParameter.OUTPUT_STREAM, tempOutputStream);
exporter.exportReport();
- result = new ByteArrayInputStream(baos.toByteArray());
-
+ tempOutputStream.close();
+
+ result = new FileInputStream(tempOutputFile);
return result;
} catch (SQLException sqle) {
// SQLExceptions can be chained. We have at least one exception, so
<artifactId>org.collectionspace.services.authorization-mgt.client</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.authentication.jaxb</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<dependency>
<groupId>org.collectionspace.services</groupId>
<artifactId>org.collectionspace.services.collectionobject.client</artifactId>
http://java.sun.com/xml/ns/persistence/orm http://java.sun.com/xml/ns/persistence/orm_1_0.xsd" xmlns="http://java.sun.com/xml/ns/persistence" xmlns:orm="http://java.sun.com/xml/ns/persistence/orm" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<persistence-unit name="org.collectionspace.services.authentication">
<class>org.collectionspace.services.authentication.User</class>
- <class>org.collectionspace.services.authentication.Role</class>
- <class>org.collectionspace.services.authentication.UserRole</class>
+ <class>org.collectionspace.services.authorization.Role</class>
<properties>
<property name="hibernate.ejb.cfgfile" value="hibernate.cfg.xml"/>
--- /dev/null
+This project contains classes that model CollectionSpace structured dates, and a parser to create structured dates from a display date string.
+
+Maven 3 is required to build. ANTLR 4 is used for parser generation.
+
+The ANTLR 4 grammar is located in src/main/antlr4. During the build, source code is generated into target/generated-sources/antlr4. Be sure to add this as a source folder in your IDE, so that it will be able to find the generated classes.
+
+Parser tests are located in src/test/resources/test-dates.yaml. The comments in that file describe how to add tests.
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <parent>
+ <groupId>org.collectionspace.services</groupId>
+ <artifactId>org.collectionspace.services.main</artifactId>
+ <version>4.5-SNAPSHOT</version>
+ </parent>
+
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>org.collectionspace.services.structureddate</artifactId>
+ <name>services.structureddate</name>
+ <packaging>jar</packaging>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.testng</groupId>
+ <artifactId>testng</artifactId>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.yaml</groupId>
+ <artifactId>snakeyaml</artifactId>
+ <version>1.6</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>commons-beanutils</groupId>
+ <artifactId>commons-beanutils-core</artifactId>
+ <version>1.8.3</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>2.6</version>
+ </dependency>
+ <dependency>
+ <groupId>joda-time</groupId>
+ <artifactId>joda-time</artifactId>
+ <version>2.3</version>
+ </dependency>
+ <dependency>
+ <groupId>org.antlr</groupId>
+ <artifactId>antlr4-runtime</artifactId>
+ <version>4.1</version>
+ </dependency>
+ <dependency>
+ <groupId>org.antlr</groupId>
+ <artifactId>antlr4-maven-plugin</artifactId>
+ <version>4.1</version>
+ </dependency>
+ <dependency>
+ <groupId>org.sonatype.m2e.extras</groupId>
+ <artifactId>org.sonatype.m2e.antlr</artifactId>
+ <version>0.13.0.20110622-1538</version>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.antlr</groupId>
+ <artifactId>antlr4-maven-plugin</artifactId>
+ <version>4.1</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>antlr4</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+
+ <pluginManagement>
+ <plugins>
+ <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
+ <plugin>
+ <groupId>org.eclipse.m2e</groupId>
+ <artifactId>lifecycle-mapping</artifactId>
+ <version>1.0.0</version>
+ <configuration>
+ <lifecycleMappingMetadata>
+ <pluginExecutions>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.antlr</groupId>
+ <artifactId>
+ antlr4-maven-plugin
+ </artifactId>
+ <versionRange>
+ [4.1,)
+ </versionRange>
+ <goals>
+ <goal>antlr4</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <configurator>
+ <id>org.sonatype.m2e.antlr.antlrConfigurator</id>
+ </configurator>
+ </action>
+ </pluginExecution>
+ </pluginExecutions>
+ </lifecycleMappingMetadata>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+ </build>
+</project>
--- /dev/null
+grammar StructuredDate;
+
+/*
+ * This is a grammar for ANTLR 4 (http://www.antlr.org/).
+ *
+ */
+
+/*
+ * Parser rules
+ */
+
+// Entry rule: one display date, optionally ended with "." or "?", then EOF.
+oneDisplayDate: displayDate ( DOT | QUESTION )? EOF ;
+
+displayDate: uncertainDate
+| certainDate
+/* TODO: Need to decide what "before" and "after" actually mean
+| beforeOrAfterDate
+*/
+;
+
+// A date qualified by "circa" ("c", "ca", "circa").
+uncertainDate: CIRCA certainDate ;
+
+certainDate: hyphenatedRange
+| singleInterval
+;
+
+beforeOrAfterDate: ( BEFORE | AFTER ) singleInterval ;
+
+// Ranges written with a hyphen or dash, plus compact range notations.
+hyphenatedRange: singleInterval ( HYPHEN | DASH ) singleInterval
+| nthCenturyRange
+| monthInYearRange
+| quarterInYearRange
+| strDayInMonthRange
+| numDayInMonthRange
+;
+
+// One contiguous interval, listed from special cases to finest granularity.
+singleInterval: yearSpanningWinter
+| partialYear
+| quarterYear
+| halfYear
+| millennium
+| partialCentury
+| quarterCentury
+| halfCentury
+| century
+| partialDecade
+| decade
+| year
+| month
+| date
+;
+
+quarterInYearRange: nthQuarterInYearRange
+| strSeasonInYearRange
+;
+
+date: numDate
+| strDate
+| invStrDate
+;
+
+month: monthYear
+| invMonthYear
+;
+
+// e.g. "winter 1947/1948 AD" -- a winter spanning the turn of a year.
+yearSpanningWinter: WINTER COMMA? numYear SLASH numYear era ;
+
+partialYear: partOf numYear era ;
+
+quarterYear: seasonYear
+| invSeasonYear
+| nthQuarterYear
+;
+
+halfYear: nthHalf numYear era ;
+
+year: numYear era ;
+
+partialDecade: partOf numDecade era ;
+
+decade: numDecade era ;
+
+partialCentury: partOf ( strCentury | numCentury ) era ;
+
+quarterCentury: nthQuarter ( strCentury | numCentury ) era ;
+
+halfCentury: nthHalf ( strCentury | numCentury ) era ;
+
+century: ( strCentury | numCentury ) era ;
+
+millennium: nth MILLENNIUM era ;
+
+// e.g. "june 4, 1876" or "june 4th 1876".
+strDate: strMonth ( numDayOfMonth | nth ) COMMA? numYear era;
+invStrDate: era numYear COMMA? strMonth numDayOfMonth ;
+strDayInMonthRange: strMonth numDayOfMonth ( HYPHEN | DASH ) numDayOfMonth COMMA? numYear era ;
+monthInYearRange: strMonth ( HYPHEN | DASH ) strMonth COMMA? numYear era ;
+nthQuarterInYearRange: nthQuarter ( HYPHEN | DASH ) nthQuarter COMMA? numYear era ;
+strSeasonInYearRange: strSeason ( HYPHEN | DASH ) strSeason COMMA? numYear era ;
+numDayInMonthRange: numMonth SLASH numDayOfMonth ( HYPHEN | DASH ) numDayOfMonth SLASH numYear era ;
+// All-numeric dates, slash- or hyphen-separated.
+numDate: num SLASH num SLASH num era
+| num HYPHEN num HYPHEN num era ;
+monthYear: strMonth COMMA? numYear era ;
+invMonthYear: era numYear COMMA? strMonth ;
+seasonYear: strSeason COMMA? numYear era ;
+invSeasonYear: era numYear COMMA? strSeason ;
+nthQuarterYear: nthQuarter numYear era ;
+nthQuarter: ( nth | LAST ) QUARTER ;
+nthHalf: ( nth | LAST ) HALF ;
+numDecade: TENS ;
+strCentury: nth CENTURY ;
+numCentury: HUNDREDS ;
+nthCenturyRange: allOrPartOf nth ( HYPHEN | DASH ) allOrPartOf nth CENTURY era ;
+strSeason: SPRING | SUMMER | FALL | WINTER ;
+// allOrPartOf and era each have an empty alternative, i.e. they are optional.
+allOrPartOf: partOf | ;
+partOf: EARLY | MIDDLE | LATE ;
+nth: NTHSTR | FIRST | SECOND | THIRD | FOURTH ;
+strMonth: MONTH | SHORTMONTH DOT? ;
+era: BC | AD | ;
+numYear: NUMBER ;
+numMonth: NUMBER ;
+numDayOfMonth: NUMBER ;
+num: NUMBER ;
+
+
+/*
+ * Lexer rules
+ */
+
+// Whitespace is skipped entirely.
+WS: [ \t\r\n]+ -> skip;
+CIRCA: ('c' | 'ca') DOT? | 'circa' ;
+SPRING: 'spring' | 'spr' ;
+SUMMER: 'summer' | 'sum' ;
+WINTER: 'winter' | 'win' ;
+FALL: 'fall' | 'autumn' | 'fal' | 'aut' ;
+EARLY: 'early' ;
+MIDDLE: 'middle' | 'mid' ( HYPHEN | DOT )?;
+LATE: 'late' ;
+BEFORE: 'before' | 'pre' HYPHEN? ;
+AFTER: 'after' | 'post' HYPHEN? ;
+FIRST: 'first' ;
+SECOND: 'second' ;
+THIRD: 'third' ;
+FOURTH: 'fourth' ;
+LAST: 'last' ;
+QUARTER: 'quarter' ;
+HALF: 'half' ;
+CENTURY: 'century' ;
+MILLENNIUM: 'millennium' ;
+MONTH: 'january' | 'february' | 'march' | 'april' | 'may' | 'june' | 'july' | 'august' | 'september' | 'october' | 'november' | 'december' ;
+// Note: no 'may' here -- "may" is only matched by the full-name MONTH rule.
+SHORTMONTH: 'jan' | 'feb' | 'mar' | 'apr' | 'jun' | 'jul' | 'aug' | 'sep' | 'sept' | 'oct' | 'nov' | 'dec' ;
+BC: 'bc' | 'bce' | 'b.c.' | 'b.c.e.' ;
+AD: 'ad' | 'a.d.' | 'ce' | 'c.e.';
+// Ordinals: '1st'/'2nd'/'3rd' plus the 11th-13th exceptions; digits 0,4-9 take 'th'.
+NTHSTR: [0-9]*? ([0456789] 'th' | '1st' | '2nd' | '3rd' | '11th' | '12th' | '13th') ;
+// e.g. "1900s"/"1900's" (century forms) and "1940s"/"1940's" (decade forms).
+HUNDREDS: [0-9]*? '00' '\''? 's';
+TENS: [0-9]*? '0' '\''? 's';
+NUMBER: [0-9]+ ;
+COMMA: ',' ;
+HYPHEN: '-' ;
+DASH: [—–] ; /* EM DASH, EN DASH */
+SLASH: '/' ;
+DOT: '.' ;
+QUESTION: '?' ;
+// Catch-all rules keep the lexer from failing on unrecognized input.
+STRING: [a-z]+ ;
+OTHER: . ;
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+/**
+ * Certainty terms for structured dates. Each value wraps the CollectionSpace
+ * vocabulary item URN that identifies the term.
+ */
+public enum Certainty {
+ AFTER ("urn:cspace:botgarden.cspace.berkeley.edu:vocabularies:name(datecertainty):item:name(after)'After'"),
+ APPROXIMATELY ("urn:cspace:botgarden.cspace.berkeley.edu:vocabularies:name(datecertainty):item:name(approximate)'Approximate'"),
+ BEFORE ("urn:cspace:botgarden.cspace.berkeley.edu:vocabularies:name(datecertainty):item:name(before)'Before'"),
+ CIRCA ("urn:cspace:botgarden.cspace.berkeley.edu:vocabularies:name(datecertainty):item:name(circa)'Circa'"),
+ POSSIBLY ("urn:cspace:botgarden.cspace.berkeley.edu:vocabularies:name(datecertainty):item:name(possibly)'Possibly'"),
+ PROBABLY ("urn:cspace:botgarden.cspace.berkeley.edu:vocabularies:name(datecertainty):item:name(probably)'Probably'");
+
+ private final String value;
+
+ private Certainty(String value) {
+ this.value = value;
+ }
+
+ /** Returns the vocabulary item URN for this certainty term. */
+ @Override
+ public String toString() {
+ return value;
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+import java.util.Objects;
+
+import org.apache.commons.lang.builder.EqualsBuilder;
+
+/**
+ * A structured date: year/month/day plus era, certainty, and a qualifier
+ * (type/value/unit). Any field may be null, meaning "not specified".
+ */
+public class Date {
+ public static final Era DEFAULT_ERA = Era.CE;
+
+ private Integer year;
+ private Integer month;
+ private Integer day;
+ private Era era;
+ private Certainty certainty;
+ private QualifierType qualifierType;
+ private Integer qualifierValue;
+ private QualifierUnit qualifierUnit;
+
+ /** Creates a date with every field unset. */
+ public Date() {
+ this(null, null, null, null, null, null, null, null);
+ }
+
+ public Date(Integer year, Integer month, Integer day) {
+ this(year, month, day, null, null, null, null, null);
+ }
+
+ public Date(Integer year, Integer month, Integer day, Era era) {
+ this(year, month, day, era, null, null, null, null);
+ }
+
+ public Date(Integer year, Integer month, Integer day, Era era, Certainty certainty, QualifierType qualifierType, Integer qualifierValue, QualifierUnit qualifierUnit) {
+ this.year = year;
+ this.month = month;
+ this.day = day;
+ this.era = era;
+ this.certainty = certainty;
+ this.qualifierType = qualifierType;
+ this.qualifierValue = qualifierValue;
+ this.qualifierUnit = qualifierUnit;
+ }
+
+ /** Sets the era and returns this date, for call chaining. */
+ public Date withEra(Era era) {
+ setEra(era);
+
+ return this;
+ }
+
+ /** Sets the qualifier and returns this date, for call chaining. */
+ public Date withQualifier(QualifierType qualifierType, Integer qualifierValue, QualifierUnit qualifierUnit) {
+ this.setQualifier(qualifierType, qualifierValue, qualifierUnit);
+
+ return this;
+ }
+
+ @Override
+ public String toString() {
+ return
+ "\t\tyear: " + getYear() + "\n" +
+ "\t\tmonth: " + getMonth() + "\n" +
+ "\t\tday: " + getDay() + "\n" +
+ "\t\tera: " + getEra() + "\n" +
+ "\t\tcertainty: " + getCertainty() + "\n" +
+ "\t\tqualifierType: " + getQualifierType() + "\n" +
+ "\t\tqualifierValue: " + getQualifierValue() + "\n" +
+ "\t\tqualifierUnit: " + getQualifierUnit() + "\n";
+ }
+
+ /** Returns a new Date with every field copied from this one. */
+ public Date copy() {
+ Date newDate = new Date();
+
+ newDate.setYear(getYear());
+ newDate.setMonth(getMonth());
+ newDate.setDay(getDay());
+ newDate.setEra(getEra());
+ newDate.setCertainty(getCertainty());
+ newDate.setQualifierType(getQualifierType());
+ newDate.setQualifierValue(getQualifierValue());
+ newDate.setQualifierUnit(getQualifierUnit());
+
+ return newDate;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null) {
+ return false;
+ }
+
+ if (obj == this) {
+ return true;
+ }
+
+ // Consider all subclasses of Date to be equal to each other, as long
+ // as the Date fields are equal.
+
+ if (!Date.class.isAssignableFrom(obj.getClass())) {
+ return false;
+ }
+
+ Date that = (Date) obj;
+
+ return
+ new EqualsBuilder()
+ .append(this.getYear(), that.getYear())
+ .append(this.getMonth(), that.getMonth())
+ .append(this.getDay(), that.getDay())
+ .append(this.getEra(), that.getEra())
+ .append(this.getCertainty(), that.getCertainty())
+ .append(this.getQualifierType(), that.getQualifierType())
+ .append(this.getQualifierValue(), that.getQualifierValue())
+ .append(this.getQualifierUnit(), that.getQualifierUnit())
+ .isEquals();
+ }
+
+ /**
+ * Computes a hash code from exactly the fields that equals() compares.
+ * Previously equals() was overridden without hashCode(), which breaks
+ * the equals/hashCode contract and hash-based collections.
+ */
+ @Override
+ public int hashCode() {
+ return Objects.hash(getYear(), getMonth(), getDay(), getEra(),
+ getCertainty(), getQualifierType(), getQualifierValue(), getQualifierUnit());
+ }
+
+ public Integer getYear() {
+ return year;
+ }
+
+ public void setYear(Integer year) {
+ this.year = year;
+ }
+
+ public Integer getMonth() {
+ return month;
+ }
+
+ public void setMonth(Integer month) {
+ this.month = month;
+ }
+
+ public Integer getDay() {
+ return day;
+ }
+
+ public void setDay(Integer day) {
+ this.day = day;
+ }
+
+ public Era getEra() {
+ return era;
+ }
+
+ public void setEra(Era era) {
+ this.era = era;
+ }
+
+ public Certainty getCertainty() {
+ return certainty;
+ }
+
+ public void setCertainty(Certainty certainty) {
+ this.certainty = certainty;
+ }
+
+ public QualifierType getQualifierType() {
+ return qualifierType;
+ }
+
+ public void setQualifierType(QualifierType qualifierType) {
+ this.qualifierType = qualifierType;
+ }
+
+ public Integer getQualifierValue() {
+ return qualifierValue;
+ }
+
+ public void setQualifierValue(Integer qualifierValue) {
+ this.qualifierValue = qualifierValue;
+ }
+
+ public QualifierUnit getQualifierUnit() {
+ return qualifierUnit;
+ }
+
+ public void setQualifierUnit(QualifierUnit qualifierUnit) {
+ this.qualifierUnit = qualifierUnit;
+ }
+
+ public void setQualifier(QualifierType qualifierType, Integer qualifierValue, QualifierUnit qualifierUnit) {
+ this.qualifierType = qualifierType;
+ this.qualifierValue = qualifierValue;
+ this.qualifierUnit = qualifierUnit;
+ }
+}
+
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+import org.joda.time.Chronology;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeConstants;
+import org.joda.time.Days;
+import org.joda.time.IllegalFieldValueException;
+import org.joda.time.MutableDateTime;
+import org.joda.time.Years;
+import org.joda.time.chrono.GJChronology;
+import org.joda.time.format.DateTimeFormat;
+import org.joda.time.format.DateTimeFormatter;
+
+public class DateUtils {
+ private static final DateTimeFormatter monthFormatter = DateTimeFormat.forPattern("MMMM");
+ private static final DateTimeFormatter scalarDateFormatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
+
+ // The chronology to use for date calculations, which are done using the joda-time library.
+ // See http://www.joda.org/joda-time/apidocs/org/joda/time/Chronology.html for descriptions of
+ // the chronologies supported by joda-time.
+
+ // GJChronology (http://www.joda.org/joda-time/apidocs/org/joda/time/chrono/GJChronology.html)
+ // seems best for representing a mix of modern and historical dates, as might be seen by an
+ // anthropology museum.
+
+ private static final Chronology chronology = GJChronology.getInstance();
+
+ // Define the DateTime that serves as the basis for circa calculations, using the algorithm
+ // ported from the XDB date parser. Its comment states:
+ //
+ // We define circa year/century specifications offsets
+ // as +/- 5% of the difference between that year/century
+ // and the present (2100), so that the farther we go back
+ // in time, the wider the range of meaning of "circa."
+
+ private static final DateTime circaBaseDateTime = new DateTime(2100, 12, 31, 0, 0, 0, 0, chronology);
+
+ /**
+ * Gets the number (1-12) of a month for a given name.
+ *
+ * @param monthName The name of the month
+ * @return The number of the month, between 1 and 12
+ */
+ public static int getMonthByName(String monthName) {
+ // Normalize "sept" to "sep", since DateTimeFormat doesn't
+ // understand the former.
+
+ if (monthName.equals("sept")) {
+ monthName = "sep";
+ }
+
+ return monthFormatter.parseDateTime(monthName).getMonthOfYear();
+ }
+
+ /**
+ * Gets the number of days in a given month.
+ *
+ * @param month The month number, between 1 and 12
+ * @param year The year (in order to account for leap years)
+ * @param era The era of the year; if null, Date.DEFAULT_ERA is assumed
+ * @return The number of days in the month
+ */
+ public static int getDaysInMonth(int month, int year, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ // Resolve era/year/month in the GJ chronology, then ask for the
+ // maximum day-of-month, which accounts for leap years.
+ DateTime dateTime = new DateTime(chronology)
+ .withEra((era == Era.BCE) ? DateTimeConstants.BC : DateTimeConstants.AD)
+ .withYearOfEra(year)
+ .withMonthOfYear(month);
+
+ return dateTime.dayOfMonth().getMaximumValue();
+ }
+
+ /**
+ * Gets the Date representing the first day of a given quarter year.
+ *
+ * @param quarter The quarter, between 1 and 4
+ * @param year The year
+ * @return The first day of the quarter year
+ */
+ public static Date getQuarterYearStartDate(int quarter, int year) {
+ int startMonth = getQuarterYearStartMonth(quarter);
+
+ return new Date(year, startMonth, 1);
+ }
+
+ /**
+ * Gets the Date representing the last day of a given quarter year.
+ *
+ * @param quarter The quarter, between 1 and 4
+ * @param year The year
+ * @param era The era of the year; if null, Date.DEFAULT_ERA is assumed
+ * @return The last day of the quarter year
+ */
+ public static Date getQuarterYearEndDate(int quarter, int year, Era era) {
+ int endMonth = getQuarterYearEndMonth(quarter);
+
+ return new Date(year, endMonth, DateUtils.getDaysInMonth(endMonth, year, era));
+ }
+
+ /**
+ * Gets the first month of a given quarter in a year.
+ *
+ * @param quarter The quarter, between 1 and 4
+ * @return The number of the first month in the quarter
+ */
+ public static int getQuarterYearStartMonth(int quarter) {
+ return ((3 * (quarter-1)) + 1);
+ }
+
+ /**
+ * Gets the last month of a given quarter in a year.
+ *
+ * @param quarter The quarter, between 1 and 4
+ * @return The number of the last month in the quarter
+ */
+ public static int getQuarterYearEndMonth(int quarter) {
+ return (getQuarterYearStartMonth(quarter) + 2);
+ }
+
+ /**
+ * Gets the Date representing the first day of a given half year.
+ *
+ * @param year The year
+ * @param half The half, between 1 and 2
+ * @return The first day of the half year
+ */
+ public static Date getHalfYearStartDate(int half, int year) {
+ int startMonth = getHalfYearStartMonth(half);
+
+ return new Date(year, startMonth, 1);
+ }
+
+
+ /**
+ * Gets the Date representing the last day of a given half year.
+ *
+ * @param year The year
+ * @param half The half, between 1 and 2
+ * @return The last day of the half year
+ */
+ public static Date getHalfYearEndDate(int half, int year, Era era) {
+ int endMonth = getHalfYearEndMonth(half);
+
+ return new Date(year, endMonth, DateUtils.getDaysInMonth(endMonth, year, era));
+ }
+
+ /**
+ * Gets the first month of a given half in a year.
+ *
+ * @param half The half, between 1 and 2
+ * @return The number of the first month in the half
+ */
+ public static int getHalfYearStartMonth(int half) {
+ return ((6 * (half-1)) + 1);
+ }
+
+ /**
+ * Gets the last month of a given half in a year.
+ *
+ * @param half The half, between 1 and 2
+ * @return The number of the last month in the half
+ */
+ public static int getHalfYearEndMonth(int half) {
+ return (getHalfYearStartMonth(half) + 5);
+ }
+
+ /**
+ * Gets the Date representing the first day of a given partial year.
+ *
+ * @param year The year
+ * @param part The part
+ * @return The first day of the partial year
+ */
+ public static Date getPartialYearStartDate(Part part, int year) {
+ int startMonth = getPartialYearStartMonth(part);
+
+ return new Date(year, startMonth, 1);
+ }
+
+ /**
+ * Gets the Date representing the last day of a given partial year.
+ *
+ * @param year The year
+ * @param part The part
+ * @return The last day of the partial year
+ */
+ public static Date getPartialYearEndDate(Part part, int year, Era era) {
+ int endMonth = getPartialYearEndMonth(part);
+
+ return new Date(year, endMonth, DateUtils.getDaysInMonth(endMonth, year, era));
+ }
+
+ /**
+ * Gets the first month of a given part of a year.
+ *
+ * @param part The part
+ * @return The number of the first month in the part
+ */
+ public static int getPartialYearStartMonth(Part part) {
+ int month;
+
+ if (part == Part.EARLY) {
+ month = 1;
+ }
+ else if (part == Part.MIDDLE) {
+ month = 5;
+ }
+ else if (part == Part.LATE) {
+ month = 9;
+ }
+ else {
+ throw new IllegalArgumentException("unexpected part");
+ }
+
+ return month;
+ }
+
+ /**
+ * Gets the last month of a given part of a year.
+ *
+ * @param part The part
+ * @return The number of the last month in the part
+ */
+ public static int getPartialYearEndMonth(Part part) {
+ int month;
+
+ if (part == Part.EARLY) {
+ month = 4;
+ }
+ else if (part == Part.MIDDLE) {
+ month = 8;
+ }
+ else if (part == Part.LATE) {
+ month = 12;
+ }
+ else {
+ throw new IllegalArgumentException("unexpected part");
+ }
+
+ return month;
+ }
+
+ /**
+ * Gets the Date representing the first day of a given partial decade.
+ *
+ * @param decade The decade, specified as a number ending in 0.
+ * For decades A.D., this is the first year of the decade. For
+ * decades B.C., this is the last year of the decade.
+ * @param part The part
+ * @param era The era of the decade. If null, Date.DEFAULT_ERA is assumed.
+ * @return The first day of the partial decade
+ */
+ public static Date getPartialDecadeStartDate(int decade, Part part, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int startYear = getPartialDecadeStartYear(decade, part, era);
+
+ return new Date(startYear, 1, 1, era);
+ }
+
+ /**
+ * Gets the Date representing the last day of a given partial decade.
+ *
+ * @param decade The decade, specified as a number ending in 0.
+ * For decades A.D., this is the first year of the decade. For
+ * decades B.C., this is the last year of the decade.
+ * @param part The part
+ * @param era The era of the decade. If null, Date.DEFAULT_ERA is assumed.
+ * @return The last day of the partial decade
+ */
+ public static Date getPartialDecadeEndDate(int decade, Part part, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int endYear = getPartialDecadeEndYear(decade, part, era);
+
+ return new Date(endYear, 12, 31, era);
+ }
+
+ /**
+ * Gets the first year of a given part of a decade.
+ *
+ * @param decade The decade, specified as a number ending in 0.
+ * For decades A.D., this is the first year of the decade. For
+ * decades B.C., this is the last year of the decade.
+ * @param part The part
+ * @param era The era of the decade. If null, Date.DEFAULT_ERA is assumed.
+ * @return The first year in the part
+ */
+ public static int getPartialDecadeStartYear(int decade, Part part, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int startYear;
+
+ if (era == Era.BCE) {
+ if (part == Part.EARLY) {
+ startYear = decade + 9;
+ }
+ else if (part == Part.MIDDLE) {
+ startYear = decade + 6;
+ }
+ else if (part == Part.LATE) {
+ startYear = decade + 3;
+ }
+ else {
+ throw new IllegalArgumentException("unexpected part");
+ }
+ }
+ else {
+ if (part == Part.EARLY) {
+ startYear = decade;
+ }
+ else if (part == Part.MIDDLE) {
+ startYear = decade + 4;
+ }
+ else if (part == Part.LATE) {
+ startYear = decade + 7;
+ }
+ else {
+ throw new IllegalArgumentException("unexpected part");
+ }
+ }
+
+ return startYear;
+ }
+
+ /**
+ * Gets the last year of a given part of a decade.
+ *
+ * @param decade The decade, specified as a number ending in 0.
+ * For decades A.D., this is the first year of the decade. For
+ * decades B.C., this is the last year of the decade.
+ * @param part The part
+ * @param era The era of the decade. If null, Date.DEFAULT_ERA is assumed.
+ * @return The last year in the part
+ */
+ public static int getPartialDecadeEndYear(int decade, Part part, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int endYear;
+
+ if (era == Era.BCE) {
+ if (part == Part.EARLY) {
+ endYear = decade + 7;
+ }
+ else if (part == Part.MIDDLE) {
+ endYear = decade + 4;
+ }
+ else if (part == Part.LATE) {
+ endYear = decade;
+ }
+ else {
+ throw new IllegalArgumentException("unexpected part");
+ }
+ }
+ else {
+ if (part == Part.EARLY) {
+ endYear = decade + 3;
+ }
+ else if (part == Part.MIDDLE) {
+ endYear = decade + 6;
+ }
+ else if (part == Part.LATE) {
+ endYear = decade + 9;
+ }
+ else {
+ throw new IllegalArgumentException("unexpected part");
+ }
+ }
+
+ return endYear;
+ }
+
+ /**
+ * Gets the Date representing the first day of a given decade.
+ *
+ * @param decade The decade, specified as a number ending in 0.
+ * For decades A.D., this is the first year of the decade. For
+ * decades B.C., this is the last year of the decade.
+ * @param era The era of the decade. If null, Date.DEFAULT_ERA is assumed.
+ * @return The first day of the decade
+ */
+ public static Date getDecadeStartDate(int decade, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int startYear = getDecadeStartYear(decade, era);
+
+ return new Date(startYear, 1, 1, era);
+ }
+
+ /**
+ * Gets the Date representing the last day of a given decade.
+ *
+ * @param decade The decade, specified as a number ending in 0.
+ * For decades A.D., this is the first year of the decade. For
+ * decades B.C., this is the last year of the decade.
+ * @param era The era of the decade. If null, Date.DEFAULT_ERA is assumed.
+ * @return The last day of the decade
+ */
+ public static Date getDecadeEndDate(int decade, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int endYear = getDecadeEndYear(decade, era);
+
+ return new Date(endYear, 12, 31, era);
+ }
+
+ /**
+ * Gets the first year of a given decade.
+ *
+ * @param decade The decade, specified as a number ending in 0.
+ * For decades A.D., this is the first year of the decade. For
+ * decades B.C., this is the last year of the decade.
+ * @param era The era of the decade. If null, Date.DEFAULT_ERA is assumed.
+ * @return The first year of the decade
+ */
+ public static int getDecadeStartYear(int decade, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int startYear;
+
+ if (era == Era.BCE) {
+ startYear = decade + 9;
+ }
+ else {
+ startYear = decade;
+ }
+
+ return startYear;
+ }
+
+ /**
+ * Gets the last year of a given decade.
+ *
+ * @param decade The decade, specified as a number ending in 0.
+ * For decades A.D., this is the first year of the decade. For
+ * decades B.C., this is the last year of the decade.
+ * @param era The era of the decade. If null, Date.DEFAULT_ERA is assumed.
+ * @return The last year of the decade
+ */
+ public static int getDecadeEndYear(int decade, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int endYear;
+
+ if (era == Era.BCE) {
+ endYear = decade;
+ }
+ else {
+ endYear = decade + 9;
+ }
+
+ return endYear;
+ }
+
+ /**
+ * Gets the Date representing the first day of a given century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The first day of the century
+ */
+ public static Date getCenturyStartDate(int century, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int startYear = getCenturyStartYear(century, era);
+
+ return new Date(startYear, 1, 1, era);
+ }
+
+ /**
+ * Gets the Date representing the last day of a given century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The last day of the century
+ */
+ public static Date getCenturyEndDate(int century, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int endYear = getCenturyEndYear(century, era);
+
+ return new Date(endYear, 12, 31, era);
+ }
+
+ /**
+ * Gets the first year of a given century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The first year of the century
+ */
+ public static int getCenturyStartYear(int century, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int startYear;
+
+ if (era == Era.BCE) {
+ startYear = century + 99;
+ }
+ else {
+ startYear = century;
+ }
+
+ return startYear;
+ }
+
+ /**
+ * Gets the last year of a given century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The last year of the century
+ */
+ public static int getCenturyEndYear(int century, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int endYear;
+
+ if (era == Era.BCE) {
+ endYear = century;
+ }
+ else {
+ endYear = century + 99;
+ }
+
+ return endYear;
+ }
+
+ /**
+ * Gets the Date representing the first day of a given partial century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param part The part
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The first day of the partial century
+ */
+ public static Date getPartialCenturyStartDate(int century, Part part, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int startYear = getPartialCenturyStartYear(century, part, era);
+
+ return new Date(startYear, 1, 1, era);
+ }
+
+ /**
+ * Gets the Date representing the last day of a given partial century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param part The part
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The last day of the partial century
+ */
+ public static Date getPartialCenturyEndDate(int century, Part part, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int endYear = getPartialCenturyEndYear(century, part, era);
+
+ return new Date(endYear, 12, 31, era);
+ }
+
+ /**
+ * Gets the first year of a given partial century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param part The part
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The first year of the partial century
+ */
+ public static int getPartialCenturyStartYear(int century, Part part, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int startYear;
+
+ if (era == Era.BCE) {
+ if (part == Part.EARLY) {
+ startYear = century + 99;
+ }
+ else if (part == Part.MIDDLE) {
+ startYear = century + 66;
+ }
+ else if (part == Part.LATE) {
+ startYear = century + 33;
+ }
+ else {
+ throw new IllegalArgumentException("unexpected part");
+ }
+ }
+ else {
+ if (part == Part.EARLY) {
+ startYear = century;
+ }
+ else if (part == Part.MIDDLE) {
+ startYear = century + 33;
+ }
+ else if (part == Part.LATE) {
+ startYear = century + 66;
+ }
+ else {
+ throw new IllegalArgumentException("unexpected part");
+ }
+ }
+
+ return startYear;
+ }
+
+ /**
+ * Gets the last year of a given partial century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param part The part
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The last year of the partial century
+ */
+ public static int getPartialCenturyEndYear(int century, Part part, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int endYear;
+
+ if (era == Era.BCE) {
+ if (part == Part.EARLY) {
+ endYear = century + 66;
+ }
+ else if (part == Part.MIDDLE) {
+ endYear = century + 33;
+ }
+ else if (part == Part.LATE) {
+ endYear = century;
+ }
+ else {
+ throw new IllegalArgumentException("unexpected part");
+ }
+ }
+ else {
+ if (part == Part.EARLY) {
+ endYear = century + 33;
+ }
+ else if (part == Part.MIDDLE) {
+ endYear = century + 66;
+ }
+ else if (part == Part.LATE) {
+ endYear = century + 99;
+ }
+ else {
+ throw new IllegalArgumentException("unexpected part");
+ }
+ }
+
+ return endYear;
+ }
+
+ /**
+ * Gets the Date representing the first day of a given half century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param half The half
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The first day of the half century
+ */
+ public static Date getHalfCenturyStartDate(int century, int half, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int startYear = getHalfCenturyStartYear(century, half, era);
+
+ return new Date(startYear, 1, 1, era);
+ }
+
+ /**
+ * Gets the Date representing the last day of a given half century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param half The half
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The last day of the half century
+ */
+ public static Date getHalfCenturyEndDate(int century, int half, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int endYear = getHalfCenturyEndYear(century, half, era);
+
+ return new Date(endYear, 12, 31, era);
+ }
+
+ /**
+ * Gets the first year of a given half century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param half The half
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The first year of the half century
+ */
+ public static int getHalfCenturyStartYear(int century, int half, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int startYear;
+
+ if (era == Era.BCE) {
+ startYear = (century + 99) - (50 * (half - 1));
+ }
+ else {
+ startYear = century + (50 * (half - 1));
+ }
+
+ return startYear;
+ }
+
+ /**
+ * Gets the last year of a given half century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param half The half
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The last year of the half century
+ */
+ public static int getHalfCenturyEndYear(int century, int half, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int endYear;
+
+ if (era == Era.BCE) {
+ endYear = (century + 99) - (50 * half) + 1;
+ }
+ else {
+ endYear = century + (50 * half) - 1;
+ }
+
+ return endYear;
+ }
+
+ /**
+ * Gets the Date representing the first day of a given quarter century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param quarter The quarter
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The first day of the quarter century
+ */
+ public static Date getQuarterCenturyStartDate(int century, int quarter, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int startYear = getQuarterCenturyStartYear(century, quarter, era);
+
+ return new Date(startYear, 1, 1, era);
+ }
+
+ /**
+ * Gets the Date representing the last day of a given quarter century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param quarter The quarter
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The last day of the quarter century
+ */
+ public static Date getQuarterCenturyEndDate(int century, int quarter, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int endYear = getQuarterCenturyEndYear(century, quarter, era);
+
+ return new Date(endYear, 12, 31, era);
+ }
+
+ /**
+ * Gets the first year of a given quarter century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param quarter The quarter
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The first year of the quarter century
+ */
+ public static int getQuarterCenturyStartYear(int century, int quarter, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int startYear;
+
+ if (era == Era.BCE) {
+ startYear = (century + 99) - (25 * (quarter - 1));
+ }
+ else {
+ startYear = century + (25 * (quarter - 1));
+ }
+
+ return startYear;
+ }
+
+ /**
+ * Gets the last year of a given quarter century.
+ *
+ * @param century The century, specified as a number ending in 00 or 01.
+ * For centuries A.D., this is the first year of the century. For
+ * centuries B.C., this is the last year of the century. For example,
+ * the "21st century" would be specified as 2001, whereas the "2000's"
+ * would be specified as 2000. The "2nd century B.C." would be specified
+ * as 101. The "100's B.C." would be specified as 100.
+ * @param quarter The quarter
+ * @param era The era of the century. If null, Date.DEFAULT_ERA is assumed.
+ * @return The last year of the quarter century
+ */
+ public static int getQuarterCenturyEndYear(int century, int quarter, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int endYear;
+
+ if (era == Era.BCE) {
+ endYear = (century + 99) - (25 * quarter) + 1;
+ }
+ else {
+ endYear = century + (25 * quarter) - 1;
+ }
+
+ return endYear;
+ }
+
+ /**
+ * Converts an nth century number to a year. For example, to convert "21st century"
+ * to a year, call nthCenturyToYear(21), which returns 2001. For centuries A.D., the
+ * year returned is the first year of the nth century. For centuries B.C., the
+ * year returned is the last year of the nth century.
+ *
+ * @param n The nth century number
+ * @return The first year in the nth century, for centuries A.D.
+ * The last year of the nth century, for centuries B.C.
+ */
+ public static int nthCenturyToYear(int n) {
+ int year = (n-1) * 100 + 1;
+
+ return year;
+ }
+
+ /**
+ * Gets the Date representing the first day of a given millennium.
+ *
+ * @param n The nth millennium number
+ * @param era The era of the millennium. If null, Date.DEFAULT_ERA is assumed.
+ * @return The first day of the millennium
+ */
+ public static Date getMillenniumStartDate(int n, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int startYear = getMillenniumStartYear(n, era);
+
+ return new Date(startYear, 1, 1, era);
+ }
+
+ /**
+ * Gets the Date representing the last day of a given millennium.
+ *
+ * @param n The nth millennium number
+ * @param era The era of the millennium. If null, Date.DEFAULT_ERA is assumed.
+ * @return The last day of the millennium
+ */
+ public static Date getMillenniumEndDate(int n, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int endYear = getMillenniumEndYear(n, era);
+
+ return new Date(endYear, 12, 31, era);
+ }
+
+ /**
+ * Gets the first year of a given millennium.
+ *
+ * @param n The nth millennium number
+ * @param era The era of the millennium. If null, Date.DEFAULT_ERA is assumed.
+ * @return The first year of the millennium
+ */
+ public static int getMillenniumStartYear(int n, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int year;
+
+ if (era == Era.BCE) {
+ year = n * 1000;
+ }
+ else {
+ year = (n - 1) * 1000 + 1;
+ }
+
+ return year;
+ }
+
+ /**
+ * Gets the last year of a given millennium.
+ *
+ * @param n The nth millennium number
+ * @param era The era of the millennium. If null, Date.DEFAULT_ERA is assumed.
+ * @return The last year of the millennium
+ */
+ public static int getMillenniumEndYear(int n, Era era) {
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int year;
+
+ if (era == Era.BCE) {
+ year = (n - 1) * 1000 + 1;
+ }
+ else {
+ year = n * 1000;
+ }
+
+ return year;
+ }
+
/**
 * Calculates the earliest date that may be considered to be "before"
 * a given date.
 *
 * @param date The date
 * @return The earliest date "before" the date
 */
public static Date getEarliestBeforeDate(Date date) {
    // Delegates to the range variant with an open (null) end date.
    return getEarliestBeforeDate(date, null);
}
+
/**
 * Calculates the latest date that may be considered to be "after"
 * a given date.
 *
 * @param date The date
 * @return The latest date "after" the date
 */
public static Date getLatestAfterDate(Date date) {
    // Delegates to the range variant with an open (null) end date.
    return getLatestAfterDate(date, null);
}
+
/**
 * Calculates the earliest date that may be considered to be "before"
 * a given date range.
 *
 * <p>NOTE(review): currently an unimplemented stub that always returns
 * null; the intended algorithm (inherited from the XDB fuzzydate
 * parser) is preserved in the commented-out block below. Callers must
 * handle a null result.
 *
 * @param startDate The first date in the range
 * @param endDate The last date in the range
 * @return The earliest date "before" the range
 */
public static Date getEarliestBeforeDate(Date startDate, Date endDate) {
    // TODO
    return null;

    /*
    // This algorithm is inherited from the XDB fuzzydate parser,
    // which considers "before" to mean "within a lifetime before".

    if (endDate == null) {
        endDate = startDate;
    }

    int difference = getYearsBetween(startDate, endDate);

    Date earliestDate = startDate.copy();
    subtractYears(earliestDate, 1);
    earliestDate.setMonth(1);
    earliestDate.setDay(1);

    if (difference < 100) {
        // The comment from the XDB fuzzydate parser states:
        //
        // Before/after years are really about birth/death dates
        // so we use average life-span of 75 years

        subtractYears(earliestDate, 75);
    }
    else {
        // The comment from the XDB fuzzydate parser states:
        //
        // Before/after years are really about birth/death dates
        // so we use average life-span of 75 years
        // but since the spec was a century, e.g. pre 20th century
        // we'll make the range a bit bigger
        // sheesh...

        subtractYears(earliestDate, 175);
    }

    return earliestDate;
    */
}
+
/**
 * Calculates the latest date that may be considered to be "after"
 * a given date range.
 *
 * <p>NOTE(review): currently an unimplemented stub that always returns
 * null. Callers must handle a null result.
 *
 * @param startDate The first date in the range
 * @param endDate The last date in the range
 * @return The latest date "after" the range
 */
public static Date getLatestAfterDate(Date startDate, Date endDate) {
    // TODO
    return null;
}
+
+ public static int getYearsBetween(Date startDate, Date endDate) {
+ if (startDate == null || endDate == null) {
+ throw new InvalidDateException("date must not be null");
+ }
+
+ Integer startYear = startDate.getYear();
+ Integer endYear = endDate.getYear();
+
+ if (startYear == null || endYear == null) {
+ throw new IllegalArgumentException("year must not be null");
+ }
+
+ Era startEra = startDate.getEra();
+ Era endEra = endDate.getEra();
+
+ if (startEra == null || endEra == null) {
+ throw new IllegalArgumentException("era must not be null");
+ }
+
+ MutableDateTime startDateTime = convertToDateTime(startDate);
+ MutableDateTime endDateTime = convertToDateTime(endDate);
+
+ int years = Years.yearsBetween(startDateTime, endDateTime).getYears();
+
+ return years;
+ }
+
+ /**
+ * Calculates the interval, in years, that should be padded around a date so
+ * that any date within that interval may be considered to be "circa" the
+ * given date.
+ *
+ * @param year The year of the date
+ * @param era The era of the date. If null, Date.DEFAULT_ERA is assumed.
+ * @return The number of "circa" years before and after the date
+ */
+ public static int getCircaIntervalYears(int year, Era era) {
+ /*
+ * This algorithm is inherited from the fuzzydate parser
+ * in XDB. Its comment states:
+ *
+ * We define circa year/century specifications offsets
+ * as +/- 5% of the difference between that year/century
+ * and the present (2100), so that the farther we go back
+ * in time, the wider the range of meaning of "circa."
+ *
+ */
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ MutableDateTime dateTime = new MutableDateTime(chronology);
+ dateTime.era().set((era == Era.BCE) ? DateTimeConstants.BC : DateTimeConstants.AD);
+ dateTime.yearOfEra().set(year);
+ dateTime.monthOfYear().set(1);
+ dateTime.dayOfMonth().set(1);
+ dateTime.setTime(0, 0, 0, 0);
+
+ int years = Years.yearsBetween(dateTime, circaBaseDateTime).getYears();
+
+ return ((int) Math.round(years * 0.05));
+ }
+
+ /**
+ * Adds a number of days to a date.
+ *
+ * @param date The date
+ * @param days The number of days to add to the date
+ */
+ public static void addDays(Date date, int days) {
+ MutableDateTime dateTime = convertToDateTime(date);
+
+ dateTime.add(Days.days(days));
+
+ setFromDateTime(date, dateTime);
+ }
+
+ /**
+ * Adds a number of years to a date's year.
+ *
+ * @param date The date
+ * @param years The number of years to add to the date
+ */
+ public static void addYears(Date date, int years) {
+ MutableDateTime dateTime = convertToDateTime(date);
+
+ dateTime.add(Years.years(years));
+
+ setFromDateTime(date, dateTime);
+ }
+
/**
 * Subtracts a number of years from a date's year, mutating it in place.
 *
 * @param date The date
 * @param years The number of years to subtract from the date
 */
public static void subtractYears(Date date, int years) {
    // Subtraction is just addition of the negated amount.
    addYears(date, -years);
}
+
+ public static String getEarliestTimestamp(Date date) {
+ Era era = date.getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ MutableDateTime dateTime = null;
+
+ try {
+ dateTime = convertToDateTime(date);
+ }
+ catch(IllegalFieldValueException e) {
+ throw new InvalidDateException(e.getMessage());
+ }
+
+ String scalarDate = scalarDateFormatter.print(dateTime);
+
+ return scalarDate;
+ }
+
+ public static String getLatestTimestamp(Date date) {
+ Era era = date.getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ MutableDateTime dateTime = null;
+
+ try {
+ dateTime = convertToDateTime(date);
+ }
+ catch(IllegalFieldValueException e) {
+ throw new InvalidDateException(e.getMessage());
+ }
+
+ dateTime.setTime(23, 59, 59, 999);
+
+ String scalarDate = scalarDateFormatter.print(dateTime);
+
+ return scalarDate;
+ }
+
+ public static boolean isValidDate(int year, int month, int day, Era era) {
+ boolean isValid = true;
+
+ try {
+ convertToDateTime(new Date(year, month,day, era));
+ }
+ catch(IllegalFieldValueException e) {
+ isValid = false;
+ }
+
+ return isValid;
+ }
+
+ /**
+ * Converts a Date to a joda-time DateTime.
+ *
+ * @param date The Date
+ * @return A MutableDateTime representing the same date
+ */
+ private static MutableDateTime convertToDateTime(Date date) {
+ Era era = date.getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ MutableDateTime dateTime = new MutableDateTime(chronology);
+ dateTime.era().set((era == Era.BCE) ? DateTimeConstants.BC : DateTimeConstants.AD);
+ dateTime.yearOfEra().set(date.getYear());
+ dateTime.monthOfYear().set(date.getMonth());
+ dateTime.dayOfMonth().set(date.getDay());
+ dateTime.setTime(0, 0, 0, 0);
+
+ return dateTime;
+ }
+
+ /**
+ * Sets the fields in a Date so that it represents the same date
+ * as a given DateTime.
+ *
+ * @param date The Date to set
+ * @param dateTime A MutableDateTime representing the desired date
+ */
+ private static void setFromDateTime(Date date, MutableDateTime dateTime) {
+ date.setYear(dateTime.getYearOfEra());
+ date.setMonth(dateTime.getMonthOfYear());
+ date.setDay(dateTime.getDayOfMonth());
+ date.setEra((dateTime.getEra() == DateTimeConstants.BC) ? Era.BCE : Era.CE);
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
/**
 * A deferred date that represents an endpoint (start or end) of a century.
 */
public abstract class DeferredCenturyDate extends DeferredDate {
	// The century whose endpoint this date represents.
	protected int century;

	public DeferredCenturyDate(int century) {
		this.century = century;
	}
}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+
+/**
+ * A deferred date that represents the end of a century. The end year
+ * can not be determined until the era of the century is known. Once the
+ * era is known, resolveDate() may be called to calculate the year.
+ */
+public class DeferredCenturyEndDate extends DeferredCenturyDate {
+
+ public DeferredCenturyEndDate(int century) {
+ super(century);
+ }
+
+ @Override
+ public void resolveDate() {
+ Era era = getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ Date endDate = DateUtils.getCenturyEndDate(century, era);
+
+ setYear(endDate.getYear());
+ setMonth(endDate.getMonth());
+ setDay(endDate.getDay());
+ setEra(endDate.getEra());
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+
+/**
+ * A deferred date that represents the start of a century. The start year
+ * can not be determined until the era of the century is known. Once the
+ * era is known, resolveDate() may be called to calculate the year.
+ */
+public class DeferredCenturyStartDate extends DeferredCenturyDate {
+
+ public DeferredCenturyStartDate(int century) {
+ super(century);
+ }
+
+ @Override
+ public void resolveDate() {
+ Era era = getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ Date startDate = DateUtils.getCenturyStartDate(century, era);
+
+ setYear(startDate.getYear());
+ setMonth(startDate.getMonth());
+ setDay(startDate.getDay());
+ setEra(startDate.getEra());
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+
/**
 * Represents a date that is not completely known, and whose
 * unknown parts require some future calculation to be determined.
 */
public abstract class DeferredDate extends Date {

	/**
	 * Resolves the date by executing the deferred
	 * calculation. This causes all of the date parts
	 * to be determined.
	 */
	public abstract void resolveDate();
}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
/**
 * A deferred date that represents an endpoint (start or end) of a decade.
 */
public abstract class DeferredDecadeDate extends DeferredDate {
	// The decade whose endpoint this date represents.
	protected int decade;

	public DeferredDecadeDate(int decade) {
		this.decade = decade;
	}
}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+
+/**
+ * A deferred date that represents the end of a decade. The end year
+ * can not be determined until the era of the decade is known. Once the
+ * era is known, resolveDate() may be called to calculate the year.
+ */
+public class DeferredDecadeEndDate extends DeferredDecadeDate {
+
+ public DeferredDecadeEndDate(int decade) {
+ super(decade);
+ }
+
+ @Override
+ public void resolveDate() {
+ Era era = getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ Date endDate = DateUtils.getDecadeEndDate(decade, era);
+
+ setYear(endDate.getYear());
+ setMonth(endDate.getMonth());
+ setDay(endDate.getDay());
+ setEra(endDate.getEra());
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+
+/**
+ * A deferred date that represents the start of a decade. The start year
+ * can not be determined until the era of the decade is known. Once the
+ * era is known, resolveDate() may be called to calculate the year.
+ */
+public class DeferredDecadeStartDate extends DeferredDecadeDate {
+
+ public DeferredDecadeStartDate(int decade) {
+ super(decade);
+ }
+
+ @Override
+ public void resolveDate() {
+ Era era = getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ Date startDate = DateUtils.getDecadeStartDate(decade, era);
+
+ setYear(startDate.getYear());
+ setMonth(startDate.getMonth());
+ setDay(startDate.getDay());
+ setEra(startDate.getEra());
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
/**
 * A deferred date that represents an endpoint (start or end) of a half century.
 */
public abstract class DeferredHalfCenturyDate extends DeferredCenturyDate {
	// Which half of the century this date is an endpoint of.
	protected int half;

	public DeferredHalfCenturyDate(int century) {
		super(century);
	}

	public DeferredHalfCenturyDate(int century, int half) {
		this(century);

		this.half = half;
	}
}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+
+/**
+ * A deferred date that represents the end of a half century. The end year
+ * can not be determined until the era of the century is known. Once the
+ * era is known, resolveDate() may be called to calculate the year.
+ */
+public class DeferredHalfCenturyEndDate extends DeferredHalfCenturyDate {
+
+ public DeferredHalfCenturyEndDate(int century, int half) {
+ super(century, half);
+ }
+
+ @Override
+ public void resolveDate() {
+ Era era = getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ Date startDate = DateUtils.getHalfCenturyEndDate(century, half, era);
+
+ setYear(startDate.getYear());
+ setMonth(startDate.getMonth());
+ setDay(startDate.getDay());
+ setEra(startDate.getEra());
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+
+/**
+ * A deferred date that represents the start of a half century. The start year
+ * can not be determined until the era of the century is known. Once the
+ * era is known, resolveDate() may be called to calculate the year.
+ */
+public class DeferredHalfCenturyStartDate extends DeferredHalfCenturyDate {
+
+ public DeferredHalfCenturyStartDate(int century, int half) {
+ super(century, half);
+ }
+
+ @Override
+ public void resolveDate() {
+ Era era = getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ Date startDate = DateUtils.getHalfCenturyStartDate(century, half, era);
+
+ setYear(startDate.getYear());
+ setMonth(startDate.getMonth());
+ setDay(startDate.getDay());
+ setEra(startDate.getEra());
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
/**
 * A deferred date that represents an endpoint (start or end) of a millennium.
 */
public abstract class DeferredMillenniumDate extends DeferredDate {
	// The millennium whose endpoint this date represents.
	protected int millennium;

	public DeferredMillenniumDate(int millennium) {
		this.millennium = millennium;
	}
}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+/**
+ * A deferred date that represents the end of a millennium. The end year
+ * can not be determined until the era of the millennium is known. Once the
+ * era is known, resolveDate() may be called to calculate the year.
+ */
+public class DeferredMillenniumEndDate extends DeferredMillenniumDate {
+
+ public DeferredMillenniumEndDate(int millennium) {
+ super(millennium);
+ }
+
+ @Override
+ public void resolveDate() {
+ Era era = getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ Date startDate = DateUtils.getMillenniumEndDate(millennium, era);
+
+ setYear(startDate.getYear());
+ setMonth(startDate.getMonth());
+ setDay(startDate.getDay());
+ setEra(startDate.getEra());
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+/**
+ * A deferred date that represents the start of a millennium. The start year
+ * can not be determined until the era of the millennium is known. Once the
+ * era is known, resolveDate() may be called to calculate the year.
+ */
+public class DeferredMillenniumStartDate extends DeferredMillenniumDate {
+
+ public DeferredMillenniumStartDate(int millennium) {
+ super(millennium);
+ }
+
+ @Override
+ public void resolveDate() {
+ Era era = getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ Date startDate = DateUtils.getMillenniumStartDate(millennium, era);
+
+ setYear(startDate.getYear());
+ setMonth(startDate.getMonth());
+ setDay(startDate.getDay());
+ setEra(startDate.getEra());
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+/**
+ * A deferred date that represents the end of a month. The end day
+ * can not be determined until the year and era of the month are known. Once the
+ * year and era are known, resolveDate() may be called to calculate the day.
+ */
+public class DeferredMonthEndDate extends DeferredDate {
+
+ @Override
+ public void resolveDate() {
+ Era era = getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int day = DateUtils.getDaysInMonth(getMonth(), getYear(), era);
+
+ setDay(day);
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
/**
 * A deferred date that represents an endpoint (start or end) of a partial century.
 */
public abstract class DeferredPartialCenturyDate extends DeferredCenturyDate {
	// Which part (early/middle/late) of the century this date is an endpoint of.
	protected Part part;

	public DeferredPartialCenturyDate(int century) {
		super(century);
	}

	public DeferredPartialCenturyDate(int century, Part part) {
		this(century);

		this.part = part;
	}
}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+
+/**
+ * A deferred date that represents the end of a partial century. The end year
+ * can not be determined until the era of the century is known. Once the
+ * era is known, resolveDate() may be called to calculate the year.
+ */
+public class DeferredPartialCenturyEndDate extends DeferredPartialCenturyDate {
+
+ public DeferredPartialCenturyEndDate(int century, Part part) {
+ super(century, part);
+ }
+
+ @Override
+ public void resolveDate() {
+ Era era = getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ Date startDate = DateUtils.getPartialCenturyEndDate(century, part, era);
+
+ setYear(startDate.getYear());
+ setMonth(startDate.getMonth());
+ setDay(startDate.getDay());
+ setEra(startDate.getEra());
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+/**
+ * A deferred date that represents the start of a partial century. The start year
+ * can not be determined until the era of the century is known. Once the
+ * era is known, resolveDate() may be called to calculate the year.
+ */
+public class DeferredPartialCenturyStartDate extends DeferredPartialCenturyDate {
+
+ public DeferredPartialCenturyStartDate(int century, Part part) {
+ super(century, part);
+ }
+
+ @Override
+ public void resolveDate() {
+ Era era = getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ Date startDate = DateUtils.getPartialCenturyStartDate(century, part, era);
+
+ setYear(startDate.getYear());
+ setMonth(startDate.getMonth());
+ setDay(startDate.getDay());
+ setEra(startDate.getEra());
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
/**
 * A deferred date that represents an endpoint (start or end) of a partial decade.
 */
public abstract class DeferredPartialDecadeDate extends DeferredDecadeDate {
	// Which part (early/middle/late) of the decade this date is an endpoint of.
	protected Part part;

	public DeferredPartialDecadeDate(int decade) {
		super(decade);
	}

	public DeferredPartialDecadeDate(int decade, Part part) {
		this(decade);

		this.part = part;
	}
}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+/**
+ * A deferred date that represents the end of a partial decade. The end year
+ * can not be determined until the era of the decade is known. Once the
+ * era is known, resolveDate() may be called to calculate the year.
+ */
+public class DeferredPartialDecadeEndDate extends DeferredPartialDecadeDate {
+
+ public DeferredPartialDecadeEndDate(int decade, Part part) {
+ super(decade, part);
+ }
+
+ @Override
+ public void resolveDate() {
+ Era era = getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ Date startDate = DateUtils.getPartialDecadeEndDate(decade, part, era);
+
+ setYear(startDate.getYear());
+ setMonth(startDate.getMonth());
+ setDay(startDate.getDay());
+ setEra(startDate.getEra());
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+/**
+ * A deferred date that represents the start of a partial decade. The start year
+ * can not be determined until the era of the decade is known. Once the
+ * era is known, resolveDate() may be called to calculate the year.
+ */
+public class DeferredPartialDecadeStartDate extends DeferredPartialDecadeDate {
+
+ public DeferredPartialDecadeStartDate(int decade, Part part) {
+ super(decade, part);
+ }
+
+ @Override
+ public void resolveDate() {
+ Era era = getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ Date startDate = DateUtils.getPartialDecadeStartDate(decade, part, era);
+
+ setYear(startDate.getYear());
+ setMonth(startDate.getMonth());
+ setDay(startDate.getDay());
+ setEra(startDate.getEra());
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
/**
 * A deferred date that represents an endpoint (start or end) of a quarter century.
 */
public abstract class DeferredQuarterCenturyDate extends DeferredCenturyDate {
	// Which quarter of the century this date is an endpoint of.
	protected int quarter;

	public DeferredQuarterCenturyDate(int century) {
		super(century);
	}

	public DeferredQuarterCenturyDate(int century, int quarter) {
		this(century);

		this.quarter = quarter;
	}
}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+
+/**
+ * A deferred date that represents the end of a quarter century. The end year
+ * can not be determined until the era of the century is known. Once the
+ * era is known, resolveDate() may be called to calculate the year.
+ */
+public class DeferredQuarterCenturyEndDate extends DeferredQuarterCenturyDate {
+
+ public DeferredQuarterCenturyEndDate(int century, int quarter) {
+ super(century, quarter);
+ }
+
+ @Override
+ public void resolveDate() {
+ Era era = getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ Date endDate = DateUtils.getQuarterCenturyEndDate(century, quarter, era);
+
+ setYear(endDate.getYear());
+ setMonth(endDate.getMonth());
+ setDay(endDate.getDay());
+ setEra(endDate.getEra());
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+
+/**
+ * A deferred date that represents the start of a quarter century. The start year
+ * can not be determined until the era of the century is known. Once the
+ * era is known, resolveDate() may be called to calculate the year.
+ */
+public class DeferredQuarterCenturyStartDate extends DeferredQuarterCenturyDate {
+
+ public DeferredQuarterCenturyStartDate(int century, int quarter) {
+ super(century, quarter);
+ }
+
+ @Override
+ public void resolveDate() {
+ Era era = getEra();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ Date startDate = DateUtils.getQuarterCenturyStartDate(century, quarter, era);
+
+ setYear(startDate.getYear());
+ setMonth(startDate.getMonth());
+ setDay(startDate.getDay());
+ setEra(startDate.getEra());
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
/**
 * The era (BCE/CE) of a structured date. Each constant carries its
 * CollectionSpace refname, which embeds the display name in a trailing
 * single-quoted suffix.
 */
public enum Era {
	BCE ("urn:cspace:botgarden.cspace.berkeley.edu:vocabularies:name(dateera):item:name(bce)'BCE'"),
	CE ("urn:cspace:botgarden.cspace.berkeley.edu:vocabularies:name(dateera):item:name(ce)'CE'");

	// The full refname, including the quoted display name.
	private final String value;

	private Era(String value) {
		this.value = value;
	}

	/**
	 * Returns the data value (refname) for this era.
	 * The @Override annotation was missing previously.
	 */
	@Override
	public String toString() {
		return value;
	}

	/**
	 * Returns the display name for this era, extracted from the
	 * single-quoted suffix of the refname.
	 */
	public String toDisplayString() {
		int index = value.indexOf("'");

		return value.substring(index + 1, value.length() - 1);
	}
}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
/**
 * Thrown when date fields do not represent a valid calendar date.
 */
public class InvalidDateException extends RuntimeException {

	private static final long serialVersionUID = 1L;

	InvalidDateException() {
		super();
	}

	InvalidDateException(String message) {
		super(message);
	}

	// Cause-preserving constructors added so that callers wrapping
	// another exception (e.g. IllegalFieldValueException in DateUtils)
	// can retain the original stack trace instead of dropping it.
	InvalidDateException(String message, Throwable cause) {
		super(message, cause);
	}

	InvalidDateException(Throwable cause) {
		super(cause);
	}
}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+import org.apache.commons.lang.StringUtils;
+import org.joda.time.IllegalFieldValueException;
+
+public class ParseDates {
+
+ /**
+ * Parse a newline-separated list of strings from a file (or standard input),
+ * and print the results to standard output.
+ *
+ * @param args The first argument to the program is the name of the file
+ * containing strings to parse. If not supplied, strings are
+ * read from standard input.
+ */
+ public static void main(String[] args) {
+ BufferedReader in = null;
+
+ if (args.length > 0) {
+ String filename = args[0];
+
+ try {
+ in = new BufferedReader(new FileReader(filename));
+ } catch (FileNotFoundException e) {
+ System.err.println("File not found: " + filename);
+ }
+ }
+ else {
+ in = new BufferedReader(new InputStreamReader(System.in));
+ }
+
+ if (in == null) {
+ return;
+ }
+
+ try {
+ for(String line; (line = in.readLine()) != null; ) {
+ line = StringUtils.trim(line);
+
+ if (StringUtils.isNotEmpty(line)) {
+ parse(line);
+ }
+ }
+ }
+ catch(IOException e) {
+ System.err.println("Error reading file: " + e.getLocalizedMessage());
+ }
+
+ try {
+ in.close();
+ }
+ catch(IOException e) {
+ System.err.println("Error closing file: " + e.getLocalizedMessage());
+ }
+ }
+
+ private static void parse(String displayDate) {
+ System.out.print(displayDate + "\t");
+
+ String result = "";
+ String scalar = "";
+
+ try {
+ StructuredDate structuredDate = StructuredDate.parse(displayDate);
+ Date earliestSingleDate = structuredDate.getEarliestSingleDate();
+ Date latestDate = structuredDate.getLatestDate();
+
+ result =
+ earliestSingleDate.getYear() + "-" +
+ earliestSingleDate.getMonth() + "-" +
+ earliestSingleDate.getDay() + " " +
+ earliestSingleDate.getEra().toDisplayString(); // use toString() to get the data value (refname)
+
+ // These don't get filled in by the parser, so no need to print.
+
+ // earliestSingleDate.getCertainty();
+ // earliestSingleDate.getQualifierType();
+ // earliestSingleDate.getQualifierValue();
+ // earliestSingleDate.getQualifierUnit();
+ // earliestSingleDate.getScalarValue();
+
+ if (latestDate != null) {
+ result += " - " +
+ latestDate.getYear() + "-" +
+ latestDate.getMonth() + "-" +
+ latestDate.getDay() + " " +
+ latestDate.getEra().toDisplayString(); // use toString() to get the data value (refname)
+ }
+
+ try {
+ structuredDate.computeScalarValues();
+
+ scalar = structuredDate.getEarliestScalarDate() + " - " + structuredDate.getLatestScalarDate();
+ }
+ catch(InvalidDateException e) {
+ scalar = "[invalid date: " + e.getMessage() + "]";
+ }
+ }
+ catch(StructuredDateFormatException e) {
+ result = "[unable to parse]";
+ scalar = "";
+ }
+
+ System.out.println(result + "\t" + scalar);
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
/**
 * The parts (early/middle/late) into which a century or decade
 * may be divided.
 */
public enum Part {
	EARLY,
	MIDDLE,
	LATE
}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
/**
 * The type of a date qualifier: minus, plus, or plus-or-minus.
 */
public enum QualifierType {
	MINUS ("-"),
	PLUS ("+"),
	PLUS_OR_MINUS ("+/-");

	// The display/data value for the qualifier type.
	private final String value;

	private QualifierType(String value) {
		this.value = value;
	}

	/**
	 * Returns the value for this qualifier type.
	 * The @Override annotation was missing previously.
	 */
	@Override
	public String toString() {
		return value;
	}
}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
/**
 * The unit of a date qualifier (days, months, or years). Each constant
 * carries its CollectionSpace refname, with the display name in a
 * trailing single-quoted suffix.
 */
public enum QualifierUnit {
	DAYS ("urn:cspace:botgarden.cspace.berkeley.edu:vocabularies:name(datequalifier):item:name(days)'Day(s)'"),
	MONTHS ("urn:cspace:botgarden.cspace.berkeley.edu:vocabularies:name(datequalifier):item:name(month)'Month(s)'"),
	YEARS ("urn:cspace:botgarden.cspace.berkeley.edu:vocabularies:name(datequalifier):item:name(years)'Year(s)'");

	// The full refname, including the quoted display name.
	private final String value;

	private QualifierUnit(String value) {
		this.value = value;
	}

	/**
	 * Returns the data value (refname) for this qualifier unit.
	 * The @Override annotation was missing previously.
	 */
	@Override
	public String toString() {
		return value;
	}
}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.collectionspace.services.structureddate.antlr.ANTLRStructuredDateEvaluator;
+
+
+/**
+ * A CollectionSpace structured date.
+ */
+public class StructuredDate {
+ // The UI layer is interpreting scalarValuesComputed as follows:
+ // - If true, the UI should compute scalar values
+ // - If false (or null), the UI should not compute scalar values
+ // Given that interpretation, scalarValuesComputed should default
+ // to true.
+ public static final boolean DEFAULT_SCALAR_VALUES_COMPUTED = true;
+
+ private String displayDate;
+ private String note;
+ private String association;
+ private String period;
+
+ private Date earliestSingleDate;
+ private Date latestDate;
+
+ private String earliestScalarDate;
+ private String latestScalarDate;
+ private Boolean scalarValuesComputed;
+
+ public StructuredDate() {
+ scalarValuesComputed = DEFAULT_SCALAR_VALUES_COMPUTED;
+ }
+
+ public String toString() {
+ String string =
+ "\n" +
+ "\tdisplayDate: " + getDisplayDate() + "\n" +
+ "\tnote: " + getNote() + "\n" +
+ "\tassociation: " + getAssociation() + "\n" +
+ "\tperiod: " + getPeriod() + "\n";
+
+ if (getEarliestSingleDate() != null) {
+ string +=
+ "\n" +
+ "\tearliestSingleDate: \n" +
+ getEarliestSingleDate().toString() + "\n";
+ }
+
+ if (getLatestDate() != null) {
+ string +=
+ "\n" +
+ "\tlatestDate: \n" +
+ getLatestDate().toString() + "\n";
+ }
+
+ return string;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null) {
+ return false;
+ }
+
+ if (obj == this) {
+ return true;
+ }
+
+ if (obj.getClass() != getClass()) {
+ return false;
+ }
+
+ StructuredDate that = (StructuredDate) obj;
+
+ return
+ new EqualsBuilder()
+ .append(this.getDisplayDate(), that.getDisplayDate())
+ .append(this.getAssociation(), that.getAssociation())
+ .append(this.getNote(), that.getNote())
+ .append(this.getPeriod(), that.getPeriod())
+ .append(this.getEarliestSingleDate(), that.getEarliestSingleDate())
+ .append(this.getLatestDate(), that.getLatestDate())
+ .append(this.areScalarValuesComputed(), that.areScalarValuesComputed())
+ .isEquals();
+ }
+
+ public void computeScalarValues() {
+ Date earliestDate = getEarliestSingleDate();
+ Date latestDate = getLatestDate();
+
+ if (earliestDate == null && latestDate == null) {
+ setEarliestScalarDate(null);
+ setLatestScalarDate(null);
+
+ return;
+ }
+
+ if (earliestDate == null) {
+ earliestDate = latestDate.copy();
+ }
+ else {
+ earliestDate = earliestDate.copy();
+ }
+
+ if (latestDate == null) {
+ latestDate = earliestDate.copy();
+ }
+ else {
+ latestDate = latestDate.copy();
+ }
+
+ if (earliestDate.getYear() == null || latestDate.getYear() == null) {
+ // The dates must at least specify a year.
+ throw new InvalidDateException("year must not be null");
+ }
+
+ if (earliestDate.getDay() != null && earliestDate.getMonth() == null) {
+ // If a day is specified, the month must be specified.
+ throw new InvalidDateException("month may not be null when day is not null");
+ }
+
+ if (latestDate.getDay() != null && latestDate.getMonth() == null) {
+ // If a day is specified, the month must be specified.
+ throw new InvalidDateException("month may not be null when day is not null");
+ }
+
+ if (earliestDate.getEra() == null) {
+ earliestDate.setEra(Date.DEFAULT_ERA);
+ }
+
+ if (latestDate.getEra() == null) {
+ latestDate.setEra(Date.DEFAULT_ERA);
+ }
+
+ if (earliestDate.getMonth() == null) {
+ earliestDate.setMonth(1);
+ earliestDate.setDay(1);
+ }
+
+ if (latestDate.getMonth() == null) {
+ latestDate.setMonth(12);
+ latestDate.setDay(31);
+ }
+
+ if (earliestDate.getDay() == null) {
+ earliestDate.setDay(1);
+ }
+
+ if (latestDate.getDay() == null) {
+ latestDate.setDay(DateUtils.getDaysInMonth(latestDate.getMonth(), latestDate.getYear(), latestDate.getEra()));
+ }
+
+ // Add one day to the latest day, since that's what the UI does.
+ // DateUtils.addDays(latestDate, 1);
+
+ setEarliestScalarDate(DateUtils.getEarliestTimestamp(earliestDate));
+ setLatestScalarDate(DateUtils.getLatestTimestamp(latestDate));
+ }
+
+ public static StructuredDate parse(String displayDate) throws StructuredDateFormatException {
+ StructuredDateEvaluator evaluator = new ANTLRStructuredDateEvaluator();
+
+ return evaluator.evaluate(displayDate);
+ }
+
+ public String getDisplayDate() {
+ return displayDate;
+ }
+
+ public void setDisplayDate(String displayDate) {
+ this.displayDate = displayDate;
+ }
+
+ public String getNote() {
+ return note;
+ }
+
+ public void setNote(String note) {
+ this.note = note;
+ }
+
+ public String getAssociation() {
+ return association;
+ }
+
+ public void setAssociation(String association) {
+ this.association = association;
+ }
+
+ public String getPeriod() {
+ return period;
+ }
+
+ public void setPeriod(String period) {
+ this.period = period;
+ }
+
+ public Date getEarliestSingleDate() {
+ return earliestSingleDate;
+ }
+
+ public void setEarliestSingleDate(Date earliestSingleDate) {
+ this.earliestSingleDate = earliestSingleDate;
+ }
+
+ public Date getLatestDate() {
+ return latestDate;
+ }
+
+ public void setLatestDate(Date latestDate) {
+ this.latestDate = latestDate;
+ }
+
+ public boolean isRange() {
+ return (getLatestDate() != null);
+ }
+
+ public String getEarliestScalarDate() {
+ return earliestScalarDate;
+ }
+
+ public void setEarliestScalarDate(String earliestScalarDate) {
+ this.earliestScalarDate = earliestScalarDate;
+ }
+
+ public Boolean areScalarValuesComputed() {
+ return scalarValuesComputed;
+ }
+
+ public String getLatestScalarDate() {
+ return latestScalarDate;
+ }
+
+ public void setLatestScalarDate(String latestScalarDate) {
+ this.latestScalarDate = latestScalarDate;
+ }
+
+ public void setScalarValuesComputed(Boolean scalarValuesComputed) {
+ this.scalarValuesComputed = scalarValuesComputed;
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
/**
 * Evaluates a display date string, producing a StructuredDate.
 */
public interface StructuredDateEvaluator {
	/**
	 * Parses a display date string into a StructuredDate.
	 *
	 * @param displayDate the display date string to parse
	 * @return the resulting StructuredDate
	 * @throws StructuredDateFormatException if the string can not be parsed
	 */
	public StructuredDate evaluate(String displayDate) throws StructuredDateFormatException;
}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
/**
 * Thrown when a display date string can not be parsed into a
 * structured date.
 */
public class StructuredDateFormatException extends IllegalArgumentException {
	private static final long serialVersionUID = 1L;

	public StructuredDateFormatException() {
		super();
	}

	public StructuredDateFormatException(String message) {
		super(message);
	}

	public StructuredDateFormatException(String message, Throwable cause) {
		super(message, cause);
	}

	public StructuredDateFormatException(Throwable cause) {
		super(cause);
	}
}
\ No newline at end of file
--- /dev/null
+package org.collectionspace.services.structureddate.antlr;
+
+import java.util.Stack;
+
+import org.antlr.v4.runtime.ANTLRInputStream;
+import org.antlr.v4.runtime.BailErrorStrategy;
+import org.antlr.v4.runtime.CommonTokenStream;
+import org.antlr.v4.runtime.FailedPredicateException;
+import org.antlr.v4.runtime.InputMismatchException;
+import org.antlr.v4.runtime.NoViableAltException;
+import org.antlr.v4.runtime.Parser;
+import org.antlr.v4.runtime.RecognitionException;
+import org.antlr.v4.runtime.Token;
+import org.antlr.v4.runtime.TokenStream;
+import org.antlr.v4.runtime.misc.ParseCancellationException;
+import org.antlr.v4.runtime.tree.TerminalNode;
+import org.collectionspace.services.structureddate.Date;
+import org.collectionspace.services.structureddate.DateUtils;
+import org.collectionspace.services.structureddate.DeferredCenturyEndDate;
+import org.collectionspace.services.structureddate.DeferredCenturyStartDate;
+import org.collectionspace.services.structureddate.DeferredDate;
+import org.collectionspace.services.structureddate.DeferredDecadeEndDate;
+import org.collectionspace.services.structureddate.DeferredDecadeStartDate;
+import org.collectionspace.services.structureddate.DeferredHalfCenturyEndDate;
+import org.collectionspace.services.structureddate.DeferredHalfCenturyStartDate;
+import org.collectionspace.services.structureddate.DeferredMillenniumEndDate;
+import org.collectionspace.services.structureddate.DeferredMillenniumStartDate;
+import org.collectionspace.services.structureddate.DeferredPartialCenturyEndDate;
+import org.collectionspace.services.structureddate.DeferredPartialCenturyStartDate;
+import org.collectionspace.services.structureddate.DeferredPartialDecadeEndDate;
+import org.collectionspace.services.structureddate.DeferredPartialDecadeStartDate;
+import org.collectionspace.services.structureddate.DeferredQuarterCenturyEndDate;
+import org.collectionspace.services.structureddate.DeferredQuarterCenturyStartDate;
+import org.collectionspace.services.structureddate.Era;
+import org.collectionspace.services.structureddate.Part;
+import org.collectionspace.services.structureddate.StructuredDate;
+import org.collectionspace.services.structureddate.StructuredDateEvaluator;
+import org.collectionspace.services.structureddate.StructuredDateFormatException;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.AllOrPartOfContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.BeforeOrAfterDateContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.CenturyContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.CertainDateContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.DateContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.DecadeContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.DisplayDateContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.EraContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.HalfCenturyContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.HalfYearContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.HyphenatedRangeContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.InvMonthYearContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.InvSeasonYearContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.InvStrDateContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.MillenniumContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.MonthContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.MonthInYearRangeContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.NthCenturyRangeContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.NthContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.NthHalfContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.NthQuarterContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.NumCenturyContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.NumContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.NumDateContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.NumDayInMonthRangeContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.NumDayOfMonthContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.NumDecadeContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.NumMonthContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.NumYearContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.PartOfContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.PartialCenturyContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.PartialDecadeContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.PartialYearContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.QuarterCenturyContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.QuarterInYearRangeContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.QuarterYearContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.StrCenturyContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.StrDateContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.StrDayInMonthRangeContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.StrMonthContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.StrSeasonContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.UncertainDateContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.YearContext;
+import org.collectionspace.services.structureddate.antlr.StructuredDateParser.YearSpanningWinterContext;
+
+/**
+ * A StructuredDateEvaluator that uses an ANTLR parser to parse the display date,
+ * and an ANTLR listener to generate a structured date from the resulting parse
+ * tree.
+ */
+public class ANTLRStructuredDateEvaluator extends StructuredDateBaseListener implements StructuredDateEvaluator {
+ /**
+ * The result of the evaluation. Reassigned at the start of each call to
+ * evaluate(), and populated by the parse listener callbacks below.
+ */
+ protected StructuredDate result;
+
+ /**
+ * The operation stack. The parse listener methods that are implemented here
+ * pop input parameters off the stack, and push results back on to the stack.
+ * Reassigned at the start of each call to evaluate(); because this state is
+ * mutable per-call, instances are not safe for concurrent use by multiple threads.
+ */
+ protected Stack<Object> stack;
+
+ /**
+ * Creates a new evaluator. All per-evaluation state is initialized in
+ * evaluate(), so there is nothing to do here.
+ */
+ public ANTLRStructuredDateEvaluator() {
+
+ }
+
+ /**
+ * Parses a display date string into a StructuredDate.
+ *
+ * @param displayDate the display date string to evaluate
+ * @return the resulting StructuredDate
+ * @throws StructuredDateFormatException if the display date can not be parsed
+ */
+ @Override
+ public StructuredDate evaluate(String displayDate) throws StructuredDateFormatException {
+ stack = new Stack<Object>();
+
+ result = new StructuredDate();
+ result.setDisplayDate(displayDate);
+
+ // Instantiate a parser from the lowercased display date, so that parsing will be
+ // case insensitive.
+ ANTLRInputStream inputStream = new ANTLRInputStream(displayDate.toLowerCase());
+ StructuredDateLexer lexer = new StructuredDateLexer(inputStream);
+ CommonTokenStream tokenStream = new CommonTokenStream(lexer);
+ StructuredDateParser parser = new StructuredDateParser(tokenStream);
+
+ // Don't try to recover from parse errors, just bail.
+ parser.setErrorHandler(new BailErrorStrategy());
+
+ // Don't print error messages to the console. Custom error messages are
+ // generated in getErrorMessage() when a parse error is caught below.
+ parser.removeErrorListeners();
+
+ // Register this listener, so the exit* callbacks are invoked as the
+ // parse tree is built.
+ parser.addParseListener(this);
+
+ try {
+ // Attempt to fulfill the oneDisplayDate rule of the grammar.
+ parser.oneDisplayDate();
+ }
+ catch(ParseCancellationException e) {
+ // ParseCancellationException is thrown by the BailErrorStrategy when there is a
+ // parse error, normally with the underlying RecognitionException as the cause.
+ Throwable cause = e.getCause();
+
+ if (cause instanceof RecognitionException) {
+ RecognitionException re = (RecognitionException) cause;
+
+ throw new StructuredDateFormatException(getErrorMessage(re), re);
+ }
+
+ // Defensive: the cause was not a RecognitionException. Report the
+ // failure as-is rather than throwing a ClassCastException that would
+ // mask the real parse error.
+ throw new StructuredDateFormatException(e.getMessage(), e);
+ }
+
+ // The parsing was successful. Return the result.
+ return result;
+ }
+
+ /**
+ * Topmost handler: pops the earliest/latest date pair computed for the
+ * display date, collapses equal pairs into a single date, and stores the
+ * pair in the result.
+ */
+ @Override
+ public void exitDisplayDate(DisplayDateContext ctx) {
+ if (ctx.exception != null) return;
+
+ Date latestDate = (Date) stack.pop();
+ Date earliestDate = (Date) stack.pop();
+
+ // If the earliest date and the latest date are the same, it's just a "single" date.
+ // There's no need to have the latest, so set it to null.
+
+ if (earliestDate.equals(latestDate)) {
+ latestDate = null;
+ }
+
+ result.setEarliestSingleDate(earliestDate);
+ result.setLatestDate(latestDate);
+ }
+
+ /**
+ * Handles "before ..."/"after ..." dates: pops the earliest/latest pair,
+ * defaults missing eras, resolves deferred dates, then computes the
+ * open-ended side of the range and pushes the pair back.
+ */
+ @Override
+ public void exitBeforeOrAfterDate(BeforeOrAfterDateContext ctx) {
+ if (ctx.exception != null) return;
+
+ Date latestDate = (Date) stack.pop();
+ Date earliestDate = (Date) stack.pop();
+
+ // Set null eras to the default.
+
+ if (earliestDate.getEra() == null) {
+ earliestDate.setEra(Date.DEFAULT_ERA);
+ }
+
+ if (latestDate.getEra() == null) {
+ latestDate.setEra(Date.DEFAULT_ERA);
+ }
+
+ // Finalize any deferred calculations.
+
+ if (latestDate instanceof DeferredDate) {
+ ((DeferredDate) latestDate).resolveDate();
+ }
+
+ if (earliestDate instanceof DeferredDate) {
+ ((DeferredDate) earliestDate).resolveDate();
+ }
+
+ // Calculate the earliest date or end date.
+
+ if (ctx.BEFORE() != null) {
+ // NOTE(review): after this assignment both variables alias the same Date
+ // object, so getEarliestBeforeDate receives identical arguments — confirm
+ // that is the intended contract of the helper.
+ latestDate = earliestDate;
+ earliestDate = DateUtils.getEarliestBeforeDate(earliestDate, latestDate);
+ }
+ else if (ctx.AFTER() != null) {
+ // NOTE(review): likewise, both arguments alias the same Date here.
+ earliestDate = latestDate;
+ latestDate = DateUtils.getLatestAfterDate(earliestDate, latestDate);
+ }
+
+ stack.push(earliestDate);
+ stack.push(latestDate);
+ }
+
+ /**
+ * Handles "circa" dates: widens the earliest/latest pair by an
+ * era-dependent circa interval, applied as a year offset.
+ */
+ @Override
+ public void exitUncertainDate(UncertainDateContext ctx) {
+ if (ctx.exception != null) return;
+
+ Date latestDate = (Date) stack.pop();
+ Date earliestDate = (Date) stack.pop();
+
+ int earliestInterval = DateUtils.getCircaIntervalYears(earliestDate.getYear(), earliestDate.getEra());
+ int latestInterval = DateUtils.getCircaIntervalYears(latestDate.getYear(), latestDate.getEra());
+
+ // Express the circa interval as a qualifier.
+
+ // stack.push(earliestDate.withQualifier(QualifierType.MINUS, earliestInterval, QualifierUnit.YEARS));
+ // stack.push(latestDate.withQualifier(QualifierType.PLUS, latestInterval, QualifierUnit.YEARS));
+
+ // OR:
+
+ // Express the circa interval as an offset calculated into the year.
+
+ DateUtils.subtractYears(earliestDate, earliestInterval);
+ DateUtils.addYears(latestDate, latestInterval);
+
+ stack.push(earliestDate);
+ stack.push(latestDate);
+ }
+
+ /**
+ * Handles a plain (certain) date: defaults missing eras and resolves any
+ * deferred dates, leaving the earliest/latest pair on the stack.
+ */
+ @Override
+ public void exitCertainDate(CertainDateContext ctx) {
+ if (ctx.exception != null) return;
+
+ Date latestDate = (Date) stack.pop();
+ Date earliestDate = (Date) stack.pop();
+
+ // Set null eras to the default.
+
+ if (earliestDate.getEra() == null) {
+ earliestDate.setEra(Date.DEFAULT_ERA);
+ }
+
+ if (latestDate.getEra() == null) {
+ latestDate.setEra(Date.DEFAULT_ERA);
+ }
+
+ // Finalize any deferred calculations.
+
+ if (latestDate instanceof DeferredDate) {
+ ((DeferredDate) latestDate).resolveDate();
+ }
+
+ if (earliestDate instanceof DeferredDate) {
+ ((DeferredDate) earliestDate).resolveDate();
+ }
+
+ stack.push(earliestDate);
+ stack.push(latestDate);
+ }
+
+ /**
+ * Handles a hyphenated range: pops two date pairs, keeps only the overall
+ * start and end (the two inner dates are discarded), inherits the era of
+ * the end date when the start has none, and resolves deferred dates.
+ */
+ @Override
+ public void exitHyphenatedRange(HyphenatedRangeContext ctx) {
+ if (ctx.exception != null) return;
+
+ Date latestEndDate = (Date) stack.pop();
+ stack.pop(); // latestStartDate
+ stack.pop(); // earliestEndDate
+ Date earliestStartDate = (Date) stack.pop();
+
+ // If no era was explicitly specified for the first date,
+ // make it inherit the era of the second date.
+
+ if (earliestStartDate.getEra() == null && latestEndDate.getEra() != null) {
+ earliestStartDate.setEra(latestEndDate.getEra());
+ }
+
+ // Finalize any deferred calculations.
+
+ if (earliestStartDate instanceof DeferredDate) {
+ ((DeferredDate) earliestStartDate).resolveDate();
+ }
+
+ if (latestEndDate instanceof DeferredDate) {
+ ((DeferredDate) latestEndDate).resolveDate();
+ }
+
+ stack.push(earliestStartDate);
+ stack.push(latestEndDate);
+ }
+
+ /**
+ * Handles a range like "1st quarter of 19th century - 2nd quarter of 20th
+ * century": pushes two start/end pairs (one per side of the range), using
+ * partial-century dates when a part was specified.
+ */
+ @Override
+ public void exitNthCenturyRange(NthCenturyRangeContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer endN = (Integer) stack.pop();
+ Part endPart = (Part) stack.pop();
+ Integer startN = (Integer) stack.pop();
+ Part startPart = (Part) stack.pop();
+
+ if (era == null) {
+ era = Date.DEFAULT_ERA;
+ }
+
+ int startYear = DateUtils.nthCenturyToYear(startN);
+ int endYear = DateUtils.nthCenturyToYear(endN);
+
+ stack.push(startPart == null ? DateUtils.getCenturyStartDate(startYear, era) : DateUtils.getPartialCenturyStartDate(startYear, startPart, era));
+ stack.push(startPart == null ? DateUtils.getCenturyEndDate(startYear, era) : DateUtils.getPartialCenturyEndDate(startYear, startPart, era));
+ stack.push(endPart == null ? DateUtils.getCenturyStartDate(endYear, era) : DateUtils.getPartialCenturyStartDate(endYear, endPart, era));
+ stack.push(endPart == null ? DateUtils.getCenturyEndDate(endYear, era) : DateUtils.getPartialCenturyEndDate(endYear, endPart, era));
+ }
+
+ /**
+ * Handles a month-to-month range within one year: pushes the start/end
+ * pair of each month, spanning the first day through the last day.
+ */
+ @Override
+ public void exitMonthInYearRange(MonthInYearRangeContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Integer numMonthEnd = (Integer) stack.pop();
+ Integer numMonthStart = (Integer) stack.pop();
+
+ stack.push(new Date(year, numMonthStart, 1, era));
+ stack.push(new Date(year, numMonthStart, DateUtils.getDaysInMonth(numMonthStart, year, era), era));
+ stack.push(new Date(year, numMonthEnd, 1, era));
+ stack.push(new Date(year, numMonthEnd, DateUtils.getDaysInMonth(numMonthEnd, year, era), era));
+ }
+
+ /**
+ * Handles a quarter-to-quarter range within one year: pushes the start/end
+ * pair of each quarter.
+ */
+ @Override
+ public void exitQuarterInYearRange(QuarterInYearRangeContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Integer lastQuarter = (Integer) stack.pop();
+ Integer firstQuarter = (Integer) stack.pop();
+
+ stack.push(DateUtils.getQuarterYearStartDate(firstQuarter, year).withEra(era));
+ stack.push(DateUtils.getQuarterYearEndDate(firstQuarter, year, era).withEra(era));
+ stack.push(DateUtils.getQuarterYearStartDate(lastQuarter, year).withEra(era));
+ stack.push(DateUtils.getQuarterYearEndDate(lastQuarter, year, era).withEra(era));
+ }
+
+ /**
+ * Handles a day range within a spelled-out month ("June 1-5, 2000"):
+ * pushes a degenerate start/end pair for each endpoint day.
+ */
+ @Override
+ public void exitStrDayInMonthRange(StrDayInMonthRangeContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Integer dayOfMonthEnd = (Integer) stack.pop();
+ Integer dayOfMonthStart = (Integer) stack.pop();
+ Integer numMonth = (Integer) stack.pop();
+
+ stack.push(new Date(year, numMonth, dayOfMonthStart, era));
+ stack.push(new Date(year, numMonth, dayOfMonthStart, era));
+ stack.push(new Date(year, numMonth, dayOfMonthEnd, era));
+ stack.push(new Date(year, numMonth, dayOfMonthEnd, era));
+ }
+
+ /**
+ * Handles a day range within a numeric month ("6/1-5/2000"): pushes a
+ * degenerate start/end pair for each endpoint day.
+ */
+ @Override
+ public void exitNumDayInMonthRange(NumDayInMonthRangeContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Integer dayOfMonthEnd = (Integer) stack.pop();
+ Integer dayOfMonthStart = (Integer) stack.pop();
+ Integer numMonth = (Integer) stack.pop();
+
+ stack.push(new Date(year, numMonth, dayOfMonthStart, era));
+ stack.push(new Date(year, numMonth, dayOfMonthStart, era));
+ stack.push(new Date(year, numMonth, dayOfMonthEnd, era));
+ stack.push(new Date(year, numMonth, dayOfMonthEnd, era));
+ }
+
+ /**
+ * Handles a fully-specified single date: pops the canonical
+ * year-month-day-era values and pushes an equal earliest/latest pair.
+ */
+ @Override
+ public void exitDate(DateContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Expect the canonical year-month-day-era ordering
+ // to be on the stack.
+
+ Era era = (Era) stack.pop();
+ Integer dayOfMonth = (Integer) stack.pop();
+ Integer numMonth = (Integer) stack.pop();
+ Integer year = (Integer) stack.pop();
+
+ // For the latest date we could either return null, or a copy of the earliest date,
+ // since the UI doesn't care. Use a copy of the earliest date, since it makes
+ // things easier here if we don't have to test for null up the tree.
+
+ stack.push(new Date(year, numMonth, dayOfMonth, era));
+ stack.push(new Date(year, numMonth, dayOfMonth, era));
+ }
+
+
+ /**
+ * Handles an all-numeric date: disambiguates between year-month-day and
+ * month-day-year, then pushes the canonical year-month-day-era ordering.
+ * If neither interpretation is valid, the year-month-day default is pushed
+ * unchanged.
+ */
+ @Override
+ public void exitNumDate(NumDateContext ctx) {
+ if (ctx.exception != null) return;
+
+ // This could either be year-month-day, or
+ // month-day-year. Try to determine which,
+ // and reorder the stack into the canonical
+ // year-month-day-era ordering.
+
+ Era era = (Era) stack.pop();
+ Integer num3 = (Integer) stack.pop();
+ Integer num2 = (Integer) stack.pop();
+ Integer num1 = (Integer) stack.pop();
+
+ // Default to a year-month-day interpretation.
+
+ int year = num1;
+ int numMonth = num2;
+ int dayOfMonth = num3;
+
+ if (DateUtils.isValidDate(num1, num2, num3, era)) {
+ // Interpreting as year-month-day produces a valid date. Go with it.
+ }
+ else if (DateUtils.isValidDate(num3, num1, num2, era)) {
+ // Interpreting as year-month-day doesn't produce a valid date, but
+ // month-day-year does. Go with month-day-year.
+
+ year = num3;
+ numMonth = num1;
+ dayOfMonth = num2;
+ }
+
+ stack.push(year);
+ stack.push(numMonth);
+ stack.push(dayOfMonth);
+ stack.push(era);
+ }
+
+ /**
+ * Handles a spelled-out month-day-year date: reorders the popped values
+ * into the canonical year-month-day-era ordering.
+ */
+ @Override
+ public void exitStrDate(StrDateContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Reorder the stack into a canonical ordering,
+ // year-month-day-era.
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Integer dayOfMonth = (Integer) stack.pop();
+ Integer numMonth = (Integer) stack.pop();
+
+ stack.push(year);
+ stack.push(numMonth);
+ stack.push(dayOfMonth);
+ stack.push(era);
+ }
+
+ /**
+ * Handles an inverted (era-first) spelled-out date: reorders the popped
+ * values into the canonical year-month-day-era ordering.
+ */
+ @Override
+ public void exitInvStrDate(InvStrDateContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Reorder the stack into a canonical ordering,
+ // year-month-day-era.
+
+ Integer dayOfMonth = (Integer) stack.pop();
+ Integer numMonth = (Integer) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Era era = (Era) stack.pop();
+
+ stack.push(year);
+ stack.push(numMonth);
+ stack.push(dayOfMonth);
+ stack.push(era);
+ }
+
+ /**
+ * Handles a month-in-year date: pushes the first and last day of the month
+ * as the earliest/latest pair.
+ */
+ @Override
+ public void exitMonth(MonthContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Integer numMonth = (Integer) stack.pop();
+
+ stack.push(new Date(year, numMonth, 1, era));
+ stack.push(new Date(year, numMonth, DateUtils.getDaysInMonth(numMonth, year, era), era));
+ }
+
+ /**
+ * Handles an inverted (era-first) month-year: reorders the popped values so
+ * downstream handlers see month, year, era in the usual order.
+ */
+ @Override
+ public void exitInvMonthYear(InvMonthYearContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Invert the arguments.
+
+ Integer numMonth = (Integer) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Era era = (Era) stack.pop();
+
+ stack.push(numMonth);
+ stack.push(year);
+ stack.push(era);
+ }
+
+ /**
+ * Handles a winter spanning two years ("winter 1999-2000"): the range runs
+ * from December 1 of the start year through the end of the first quarter of
+ * the end year.
+ */
+ @Override
+ public void exitYearSpanningWinter(YearSpanningWinterContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer endYear = (Integer) stack.pop();
+ Integer startYear = (Integer) stack.pop();
+
+ stack.push(new Date(startYear, 12, 1).withEra(era));
+ stack.push(DateUtils.getQuarterYearEndDate(1, endYear, era).withEra(era));
+ }
+
+ /**
+ * Handles a part of a year ("early 1984"): pushes the start/end pair of the
+ * named part.
+ */
+ @Override
+ public void exitPartialYear(PartialYearContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Part part = (Part) stack.pop();
+
+ stack.push(DateUtils.getPartialYearStartDate(part, year).withEra(era));
+ stack.push(DateUtils.getPartialYearEndDate(part, year, era).withEra(era));
+ }
+
+ /**
+ * Handles a quarter of a year: pushes the start/end pair of the quarter.
+ */
+ @Override
+ public void exitQuarterYear(QuarterYearContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Integer quarter = (Integer) stack.pop();
+
+ stack.push(DateUtils.getQuarterYearStartDate(quarter, year).withEra(era));
+ stack.push(DateUtils.getQuarterYearEndDate(quarter, year, era).withEra(era));
+ }
+
+ /**
+ * Handles a half of a year: pushes the start/end pair of the half.
+ */
+ @Override
+ public void exitHalfYear(HalfYearContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Integer half = (Integer) stack.pop();
+
+ stack.push(DateUtils.getHalfYearStartDate(half, year).withEra(era));
+ stack.push(DateUtils.getHalfYearEndDate(half, year, era).withEra(era));
+ }
+
+ /**
+ * Handles an inverted (era-first) season-year: reorders the popped values
+ * so downstream handlers see quarter, year, era in the usual order.
+ */
+ @Override
+ public void exitInvSeasonYear(InvSeasonYearContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Invert the arguments.
+
+ Integer quarter = (Integer) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Era era = (Era) stack.pop();
+
+ stack.push(quarter);
+ stack.push(year);
+ stack.push(era);
+ }
+
+ /**
+ * Handles a bare year: pushes January 1 through December 31 of the year.
+ */
+ @Override
+ public void exitYear(YearContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+
+ stack.push(new Date(year, 1, 1, era));
+ stack.push(new Date(year, 12, 31, era));
+ }
+
+ /**
+ * Handles a part of a decade ("early 1980s"): pushes concrete start/end
+ * dates when the era is known, or deferred dates otherwise.
+ */
+ @Override
+ public void exitPartialDecade(PartialDecadeContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Part part = (Part) stack.pop();
+
+ if (era != null) {
+ // If the era was explicitly specified, the start and end years
+ // may be calculated now.
+
+ stack.push(DateUtils.getPartialDecadeStartDate(year, part, era));
+ stack.push(DateUtils.getPartialDecadeEndDate(year, part, era));
+ }
+ else {
+ // If the era was not explicitly specified, the start and end years
+ // can't be calculated yet. The calculation must be deferred until
+ // later. For example, this partial decade may be the start of a hyphenated
+ // range, where the era will be inherited from the era of the end of
+ // the range; this era won't be known until farther up the parse tree,
+ // when both sides of the range will have been parsed.
+
+ stack.push(new DeferredPartialDecadeStartDate(year, part));
+ stack.push(new DeferredPartialDecadeEndDate(year, part));
+ }
+ }
+
+ /**
+ * Handles a decade ("1980s"): pushes concrete start/end dates when the era
+ * is known, or deferred dates otherwise.
+ */
+ @Override
+ public void exitDecade(DecadeContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+
+ // Calculate the start and end year of the decade, which depends on the era.
+
+ if (era != null) {
+ // If the era was explicitly specified, the start and end years
+ // may be calculated now.
+
+ stack.push(DateUtils.getDecadeStartDate(year, era));
+ stack.push(DateUtils.getDecadeEndDate(year, era));
+ }
+ else {
+ // If the era was not explicitly specified, the start and end years
+ // can't be calculated yet. The calculation must be deferred until
+ // later. For example, this decade may be the start of a hyphenated
+ // range, where the era will be inherited from the era of the end of
+ // the range; this era won't be known until farther up the parse tree,
+ // when both sides of the range will have been parsed.
+
+ stack.push(new DeferredDecadeStartDate(year));
+ stack.push(new DeferredDecadeEndDate(year));
+ }
+ }
+
+ /**
+ * Handles a part of a century ("late 19th century"): pushes concrete
+ * start/end dates when the era is known, or deferred dates otherwise.
+ */
+ @Override
+ public void exitPartialCentury(PartialCenturyContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Part part = (Part) stack.pop();
+
+ if (era != null) {
+ // If the era was explicitly specified, the start and end years
+ // may be calculated now.
+
+ stack.push(DateUtils.getPartialCenturyStartDate(year, part, era));
+ stack.push(DateUtils.getPartialCenturyEndDate(year, part, era));
+ }
+ else {
+ // If the era was not explicitly specified, the start and end years
+ // can't be calculated yet. The calculation must be deferred until
+ // later. For example, this partial century may be the start of a hyphenated
+ // range, where the era will be inherited from the era of the end of
+ // the range; this era won't be known until farther up the parse tree,
+ // when both sides of the range will have been parsed.
+
+ stack.push(new DeferredPartialCenturyStartDate(year, part));
+ stack.push(new DeferredPartialCenturyEndDate(year, part));
+ }
+ }
+
+ /**
+ * Handles a quarter of a century: pushes concrete start/end dates when the
+ * era is known, or deferred dates otherwise.
+ */
+ @Override
+ public void exitQuarterCentury(QuarterCenturyContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Integer quarter = (Integer) stack.pop();
+
+ if (era != null) {
+ // If the era was explicitly specified, the start and end years
+ // may be calculated now.
+
+ stack.push(DateUtils.getQuarterCenturyStartDate(year, quarter, era));
+ stack.push(DateUtils.getQuarterCenturyEndDate(year, quarter, era));
+ }
+ else {
+ // If the era was not explicitly specified, the start and end years
+ // can't be calculated yet. The calculation must be deferred until
+ // later. For example, this quarter century may be the start of a hyphenated
+ // range, where the era will be inherited from the era of the end of
+ // the range; this era won't be known until farther up the parse tree,
+ // when both sides of the range will have been parsed.
+
+ stack.push(new DeferredQuarterCenturyStartDate(year, quarter));
+ stack.push(new DeferredQuarterCenturyEndDate(year, quarter));
+ }
+ }
+
+ /**
+ * Handles a half of a century: pushes concrete start/end dates when the
+ * era is known, or deferred dates otherwise.
+ */
+ @Override
+ public void exitHalfCentury(HalfCenturyContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+ Integer half = (Integer) stack.pop();
+
+ if (era != null) {
+ // If the era was explicitly specified, the start and end years
+ // may be calculated now.
+
+ stack.push(DateUtils.getHalfCenturyStartDate(year, half, era));
+ stack.push(DateUtils.getHalfCenturyEndDate(year, half, era));
+ }
+ else {
+ // If the era was not explicitly specified, the start and end years
+ // can't be calculated yet. The calculation must be deferred until
+ // later. For example, this half century may be the start of a hyphenated
+ // range, where the era will be inherited from the era of the end of
+ // the range; this era won't be known until farther up the parse tree,
+ // when both sides of the range will have been parsed.
+
+ stack.push(new DeferredHalfCenturyStartDate(year, half));
+ stack.push(new DeferredHalfCenturyEndDate(year, half));
+ }
+ }
+
+ /**
+ * Handles a century: pushes concrete start/end dates when the era is
+ * known, or deferred dates otherwise.
+ */
+ @Override
+ public void exitCentury(CenturyContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer year = (Integer) stack.pop();
+
+ if (era != null) {
+ // If the era was explicitly specified, the start and end years
+ // may be calculated now.
+
+ stack.push(DateUtils.getCenturyStartDate(year, era));
+ stack.push(DateUtils.getCenturyEndDate(year, era));
+ }
+ else {
+ // If the era was not explicitly specified, the start and end years
+ // can't be calculated yet. The calculation must be deferred until
+ // later. For example, this century may be the start of a hyphenated
+ // range, where the era will be inherited from the era of the end of
+ // the range; this era won't be known until farther up the parse tree,
+ // when both sides of the range will have been parsed.
+
+ stack.push(new DeferredCenturyStartDate(year));
+ stack.push(new DeferredCenturyEndDate(year));
+ }
+ }
+
+ /**
+ * Handles a millennium: pushes concrete start/end dates when the era is
+ * known, or deferred dates otherwise.
+ */
+ @Override
+ public void exitMillennium(MillenniumContext ctx) {
+ if (ctx.exception != null) return;
+
+ Era era = (Era) stack.pop();
+ Integer n = (Integer) stack.pop();
+
+ if (era != null) {
+ // If the era was explicitly specified, the start and end years
+ // may be calculated now.
+
+ stack.push(DateUtils.getMillenniumStartDate(n, era));
+ stack.push(DateUtils.getMillenniumEndDate(n, era));
+ }
+ else {
+ // If the era was not explicitly specified, the start and end years
+ // can't be calculated yet. The calculation must be deferred until
+ // later. For example, this millennium may be the start of a hyphenated
+ // range, where the era will be inherited from the era of the end of
+ // the range; this era won't be known until farther up the parse tree,
+ // when both sides of the range will have been parsed.
+
+ stack.push(new DeferredMillenniumStartDate(n));
+ stack.push(new DeferredMillenniumEndDate(n));
+ }
+ }
+
+ /**
+ * Handles a spelled-out ordinal century ("19th century"): converts the
+ * ordinal to its base year number and pushes it on the stack.
+ */
+ @Override
+ public void exitStrCentury(StrCenturyContext ctx) {
+ if (ctx.exception != null) return;
+
+ Integer n = (Integer) stack.pop();
+
+ // Convert the nth number to a year number,
+ // and push on the stack.
+
+ Integer year = DateUtils.nthCenturyToYear(n);
+
+ stack.push(year);
+ }
+
+ @Override
+ public void exitNumCentury(NumCenturyContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Convert the string to a number,
+ // and push on the stack.
+
+ Integer year = new Integer(stripEndLetters(ctx.HUNDREDS().getText()));
+
+ if (year == 0) {
+ throw new StructuredDateFormatException("unexpected century '" + ctx.HUNDREDS().getText() + "'");
+ }
+
+ stack.push(year);
+ }
+
+ @Override
+ public void exitNumDecade(NumDecadeContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Convert the string to a number,
+ // and push on the stack.
+
+ Integer year = new Integer(stripEndLetters(ctx.TENS().getText()));
+
+ if (year == 0) {
+ throw new StructuredDateFormatException("unexpected decade '" + ctx.TENS().getText() + "'");
+ }
+
+ stack.push(year);
+ }
+
+ @Override
+ public void exitNumYear(NumYearContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Convert the string to a number,
+ // and push on the stack.
+
+ Integer year = new Integer(ctx.NUMBER().getText());
+
+ if (year == 0) {
+ throw new StructuredDateFormatException("unexpected year '" + ctx.NUMBER().getText() + "'");
+ }
+
+ stack.push(year);
+ }
+
+ @Override
+ public void exitNumMonth(NumMonthContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Convert the string a number,
+ // and push on the stack.
+
+ Integer month = new Integer(ctx.NUMBER().getText());
+
+ if (month < 1 || month > 12) {
+ throw new StructuredDateFormatException("unexpected month '" + ctx.NUMBER().getText() + "'");
+ }
+
+ stack.push(month);
+ }
+
+ /**
+ * Handles an ordinal half ("first half", "last half"): ensures a half
+ * number is on top of the stack, and validates that it is 1 or 2.
+ *
+ * @throws StructuredDateFormatException if the half is out of range
+ */
+ @Override
+ public void exitNthHalf(NthHalfContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Convert LAST to a number (the last half
+ // is the 2nd). If this rule matched the
+ // alternative with nth instead of LAST,
+ // the nth handler will already have pushed
+ // a number on the stack. Integer.valueOf
+ // replaces the deprecated Integer(int) constructor.
+
+ if (ctx.LAST() != null) {
+ stack.push(Integer.valueOf(2));
+ }
+
+ // Check for a valid half.
+
+ Integer n = (Integer) stack.peek();
+
+ if (n < 1 || n > 2) {
+ throw new StructuredDateFormatException("unexpected half '" + n + "'");
+ }
+ }
+
+ /**
+ * Handles an ordinal quarter ("first quarter", "last quarter"): ensures a
+ * quarter number is on top of the stack, and validates that it is 1-4.
+ *
+ * @throws StructuredDateFormatException if the quarter is out of range
+ */
+ @Override
+ public void exitNthQuarter(NthQuarterContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Convert LAST to a number (the last quarter
+ // is the 4th). If this rule matched the
+ // alternative with nth instead of LAST,
+ // the nth handler will already have pushed
+ // a number on the stack. Integer.valueOf
+ // replaces the deprecated Integer(int) constructor.
+
+ if (ctx.LAST() != null) {
+ stack.push(Integer.valueOf(4));
+ }
+
+ // Check for a valid quarter.
+
+ Integer n = (Integer) stack.peek();
+
+ if (n < 1 || n > 4) {
+ throw new StructuredDateFormatException("unexpected quarter '" + n + "'");
+ }
+ }
+
+ /**
+ * Handles an ordinal ("1st", "first", ..., "fourth"): converts it to an
+ * integer and pushes it on the stack.
+ */
+ @Override
+ public void exitNth(NthContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Convert the string to a number, and push on the stack.
+ // Integer.parseInt replaces the deprecated Integer(String) constructor.
+
+ Integer n = null;
+
+ if (ctx.NTHSTR() != null) {
+ n = Integer.parseInt(stripEndLetters(ctx.NTHSTR().getText()));
+ }
+ else if (ctx.FIRST() != null) {
+ n = 1;
+ }
+ else if (ctx.SECOND() != null) {
+ n = 2;
+ }
+ else if (ctx.THIRD() != null) {
+ n = 3;
+ }
+ else if (ctx.FOURTH() != null) {
+ n = 4;
+ }
+
+ stack.push(n);
+ }
+
+ /**
+ * Handles a spelled-out month name (full or abbreviated): converts it to
+ * its month number and pushes it on the stack.
+ */
+ @Override
+ public void exitStrMonth(StrMonthContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Convert the month name to a number,
+ // and push on the stack.
+
+ TerminalNode monthNode = ctx.MONTH();
+
+ if (monthNode == null) {
+ // NOTE(review): assumes the grammar guarantees either MONTH or SHORTMONTH
+ // is present; if both were null this would throw a NullPointerException
+ // below — confirm against the grammar.
+ monthNode = ctx.SHORTMONTH();
+ }
+
+ String monthStr = monthNode.getText();
+
+ stack.push(DateUtils.getMonthByName(monthStr));
+ }
+
+ /**
+ * Handles a season name: converts it to a quarter number (winter=1,
+ * spring=2, summer=3, fall=4) and pushes it on the stack.
+ */
+ @Override
+ public void exitStrSeason(StrSeasonContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Convert the season to a quarter number,
+ // and push on the stack.
+
+ Integer quarter = null;
+
+ if (ctx.WINTER() != null) {
+ quarter = 1;
+ }
+ else if (ctx.SPRING() != null) {
+ quarter = 2;
+ }
+ else if (ctx.SUMMER() != null) {
+ quarter = 3;
+ }
+ else if (ctx.FALL() != null) {
+ quarter = 4;
+ }
+
+ stack.push(quarter);
+ }
+
+ /**
+ * Handles an optional part qualifier: if no part was matched, pushes null
+ * so downstream handlers can pop a Part unconditionally.
+ */
+ @Override
+ public void exitAllOrPartOf(AllOrPartOfContext ctx) {
+ if (ctx.exception != null) return;
+
+ // If a part was specified, it will have been
+ // pushed on the stack in exitPartOf(). If not,
+ // push null on the stack.
+
+ if (ctx.partOf() == null) {
+ stack.push(null);
+ }
+ }
+
+ /**
+ * Handles a part qualifier ("early", "middle", "late"): converts the token
+ * to a Part and pushes it on the stack.
+ */
+ @Override
+ public void exitPartOf(PartOfContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Convert the token to a Part,
+ // and push on the stack.
+
+ Part part = null;
+
+ if (ctx.EARLY() != null) {
+ part = Part.EARLY;
+ }
+ else if (ctx.MIDDLE() != null) {
+ part = Part.MIDDLE;
+ }
+ else if (ctx.LATE() != null) {
+ part = Part.LATE;
+ }
+
+ stack.push(part);
+ }
+
+ /**
+ * Handles an era token ("BC"/"AD"): converts it to an Era and pushes it on
+ * the stack. Pushes null when neither token matched.
+ */
+ @Override
+ public void exitEra(EraContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Convert the token to an Era,
+ // and push on the stack.
+
+ Era era = null;
+
+ if (ctx.BC() != null) {
+ era = Era.BCE;
+ }
+ else if (ctx.AD() != null) {
+ era = Era.CE;
+ }
+
+ stack.push(era);
+ }
+
+ @Override
+ public void exitNumDayOfMonth(NumDayOfMonthContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Convert the numeric string to an Integer,
+ // and push on the stack.
+
+ Integer dayOfMonth = new Integer(ctx.NUMBER().getText());
+
+ if (dayOfMonth == 0 || dayOfMonth > 31) {
+ throw new StructuredDateFormatException("unexpected day of month '" + ctx.NUMBER().getText() + "'");
+ }
+
+ stack.push(dayOfMonth);
+ }
+
+ @Override
+ public void exitNum(NumContext ctx) {
+ if (ctx.exception != null) return;
+
+ // Convert the numeric string to an Integer,
+ // and push on the stack.
+
+ Integer num = new Integer(ctx.NUMBER().getText());
+
+ stack.push(num);
+ }
+
+ /**
+ * Builds a human-readable error message for a parse failure. Returns an
+ * empty string for RecognitionException subtypes other than the three
+ * handled below.
+ *
+ * @param re the recognition exception raised by the parser
+ * @return a descriptive message, possibly empty
+ */
+ protected String getErrorMessage(RecognitionException re) {
+ String message = "";
+
+ // NOTE(review): assumes the recognizer is always the Parser that threw —
+ // true for exceptions produced by parser.oneDisplayDate() above.
+ Parser recognizer = (Parser) re.getRecognizer();
+ TokenStream tokens = recognizer.getInputStream();
+
+ if (re instanceof NoViableAltException) {
+ NoViableAltException e = (NoViableAltException) re;
+ Token startToken = e.getStartToken();
+ String input = (startToken.getType() == Token.EOF ) ? "end of text" : quote(tokens.getText(startToken, e.getOffendingToken()));
+
+ message = "no viable date format found at " + input;
+ }
+ else if (re instanceof InputMismatchException) {
+ InputMismatchException e = (InputMismatchException) re;
+ message = "did not expect " + getTokenDisplayString(e.getOffendingToken()) + " while looking for " +
+ e.getExpectedTokens().toString(recognizer.getTokenNames());
+ }
+ else if (re instanceof FailedPredicateException) {
+ FailedPredicateException e = (FailedPredicateException) re;
+ String ruleName = recognizer.getRuleNames()[recognizer.getContext().getRuleIndex()];
+
+ message = "failed predicate " + ruleName + ": " + e.getMessage();
+ }
+
+ return message;
+ }
+
+ protected String quote(String text) {
+ // Wrap the given text in single quotes for display in error messages.
+ StringBuilder quoted = new StringBuilder("'");
+ quoted.append(text).append("'");
+
+ return quoted.toString();
+ }
+
+ protected String getTokenDisplayString(Token token) {
+ // Produce a printable representation of a token for error messages:
+ // "[no token]" for null, the quoted token text when available,
+ // "end of text" for EOF, or the bracketed numeric token type otherwise.
+
+ if (token == null) {
+ return "[no token]";
+ }
+
+ String text = token.getText();
+
+ if (text != null) {
+ return quote(text);
+ }
+
+ if (token.getType() == Token.EOF) {
+ return "end of text";
+ }
+
+ return "[" + token.getType() + "]";
+ }
+
+ protected String stripEndLetters(String input) {
+ // Remove any trailing run of non-digit characters (e.g. the "th"
+ // in "19th"), keeping only the leading numeric portion.
+ String stripped = input.replaceAll("[^\\d]+$", "");
+
+ return stripped;
+ }
+
+ public static void main(String[] args) {
+ StructuredDateEvaluator evaluator = new ANTLRStructuredDateEvaluator();
+
+ for (String displayDate : args) {
+ try {
+ evaluator.evaluate(displayDate);
+ } catch (StructuredDateFormatException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+}
\ No newline at end of file
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+import java.lang.reflect.InvocationTargetException;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.beanutils.PropertyUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+import org.yaml.snakeyaml.Yaml;
+
+public class StructuredDateEvaluatorTest {
+ public static final String TEST_CASE_FILE = "/test-dates.yaml";
+ public static final List<String> YAML_DATE_SPEC = Arrays.asList("year", "month", "day", "era", "certainty", "qualifierType", "qualifierValue", "qualifierUnit");
+
+ final Logger logger = LoggerFactory.getLogger(StructuredDateEvaluatorTest.class);
+
+ @BeforeClass
+ public void setUp() {
+
+ };
+
+ @Test
+ public void test() {
+ Yaml yaml = new Yaml();
+ Map<String, Object> testCases = (Map<String, Object>) yaml.load(getClass().getResourceAsStream(TEST_CASE_FILE));
+
+ for (String displayDate : testCases.keySet()) {
+ logger.debug("Testing input: " + displayDate);
+
+ Map<String, Object> expectedStructuredDateFields = (Map<String, Object>) testCases.get(displayDate);
+
+ StructuredDate expectedStructuredDate = createStructuredDateFromYamlSpec(displayDate, expectedStructuredDateFields);
+ StructuredDate actualStructuredDate = null;
+
+ try {
+ actualStructuredDate = StructuredDate.parse(displayDate);
+ }
+ catch(StructuredDateFormatException e) {
+ logger.debug(e.getMessage());
+ }
+
+ Assert.assertEquals(actualStructuredDate, expectedStructuredDate);
+ }
+ }
+
+ private StructuredDate createStructuredDateFromYamlSpec(String displayDate, Map<String, Object> structuredDateFields) {
+ StructuredDate structuredDate = null;
+
+ if (structuredDateFields != null) {
+ structuredDate = new StructuredDate();
+
+ for (String propertyName : structuredDateFields.keySet()) {
+ Object value = structuredDateFields.get(propertyName);
+
+ try {
+ Class propertyType = PropertyUtils.getPropertyType(structuredDate, propertyName);
+
+ if (propertyType.equals(Date.class)) {
+ value = createDateFromYamlSpec((List<Object>) value);
+ }
+
+ PropertyUtils.setProperty(structuredDate, propertyName, value);
+ }
+ catch(NoSuchMethodException e) {
+ logger.warn(propertyName + " is not a property");
+ }
+ catch(InvocationTargetException e) {
+ logger.error(propertyName + " accessor threw an exception");
+ }
+ catch(IllegalAccessException e) {
+ logger.error("could not access property " + propertyName);
+ }
+ }
+
+ if (structuredDate.getDisplayDate() == null) {
+ structuredDate.setDisplayDate(displayDate);
+ }
+ }
+
+ return structuredDate;
+ }
+
+ private Date createDateFromYamlSpec(List<Object> dateFields) {
+ Date date = new Date();
+ Iterator<Object> fieldIterator = dateFields.iterator();
+
+ for (String propertyName : YAML_DATE_SPEC) {
+ Object value = fieldIterator.hasNext() ? fieldIterator.next() : null;
+
+ try {
+ Class propertyType = PropertyUtils.getPropertyType(date, propertyName);
+
+ if (value != null && Enum.class.isAssignableFrom(propertyType)) {
+ value = Enum.valueOf(propertyType, (String) value);
+ }
+
+ PropertyUtils.setProperty(date, propertyName, value);
+ }
+ catch(NoSuchMethodException e) {
+ logger.warn(propertyName + " is not a property");
+ }
+ catch(InvocationTargetException e) {
+ logger.error(propertyName + " accessor threw an exception");
+ }
+ catch(IllegalAccessException e) {
+ logger.error("could not access property " + propertyName);
+ }
+ }
+
+ return date;
+ }
+}
--- /dev/null
+package org.collectionspace.services.structureddate;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+public class StructuredDateTest {
+ final Logger logger = LoggerFactory.getLogger(StructuredDateTest.class);
+
+ // Placeholder set-up hook; nothing to initialize yet.
+ @BeforeClass
+ public void setUp() {
+ }
+
+ // Placeholder for scalar-date computation tests; intentionally empty.
+ @Test
+ public void testScalarDates() {
+ }
+}
--- /dev/null
+log4j.rootLogger=DEBUG, STDOUT, R
+
+log4j.appender.STDOUT=org.apache.log4j.ConsoleAppender
+log4j.appender.STDOUT.layout=org.apache.log4j.PatternLayout
+
+# Pattern to output the caller's file name and line number.
+log4j.appender.STDOUT.layout.ConversionPattern=%d %-5p [%t] [%c:%L] %m%n
+
+log4j.appender.R=org.apache.log4j.RollingFileAppender
+log4j.appender.R.File=target/test.log
+
+log4j.appender.R.MaxFileSize=100KB
+# Keep one backup file
+log4j.appender.R.MaxBackupIndex=1
+
+log4j.appender.R.layout=org.apache.log4j.PatternLayout
+log4j.appender.R.layout.ConversionPattern=%d %-5p [%t] [%c:%L] %m%n
+
+#packages
+log4j.logger.org.collectionspace=DEBUG
+log4j.logger.org.apache.commons.beanutils=WARN
\ No newline at end of file
--- /dev/null
+# This file contains test cases for structured date evaluation.
+#
+# Specify an input display date, and an expected output structured date. Fields expected to be null
+# in the structured date may be omitted.
+#
+# If the structured date's displayDate field is omitted, it will be expected to be equal to the
+# input display date. This is the usual case.
+#
+# The StructuredDate's earliestSingleDate and latestDate should be specified as a
+# list: [year, month, day, era, certainty, qualifierType, qualifierValue, qualifierUnit]
+# Values at the end of the list may be omitted, in which case they will be expected to be null.
+#
+# If an input is expected to fail, specify null for the expected structured date.
+#
+# Input Display Date Expected Structured Date
+# =======================================================================================================
+#
+# Valid dates
+# -------------------------------------------------------------------------------------------------------
+
+ '1997': # year
+ earliestSingleDate: [1997, 1, 1, CE]
+ latestDate: [1997, 12, 31, CE]
+
+ '53 BC': # year
+ earliestSingleDate: [ 53, 1, 1, BCE]
+ latestDate: [ 53, 12, 31, BCE]
+
+ '53 bc': # year
+ earliestSingleDate: [ 53, 1, 1, BCE]
+ latestDate: [ 53, 12, 31, BCE]
+
+ '53 B.C.': # year
+ earliestSingleDate: [ 53, 1, 1, BCE]
+ latestDate: [ 53, 12, 31, BCE]
+
+ '53 b.c.': # year
+ earliestSingleDate: [ 53, 1, 1, BCE]
+ latestDate: [ 53, 12, 31, BCE]
+
+ '53 BCE': # year
+ earliestSingleDate: [ 53, 1, 1, BCE]
+ latestDate: [ 53, 12, 31, BCE]
+
+ '53 bce': # year
+ earliestSingleDate: [ 53, 1, 1, BCE]
+ latestDate: [ 53, 12, 31, BCE]
+
+ '53 B.C.E.': # year
+ earliestSingleDate: [ 53, 1, 1, BCE]
+ latestDate: [ 53, 12, 31, BCE]
+
+ '53 b.c.e.': # year
+ earliestSingleDate: [ 53, 1, 1, BCE]
+ latestDate: [ 53, 12, 31, BCE]
+
+ '1974 AD': # year
+ earliestSingleDate: [1974, 1, 1, CE]
+ latestDate: [1974, 12, 31, CE]
+
+ '1974 ad': # year
+ earliestSingleDate: [1974, 1, 1, CE]
+ latestDate: [1974, 12, 31, CE]
+
+ '1974 A.D.': # year
+ earliestSingleDate: [1974, 1, 1, CE]
+ latestDate: [1974, 12, 31, CE]
+
+ '1974 a.d.': # year
+ earliestSingleDate: [1974, 1, 1, CE]
+ latestDate: [1974, 12, 31, CE]
+
+ '1974 c.e.': # year
+ earliestSingleDate: [1974, 1, 1, CE]
+ latestDate: [1974, 12, 31, CE]
+
+ '1974 ce': # year
+ earliestSingleDate: [1974, 1, 1, CE]
+ latestDate: [1974, 12, 31, CE]
+
+ "1800's-1900's": # hyphenatedRange, century
+ earliestSingleDate: [1800, 1, 1, CE]
+ latestDate: [1999, 12, 31, CE]
+
+ '19th century-20th century': # hyphenatedRange, century
+ earliestSingleDate: [1801, 1, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ "1700's-20th century": # hyphenatedRange, century
+ earliestSingleDate: [1700, 1, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ "2nd century BC—first century AD": # hyphenatedRange, century (em dash)
+ earliestSingleDate: [ 200, 1, 1, BCE]
+ latestDate: [ 100, 12, 31, CE]
+
+ "fourth century-2nd century BC": # hyphenatedRange, century
+ earliestSingleDate: [ 400, 1, 1, BCE]
+ latestDate: [ 101, 12, 31, BCE]
+
+ "300s-200s BC": # hyphenatedRange, century
+ earliestSingleDate: [ 399, 1, 1, BCE]
+ latestDate: [ 200, 12, 31, BCE]
+
+ "1970's-1980's": # hyphenatedRange, decade
+ earliestSingleDate: [1970, 1, 1, CE]
+ latestDate: [1989, 12, 31, CE]
+
+ "1920s - 1940's": # hyphenatedRange, decade
+ earliestSingleDate: [1920, 1, 1, CE]
+ latestDate: [1949, 12, 31, CE]
+
+ '30s BC - 20s CE': # hyphenatedRange, decade
+ earliestSingleDate: [ 39, 1, 1, BCE]
+ latestDate: [ 29, 12, 31, CE]
+
+ '30s-20s BC': # hyphenatedRange, decade
+ earliestSingleDate: [ 39, 1, 1, BCE]
+ latestDate: [ 20, 12, 31, BCE]
+
+ '30s BC-20s BC': # hyphenatedRange, decade
+ earliestSingleDate: [ 39, 1, 1, BCE]
+ latestDate: [ 20, 12, 31, BCE]
+
+ '1974-2013': # hyphenatedRange, year
+ earliestSingleDate: [1974, 1, 1, CE]
+ latestDate: [2013, 12, 31, CE]
+
+ '1974-2013 AD': # hyphenatedRange, year
+ earliestSingleDate: [1974, 1, 1, CE]
+ latestDate: [2013, 12, 31, CE]
+
+ '100 BC-12 BC': # hyphenatedRange, year
+ earliestSingleDate: [ 100, 1, 1, BCE]
+ latestDate: [ 12, 12, 31, BCE]
+
+ '100-12 BC': # hyphenatedRange, year
+ earliestSingleDate: [ 100, 1, 1, BCE]
+ latestDate: [ 12, 12, 31, BCE]
+
+ '12 BC - 30 AD': # hyphenatedRange, year
+ earliestSingleDate: [ 12, 1, 1, BCE]
+ latestDate: [ 30, 12, 31, CE]
+
+ '12 BC - 30': # hyphenatedRange, year
+ earliestSingleDate: [ 12, 1, 1, BCE]
+ latestDate: [ 30, 12, 31, CE]
+
+ 'Spring 1974-Summer 1975': # hyphenatedRange, quarterYear
+ earliestSingleDate: [1974, 4, 1, CE]
+ latestDate: [1975, 9, 30, CE]
+
+ 'last quarter 1974-Summer 1975': # hyphenatedRange, quarterYear
+ earliestSingleDate: [1974, 10, 1, CE]
+ latestDate: [1975, 9, 30, CE]
+
+ '4th quarter 1974-first quarter 1975': # hyphenatedRange, quarterYear
+ earliestSingleDate: [1974, 10, 1, CE]
+ latestDate: [1975, 3, 31, CE]
+
+ '4th quarter 1974-first quarter 1975 A.D.': # hyphenatedRange, quarterYear
+ earliestSingleDate: [1974, 10, 1, CE]
+ latestDate: [1975, 3, 31, CE]
+
+ 'Spring 12 BC-Summer 10': # hyphenatedRange, quarterYear
+ earliestSingleDate: [ 12, 4, 1, BCE]
+ latestDate: [ 10, 9, 30, CE]
+
+ 'Spring 12-Summer 10 BC': # hyphenatedRange, quarterYear
+ earliestSingleDate: [ 12, 4, 1, BCE]
+ latestDate: [ 10, 9, 30, BCE]
+
+ '1st half 2000 - 1st half 2001': # hyphenatedRange, halfYear
+ earliestSingleDate: [2000, 1, 1, CE]
+ latestDate: [2001, 6, 30, CE]
+
+ 'second half 1998 - last half 2003': # hyphenatedRange, halfYear
+ earliestSingleDate: [1998, 7, 1, CE]
+ latestDate: [2003, 12, 31, CE]
+
+ 'second half 1998 - last half 2003 ad': # hyphenatedRange, halfYear
+ earliestSingleDate: [1998, 7, 1, CE]
+ latestDate: [2003, 12, 31, CE]
+
+ '2nd half 12 - first half 11 BC': # hyphenatedRange, halfYear
+ earliestSingleDate: [ 12, 7, 1, BCE]
+ latestDate: [ 11, 6, 30, BCE]
+
+ '2nd half 12 b.c. - first half 2 AD': # hyphenatedRange, halfYear
+ earliestSingleDate: [ 12, 7, 1, BCE]
+ latestDate: [ 2, 6, 30, CE]
+
+ '1st half 2000': # halfYear
+ earliestSingleDate: [2000, 1, 1, CE]
+ latestDate: [2000, 6, 30, CE]
+
+ '2nd half 2000': # halfYear
+ earliestSingleDate: [2000, 7, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ 'last half 2000': # halfYear
+ earliestSingleDate: [2000, 7, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ 'early 1960s': # partialDecade
+ earliestSingleDate: [1960, 1, 1, CE]
+ latestDate: [1963, 12, 31, CE]
+
+ "mid 1960's": # partialDecade
+ earliestSingleDate: [1964, 1, 1, CE]
+ latestDate: [1966, 12, 31, CE]
+
+ 'late 1960s': # partialDecade
+ earliestSingleDate: [1967, 1, 1, CE]
+ latestDate: [1969, 12, 31, CE]
+
+ 'early 1960s ce': # partialDecade
+ earliestSingleDate: [1960, 1, 1, CE]
+ latestDate: [1963, 12, 31, CE]
+
+ "mid-1960's a.d.": # partialDecade
+ earliestSingleDate: [1964, 1, 1, CE]
+ latestDate: [1966, 12, 31, CE]
+
+ 'late 1960s AD': # partialDecade
+ earliestSingleDate: [1967, 1, 1, CE]
+ latestDate: [1969, 12, 31, CE]
+
+ 'early 530s bc': # partialDecade
+ earliestSingleDate: [ 539, 1, 1, BCE]
+ latestDate: [ 537, 12, 31, BCE]
+
+ "middle 530's b.c.": # partialDecade
+ earliestSingleDate: [ 536, 1, 1, BCE]
+ latestDate: [ 534, 12, 31, BCE]
+
+ 'late 530s B.C.': # partialDecade
+ earliestSingleDate: [ 533, 1, 1, BCE]
+ latestDate: [ 530, 12, 31, BCE]
+
+ "mid 1960's-early 1970's": # hyphenatedRange, partialDecade
+ earliestSingleDate: [1964, 1, 1, CE]
+ latestDate: [1973, 12, 31, CE]
+
+ "mid 1960's-early 1970's a.d.": # hyphenatedRange, partialDecade
+ earliestSingleDate: [1964, 1, 1, CE]
+ latestDate: [1973, 12, 31, CE]
+
+ "mid 520's-early 480's b.c.": # hyphenatedRange, partialDecade
+ earliestSingleDate: [ 526, 1, 1, BCE]
+ latestDate: [ 487, 12, 31, BCE]
+
+ '1960s': # decade
+ earliestSingleDate: [1960, 1, 1, CE]
+ latestDate: [1969, 12, 31, CE]
+
+ "1920's": # decade
+ earliestSingleDate: [1920, 1, 1, CE]
+ latestDate: [1929, 12, 31, CE]
+
+ '1920S': # decade
+ earliestSingleDate: [1920, 1, 1, CE]
+ latestDate: [1929, 12, 31, CE]
+
+ "20's BC": # decade
+ earliestSingleDate: [ 29, 1, 1, BCE]
+ latestDate: [ 20, 12, 31, BCE]
+
+ '1960s-1980s': # hyphenatedRange, decade
+ earliestSingleDate: [1960, 1, 1, CE]
+ latestDate: [1989, 12, 31, CE]
+
+ '1960s-1980s A.D.': # hyphenatedRange, decade
+ earliestSingleDate: [1960, 1, 1, CE]
+ latestDate: [1989, 12, 31, CE]
+
+ '1960s–1920s B.C.': # hyphenatedRange, decade (en dash)
+ earliestSingleDate: [1969, 1, 1, BCE]
+ latestDate: [1920, 12, 31, BCE]
+
+ "1960's-early 1980's": # hyphenatedRange, decade, partialDecade
+ earliestSingleDate: [1960, 1, 1, CE]
+ latestDate: [1983, 12, 31, CE]
+
+ "late 1820's - 1880's": # hyphenatedRange, partialDecade, decade
+ earliestSingleDate: [1827, 1, 1, CE]
+ latestDate: [1889, 12, 31, CE]
+
+ 'early 1900s': # partialCentury
+ earliestSingleDate: [1900, 1, 1, CE]
+ latestDate: [1933, 12, 31, CE]
+
+ 'mid 1900s': # partialCentury
+ earliestSingleDate: [1933, 1, 1, CE]
+ latestDate: [1966, 12, 31, CE]
+
+ "Late 1900's": # partialCentury
+ earliestSingleDate: [1966, 1, 1, CE]
+ latestDate: [1999, 12, 31, CE]
+
+ 'early 1900s ad': # partialCentury
+ earliestSingleDate: [1900, 1, 1, CE]
+ latestDate: [1933, 12, 31, CE]
+
+ 'mid 1900s CE': # partialCentury
+ earliestSingleDate: [1933, 1, 1, CE]
+ latestDate: [1966, 12, 31, CE]
+
+ "Late 1900's A.D.": # partialCentury
+ earliestSingleDate: [1966, 1, 1, CE]
+ latestDate: [1999, 12, 31, CE]
+
+ 'early 18th century': # partialCentury
+ earliestSingleDate: [1701, 1, 1, CE]
+ latestDate: [1734, 12, 31, CE]
+
+ 'mid-18th century': # partialCentury
+ earliestSingleDate: [1734, 1, 1, CE]
+ latestDate: [1767, 12, 31, CE]
+
+ 'Late 18th century': # partialCentury
+ earliestSingleDate: [1767, 1, 1, CE]
+ latestDate: [1800, 12, 31, CE]
+
+ 'early 100s b.c.': # partialCentury
+ earliestSingleDate: [ 199, 1, 1, BCE]
+ latestDate: [ 166, 12, 31, BCE]
+
+ 'middle 100s bc': # partialCentury
+ earliestSingleDate: [ 166, 1, 1, BCE]
+ latestDate: [ 133, 12, 31, BCE]
+
+ "Late 100's B.C.": # partialCentury
+ earliestSingleDate: [ 133, 1, 1, BCE]
+ latestDate: [ 100, 12, 31, BCE]
+
+ 'early 2nd century b.c.': # partialCentury
+ earliestSingleDate: [ 200, 1, 1, BCE]
+ latestDate: [ 167, 12, 31, BCE]
+
+ 'mid 2nd century bc': # partialCentury
+ earliestSingleDate: [ 167, 1, 1, BCE]
+ latestDate: [ 134, 12, 31, BCE]
+
+ 'Late 2nd Century B.C.': # partialCentury
+ earliestSingleDate: [ 134, 1, 1, BCE]
+ latestDate: [ 101, 12, 31, BCE]
+
+ 'mid-17th century - late 18th century': # hyphenatedRange, partialCentury
+ earliestSingleDate: [1634, 1, 1, CE]
+ latestDate: [1800, 12, 31, CE]
+
+ 'mid 17th century - late 18th century a.d.': # hyphenatedRange, partialCentury
+ earliestSingleDate: [1634, 1, 1, CE]
+ latestDate: [1800, 12, 31, CE]
+
+ 'middle 17th century AD - late 18th century a.d.': # hyphenatedRange, partialCentury
+ earliestSingleDate: [1634, 1, 1, CE]
+ latestDate: [1800, 12, 31, CE]
+
+ 'late 1800s - early 1900s': # hyphenatedRange, partialCentury
+ earliestSingleDate: [1866, 1, 1, CE]
+ latestDate: [1933, 12, 31, CE]
+
+ 'late 1800s - early 1900s ce': # hyphenatedRange, partialCentury
+ earliestSingleDate: [1866, 1, 1, CE]
+ latestDate: [1933, 12, 31, CE]
+
+ "mid 200s-late 100's BCE": # hyphenatedRange, partialCentury
+ earliestSingleDate: [ 266, 1, 1, BCE]
+ latestDate: [ 100, 12, 31, BCE]
+
+ 'late 4th century - early 3rd century bc': # hyphenatedRange, partialCentury
+ earliestSingleDate: [ 334, 1, 1, BCE]
+ latestDate: [ 267, 12, 31, BCE]
+
+ 'late 4th century B.C. - early 3rd century BC': # hyphenatedRange, partialCentury
+ earliestSingleDate: [ 334, 1, 1, BCE]
+ latestDate: [ 267, 12, 31, BCE]
+
+ 'late 2nd century B.C. - mid 1st century': # hyphenatedRange, partialCentury
+ earliestSingleDate: [ 134, 1, 1, BCE]
+ latestDate: [ 67, 12, 31, CE]
+
+ 'late 2nd century B.C. - early 3rd century A.D.': # hyphenatedRange, partialCentury
+ earliestSingleDate: [ 134, 1, 1, BCE]
+ latestDate: [ 234, 12, 31, CE]
+
+ '1st quarter 1900s': # quarterCentury
+ earliestSingleDate: [1900, 1, 1, CE]
+ latestDate: [1924, 12, 31, CE]
+
+ 'second quarter 1900s': # quarterCentury
+ earliestSingleDate: [1925, 1, 1, CE]
+ latestDate: [1949, 12, 31, CE]
+
+ "3rd quarter 1900's": # quarterCentury
+ earliestSingleDate: [1950, 1, 1, CE]
+ latestDate: [1974, 12, 31, CE]
+
+ "fourth quarter 1900's": # quarterCentury
+ earliestSingleDate: [1975, 1, 1, CE]
+ latestDate: [1999, 12, 31, CE]
+
+ 'first quarter 18th century': # quarterCentury
+ earliestSingleDate: [1701, 1, 1, CE]
+ latestDate: [1725, 12, 31, CE]
+
+ '2nd quarter 18th century': # quarterCentury
+ earliestSingleDate: [1726, 1, 1, CE]
+ latestDate: [1750, 12, 31, CE]
+
+ 'third quarter 18th century': # quarterCentury
+ earliestSingleDate: [1751, 1, 1, CE]
+ latestDate: [1775, 12, 31, CE]
+
+ '4th quarter 18th century': # quarterCentury
+ earliestSingleDate: [1776, 1, 1, CE]
+ latestDate: [1800, 12, 31, CE]
+
+ 'first quarter 1st century a.d.': # quarterCentury
+ earliestSingleDate: [ 1, 1, 1, CE]
+ latestDate: [ 25, 12, 31, CE]
+
+ '4th quarter 22nd century CE': # quarterCentury
+ earliestSingleDate: [2176, 1, 1, CE]
+ latestDate: [2200, 12, 31, CE]
+
+ 'First quarter 300s BC': # quarterCentury
+ earliestSingleDate: [ 399, 1, 1, BCE]
+ latestDate: [ 375, 12, 31, BCE]
+
+ "2nd quarter 300's BC": # quarterCentury
+ earliestSingleDate: [ 374, 1, 1, BCE]
+ latestDate: [ 350, 12, 31, BCE]
+
+ 'third quarter 300s BC': # quarterCentury
+ earliestSingleDate: [ 349, 1, 1, BCE]
+ latestDate: [ 325, 12, 31, BCE]
+
+ '4th quarter 300s B.C.': # quarterCentury
+ earliestSingleDate: [ 324, 1, 1, BCE]
+ latestDate: [ 300, 12, 31, BCE]
+
+ 'First quarter 1st century BC': # quarterCentury
+ earliestSingleDate: [ 100, 1, 1, BCE]
+ latestDate: [ 76, 12, 31, BCE]
+
+ '2nd quarter 1st century BC': # quarterCentury
+ earliestSingleDate: [ 75, 1, 1, BCE]
+ latestDate: [ 51, 12, 31, BCE]
+
+ 'third quarter 1st century BC': # quarterCentury
+ earliestSingleDate: [ 50, 1, 1, BCE]
+ latestDate: [ 26, 12, 31, BCE]
+
+ '4th quarter 1st century BC': # quarterCentury
+ earliestSingleDate: [ 25, 1, 1, BCE]
+ latestDate: [ 1, 12, 31, BCE]
+
+ '1st quarter 2nd century B.C.': # quarterCentury
+ earliestSingleDate: [ 200, 1, 1, BCE]
+ latestDate: [ 176, 12, 31, BCE]
+
+ 'second quarter 2nd century BCE': # quarterCentury
+ earliestSingleDate: [ 175, 1, 1, BCE]
+ latestDate: [ 151, 12, 31, BCE]
+
+ '3rd quarter 2nd century b.c.': # quarterCentury
+ earliestSingleDate: [ 150, 1, 1, BCE]
+ latestDate: [ 126, 12, 31, BCE]
+
+ 'fourth quarter 2nd century b.c.e.': # quarterCentury
+ earliestSingleDate: [ 125, 1, 1, BCE]
+ latestDate: [ 101, 12, 31, BCE]
+
+ 'last quarter 2nd century b.c.e.': # quarterCentury
+ earliestSingleDate: [ 125, 1, 1, BCE]
+ latestDate: [ 101, 12, 31, BCE]
+
+ '1st quarter 18th century - 3rd quarter 19th century': # hyphenatedRange, quarterCentury
+ earliestSingleDate: [1701, 1, 1, CE]
+ latestDate: [1875, 12, 31, CE]
+
+ '1st quarter 18th century - 3rd quarter 19th century a.d.': # hyphenatedRange, quarterCentury
+ earliestSingleDate: [1701, 1, 1, CE]
+ latestDate: [1875, 12, 31, CE]
+
+ '4th quarter 3rd century - 3rd quarter 2nd century b.c.': # hyphenatedRange, quarterCentury
+ earliestSingleDate: [ 225, 1, 1, BCE]
+ latestDate: [ 126, 12, 31, BCE]
+
+ '4th quarter 1st century b.c.-1st quarter 1st century ce': # hyphenatedRange, quarterCentury
+ earliestSingleDate: [ 25, 1, 1, BCE]
+ latestDate: [ 25, 12, 31, CE]
+
+ '1st half 1900s': # halfCentury
+ earliestSingleDate: [1900, 1, 1, CE]
+ latestDate: [1949, 12, 31, CE]
+
+ 'second half 1900s': # halfCentury
+ earliestSingleDate: [1950, 1, 1, CE]
+ latestDate: [1999, 12, 31, CE]
+
+ 'last half 1900s': # halfCentury
+ earliestSingleDate: [1950, 1, 1, CE]
+ latestDate: [1999, 12, 31, CE]
+
+ 'first half 20th century': # halfCentury
+ earliestSingleDate: [1901, 1, 1, CE]
+ latestDate: [1950, 12, 31, CE]
+
+ '2nd half 20th century': # halfCentury
+ earliestSingleDate: [1951, 1, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ '1st half 1900s A.D.': # halfCentury
+ earliestSingleDate: [1900, 1, 1, CE]
+ latestDate: [1949, 12, 31, CE]
+
+ "second half 1900's c.e.": # halfCentury
+ earliestSingleDate: [1950, 1, 1, CE]
+ latestDate: [1999, 12, 31, CE]
+
+ '1st half 1900s B.C.': # halfCentury
+ earliestSingleDate: [1999, 1, 1, BCE]
+ latestDate: [1950, 12, 31, BCE]
+
+ "second half 1900's b.c.e.": # halfCentury
+ earliestSingleDate: [1949, 1, 1, BCE]
+ latestDate: [1900, 12, 31, BCE]
+
+ 'first half first century b.c.': # halfCentury
+ earliestSingleDate: [ 100, 1, 1, BCE]
+ latestDate: [ 51, 12, 31, BCE]
+
+ "second half 1st century BCE": # halfCentury
+ earliestSingleDate: [ 50, 1, 1, BCE]
+ latestDate: [ 1, 12, 31, BCE]
+
+ '1st half 1900s - 1st half 2000s': # hyphenatedRange, halfCentury
+ earliestSingleDate: [1900, 1, 1, CE]
+ latestDate: [2049, 12, 31, CE]
+
+ '1st half 1900s - 1st half 2000s ad': # hyphenatedRange, halfCentury
+ earliestSingleDate: [1900, 1, 1, CE]
+ latestDate: [2049, 12, 31, CE]
+
+ '2nd half 900s - 1st half 700s bc': # hyphenatedRange, halfCentury
+ earliestSingleDate: [ 949, 1, 1, BCE]
+ latestDate: [ 750, 12, 31, BCE]
+
+ '2nd half 200s bc - 1st half 100s': # hyphenatedRange, halfCentury
+ earliestSingleDate: [ 249, 1, 1, BCE]
+ latestDate: [ 149, 12, 31, CE]
+
+ '1900s': # century
+ earliestSingleDate: [1900, 1, 1, CE]
+ latestDate: [1999, 12, 31, CE]
+
+ '20th century': # century
+ earliestSingleDate: [1901, 1, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ '20th century AD': # century
+ earliestSingleDate: [1901, 1, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ "1600's": # century
+ earliestSingleDate: [1600, 1, 1, CE]
+ latestDate: [1699, 12, 31, CE]
+
+ '21st century': # century
+ earliestSingleDate: [2001, 1, 1, CE]
+ latestDate: [2100, 12, 31, CE]
+
+ '1st century BC': # century
+ earliestSingleDate: [ 100, 1, 1, BCE]
+ latestDate: [ 1, 12, 31, BCE]
+
+ '8th century BC': # century
+ earliestSingleDate: [ 800, 1, 1, BCE]
+ latestDate: [ 701, 12, 31, BCE]
+
+ "100's BC": # century
+ earliestSingleDate: [ 199, 1, 1, BCE]
+ latestDate: [ 100, 12, 31, BCE]
+
+ '19th century - 20th century': # hyphenatedRange, century
+ earliestSingleDate: [1801, 1, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ '19th century - 20th century ad': # hyphenatedRange, century
+ earliestSingleDate: [1801, 1, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ '3rd century - 2nd century B.C.': # hyphenatedRange, century
+ earliestSingleDate: [ 300, 1, 1, BCE]
+ latestDate: [ 101, 12, 31, BCE]
+
+ '3rd century BC - 1st century A.D.': # hyphenatedRange, century
+ earliestSingleDate: [ 300, 1, 1, BCE]
+ latestDate: [ 100, 12, 31, CE]
+
+ 'late 1880s - early 1900s': # hyphenatedRange, partialDecade, partialCentury
+ earliestSingleDate: [1887, 1, 1, CE]
+ latestDate: [1933, 12, 31, CE]
+
+ '1st millennium': # millennium
+ earliestSingleDate: [ 1, 1, 1, CE]
+ latestDate: [1000, 12, 31, CE]
+
+ '2nd millennium': # millennium
+ earliestSingleDate: [1001, 1, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ '3rd millennium': # millennium
+ earliestSingleDate: [2001, 1, 1, CE]
+ latestDate: [3000, 12, 31, CE]
+
+ '5th millennium': # millennium
+ earliestSingleDate: [4001, 1, 1, CE]
+ latestDate: [5000, 12, 31, CE]
+
+ '1st Millennium BC': # millennium
+ earliestSingleDate: [1000, 1, 1, BCE]
+ latestDate: [ 1, 12, 31, BCE]
+
+ '2nd millennium B.C.E.': # millennium
+ earliestSingleDate: [2000, 1, 1, BCE]
+ latestDate: [1001, 12, 31, BCE]
+
+ '1st millennium-2nd millennium': # hyphenatedRange, millennium
+ earliestSingleDate: [ 1, 1, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ '1st millennium-2nd millennium ad': # hyphenatedRange, millennium
+ earliestSingleDate: [ 1, 1, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ '3rd millennium-2nd millennium bc': # hyphenatedRange, millennium
+ earliestSingleDate: [3000, 1, 1, BCE]
+ latestDate: [1001, 12, 31, BCE]
+
+ 'Winter 2000': # quarterYear, seasonYear
+ earliestSingleDate: [2000, 1, 1, CE]
+ latestDate: [2000, 3, 31, CE]
+
+ '2000, Winter': # quarterYear, seasonYear
+ earliestSingleDate: [2000, 1, 1, CE]
+ latestDate: [2000, 3, 31, CE]
+
+ 'Spring 2000': # quarterYear, seasonYear
+ earliestSingleDate: [2000, 4, 1, CE]
+ latestDate: [2000, 6, 30, CE]
+
+ 'Spr 2000': # quarterYear, seasonYear
+ earliestSingleDate: [2000, 4, 1, CE]
+ latestDate: [2000, 6, 30, CE]
+
+ 'Summer 2000': # quarterYear, seasonYear
+ earliestSingleDate: [2000, 7, 1, CE]
+ latestDate: [2000, 9, 30, CE]
+
+ 'Sum 2000': # quarterYear, seasonYear
+ earliestSingleDate: [2000, 7, 1, CE]
+ latestDate: [2000, 9, 30, CE]
+
+ 'Fall 2000': # quarterYear, seasonYear
+ earliestSingleDate: [2000, 10, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ 'Fal 2000': # quarterYear, seasonYear
+ earliestSingleDate: [2000, 10, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ 'Autumn 2000': # quarterYear, seasonYear
+ earliestSingleDate: [2000, 10, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ 'Aut 2000': # quarterYear, seasonYear
+ earliestSingleDate: [2000, 10, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ 'Sum 12 BC': # quarterYear, seasonYear
+ earliestSingleDate: [ 12, 7, 1, BCE]
+ latestDate: [ 12, 9, 30, BCE]
+
+ 'BC 12, Summer': # quarterYear, seasonYear
+ earliestSingleDate: [ 12, 7, 1, BCE]
+ latestDate: [ 12, 9, 30, BCE]
+
+ 'Winter 2000/2001': # yearSpanningWinter
+ earliestSingleDate: [2000, 12, 1, CE]
+ latestDate: [2001, 3, 31, CE]
+
+ 'Winter, 2000/2001': # yearSpanningWinter
+ earliestSingleDate: [2000, 12, 1, CE]
+ latestDate: [2001, 3, 31, CE]
+
+ 'Winter 10/9 BC': # yearSpanningWinter
+ earliestSingleDate: [ 10, 12, 1, BCE]
+ latestDate: [ 9, 3, 31, BCE]
+
+ 'Winter 2000/2001 - Winter 2003/2004': # hyphenatedRange, yearSpanningWinter
+ earliestSingleDate: [2000, 12, 1, CE]
+ latestDate: [2004, 3, 31, CE]
+
+ 'Winter 2000/2001 - Winter 2003/2004 CE': # hyphenatedRange, yearSpanningWinter
+ earliestSingleDate: [2000, 12, 1, CE]
+ latestDate: [2004, 3, 31, CE]
+
+ 'Winter 10/9 BC - Winter 9/8 BC': # hyphenatedRange, yearSpanningWinter
+ earliestSingleDate: [ 10, 12, 1, BCE]
+ latestDate: [ 8, 3, 31, BCE]
+
+ 'Winter 10/9 - Winter 9/8 BC': # hyphenatedRange, yearSpanningWinter
+ earliestSingleDate: [ 10, 12, 1, BCE]
+ latestDate: [ 8, 3, 31, BCE]
+
+ '1st Quarter 2000': # quarterYear
+ earliestSingleDate: [2000, 1, 1, CE]
+ latestDate: [2000, 3, 31, CE]
+
+ '2nd quarter 2000': # quarterYear
+ earliestSingleDate: [2000, 4, 1, CE]
+ latestDate: [2000, 6, 30, CE]
+
+ '3rd quarter 2000': # quarterYear
+ earliestSingleDate: [2000, 7, 1, CE]
+ latestDate: [2000, 9, 30, CE]
+
+ '4th quarter 2000': # quarterYear
+ earliestSingleDate: [2000, 10, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ 'last quarter 2000': # quarterYear
+ earliestSingleDate: [2000, 10, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ '1st quarter - 3rd quarter 2000': # hyphenatedRange, quarterYear
+ earliestSingleDate: [2000, 1, 1, CE]
+ latestDate: [2000, 9, 30, CE]
+
+ '2nd quarter - last quarter 2000': # hyphenatedRange, quarterYear
+ earliestSingleDate: [2000, 4, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ 'Spring - Summer 2000': # hyphenatedRange, quarterYear, seasonYear
+ earliestSingleDate: [2000, 4, 1, CE]
+ latestDate: [2000, 9, 30, CE]
+
+ 'Spring-Summer 12 BC': # hyphenatedRange, quarterYear, seasonYear
+ earliestSingleDate: [ 12, 4, 1, BCE]
+ latestDate: [ 12, 9, 30, BCE]
+
+ 'early 2000': # partialYear
+ earliestSingleDate: [2000, 1, 1, CE]
+ latestDate: [2000, 4, 30, CE]
+
+ 'mid-2000': # partialYear
+ earliestSingleDate: [2000, 5, 1, CE]
+ latestDate: [2000, 8, 31, CE]
+
+ 'Middle 2000': # partialYear
+ earliestSingleDate: [2000, 5, 1, CE]
+ latestDate: [2000, 8, 31, CE]
+
+ 'mid 2000': # partialYear
+ earliestSingleDate: [2000, 5, 1, CE]
+ latestDate: [2000, 8, 31, CE]
+
+ 'Late 2000': # partialYear
+ earliestSingleDate: [2000, 9, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ 'Late 1200 BC': # partialYear
+ earliestSingleDate: [1200, 9, 1, BCE]
+ latestDate: [1200, 12, 31, BCE]
+
+ 'early 2000 - mid-2001': # hyphenatedRange, partialYear
+ earliestSingleDate: [2000, 1, 1, CE]
+ latestDate: [2001, 8, 31, CE]
+
+ 'early 2000 - mid-2001 ad': # hyphenatedRange, partialYear
+ earliestSingleDate: [2000, 1, 1, CE]
+ latestDate: [2001, 8, 31, CE]
+
+ 'late 157 - early 155 bc': # hyphenatedRange, partialYear
+ earliestSingleDate: [ 157, 9, 1, BCE]
+ latestDate: [ 155, 4, 30, BCE]
+
+ 'late 2 b.c. - early 2 a.d.': # hyphenatedRange, partialYear
+ earliestSingleDate: [ 2, 9, 1, BCE]
+ latestDate: [ 2, 4, 30, CE]
+
+ 'May 2007': # month
+ earliestSingleDate: [2007, 5, 1, CE]
+ latestDate: [2007, 5, 31, CE]
+
+ 'May 2007 AD': # month
+ earliestSingleDate: [2007, 5, 1, CE]
+ latestDate: [2007, 5, 31, CE]
+
+ 'Feb. 2007': # month
+ earliestSingleDate: [2007, 2, 1, CE]
+ latestDate: [2007, 2, 28, CE]
+
+ 'Feb. 2007 BC': # month
+ earliestSingleDate: [2007, 2, 1, BCE]
+ latestDate: [2007, 2, 28, BCE]
+
+ '2000, February': # month (leap year)
+ earliestSingleDate: [2000, 2, 1, CE]
+ latestDate: [2000, 2, 29, CE]
+
+ '1900 Feb': # month
+ earliestSingleDate: [1900, 2, 1, CE]
+ latestDate: [1900, 2, 28, CE]
+
+ 'B.C.E. 960 Feb': # month
+ earliestSingleDate: [ 960, 2, 1, BCE]
+ latestDate: [ 960, 2, 28, BCE]
+
+ '2000 Feb-2001 June': # hyphenatedRange, month
+ earliestSingleDate: [2000, 2, 1, CE]
+ latestDate: [2001, 6, 30, CE]
+
+ '2000 Feb - AD 2001 June': # hyphenatedRange, month
+ earliestSingleDate: [2000, 2, 1, CE]
+ latestDate: [2001, 6, 30, CE]
+
+ 'Dec 58 BC-Feb 3 AD': # hyphenatedRange, month
+ earliestSingleDate: [ 58, 12, 1, BCE]
+ latestDate: [ 3, 2, 28, CE]
+
+ 'Dec 58 BC-Feb 3': # hyphenatedRange, month
+ earliestSingleDate: [ 58, 12, 1, BCE]
+ latestDate: [ 3, 2, 28, CE]
+
+ 'May 130-May 120 BC': # hyphenatedRange, month
+ earliestSingleDate: [ 130, 5, 1, BCE]
+ latestDate: [ 120, 5, 31, BCE]
+
+ '3/12/2013': # date
+ earliestSingleDate: [2013, 3, 12, CE]
+
+ '03/05/13': # date
+ earliestSingleDate: [ 3, 5, 13, CE]
+
+ '7-6-1773': # date
+ earliestSingleDate: [1773, 7, 6, CE]
+
+ '7-6-1773 AD': # date
+ earliestSingleDate: [1773, 7, 6, CE]
+
+ '07-06-1773': # date
+ earliestSingleDate: [1773, 7, 6, CE]
+
+ '1773-07-06': # date
+ earliestSingleDate: [1773, 7, 6, CE]
+
+ '1773/07/06': # date
+ earliestSingleDate: [1773, 7, 6, CE]
+
+ '13-12-3': # date
+ earliestSingleDate: [ 13, 12, 3, CE]
+
+ '12-12-3': # date
+ earliestSingleDate: [ 12, 12, 3, CE]
+
+ '3-12-12': # date
+ earliestSingleDate: [ 3, 12, 12, CE]
+
+ '3-12-32': # date
+ earliestSingleDate: [ 32, 3, 12, CE]
+
+ 'May 06 1952': # date
+ earliestSingleDate: [1952, 5, 6, CE]
+
+ 'May 12 1952': # date
+ earliestSingleDate: [1952, 5, 12, CE]
+
+ 'May 12, 1952': # date
+ earliestSingleDate: [1952, 5, 12, CE]
+
+ 'may 12, 1952': # date
+ earliestSingleDate: [1952, 5, 12, CE]
+
+ 'September 19, 1821': # date
+ earliestSingleDate: [1821, 9, 19, CE]
+
+ 'Sep 19, 1821': # date
+ earliestSingleDate: [1821, 9, 19, CE]
+
+ 'SEPT 19, 1821': # date
+ earliestSingleDate: [1821, 9, 19, CE]
+
+ 'Sept. 19, 1821': # date
+ earliestSingleDate: [1821, 9, 19, CE]
+
+ 'July 1, 560 BC': # date
+ earliestSingleDate: [ 560, 7, 1, BCE]
+
+ 'July 4th, 1776': # date
+ earliestSingleDate: [1776, 7, 4, CE]
+
+ 'September 11th, 2001': # date
+ earliestSingleDate: [2001, 9, 11, CE]
+
+ '2000, April 1': # date
+ earliestSingleDate: [2000, 4, 1, CE]
+
+ '2000 , april 1': # date
+ earliestSingleDate: [2000, 4, 1, CE]
+
+ '2000 Apr 1': # date
+ earliestSingleDate: [2000, 4, 1, CE]
+
+ 'BC 560 July 1': # date
+ earliestSingleDate: [ 560, 7, 1, BCE]
+
+ '5/3/1962-4/5/2013': # hyphenatedRange, date
+ earliestSingleDate: [1962, 5, 3, CE]
+ latestDate: [2013, 4, 5, CE]
+
+ '5/3/1962-4/5/2013 BC': # hyphenatedRange, date
+ earliestSingleDate: [1962, 5, 3, BCE]
+ latestDate: [2013, 4, 5, BCE]
+
+ '5/3/1962 BC-4/5/2013': # hyphenatedRange, date
+ earliestSingleDate: [1962, 5, 3, BCE]
+ latestDate: [2013, 4, 5, CE]
+
+ '19th-20th century': # hyphenatedRange, nthCenturyRange
+ earliestSingleDate: [1801, 1, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ '19th-20th century a.d.': # hyphenatedRange, nthCenturyRange
+ earliestSingleDate: [1801, 1, 1, CE]
+ latestDate: [2000, 12, 31, CE]
+
+ '5th - 1st century bc': # hyphenatedRange, nthCenturyRange
+ earliestSingleDate: [ 500, 1, 1, BCE]
+ latestDate: [ 1, 12, 31, BCE]
+
+ 'late 19th - mid-20th century': # hyphenatedRange, nthCenturyRange (partial)
+ earliestSingleDate: [1867, 1, 1, CE]
+ latestDate: [1967, 12, 31, CE]
+
+ 'late 18th - 19th century': # hyphenatedRange, nthCenturyRange (partial)
+ earliestSingleDate: [1767, 1, 1, CE]
+ latestDate: [1900, 12, 31, CE]
+
+ '18th - early 19th century': # hyphenatedRange, nthCenturyRange (partial)
+ earliestSingleDate: [1701, 1, 1, CE]
+ latestDate: [1834, 12, 31, CE]
+
+ 'late 10th - early 8th century bc': # hyphenatedRange, nthCenturyRange (partial)
+ earliestSingleDate: [ 934, 1, 1, BCE]
+ latestDate: [ 767, 12, 31, BCE]
+
+ '10th - early 8th century bc': # hyphenatedRange, nthCenturyRange (partial)
+ earliestSingleDate: [1000, 1, 1, BCE]
+ latestDate: [ 767, 12, 31, BCE]
+
+ 'late 10th - 9th century bc': # hyphenatedRange, nthCenturyRange (partial)
+ earliestSingleDate: [ 934, 1, 1, BCE]
+ latestDate: [ 801, 12, 31, BCE]
+
+ 'Apr-Oct 2013': # hyphenatedRange, monthInYearRange (31 day end month)
+ earliestSingleDate: [2013, 4, 1, CE]
+ latestDate: [2013, 10, 31, CE]
+
+ 'Apr-Sept 2013': # hyphenatedRange, monthInYearRange (30 day end month)
+ earliestSingleDate: [2013, 4, 1, CE]
+ latestDate: [2013, 9, 30, CE]
+
+ 'Jan-Feb 2003': # hyphenatedRange, monthInYearRange (28 day end month)
+ earliestSingleDate: [2003, 1, 1, CE]
+ latestDate: [2003, 2, 28, CE]
+
+ 'January - Feb 2004': # hyphenatedRange, monthInYearRange (leap year: divisible by 4)
+ earliestSingleDate: [2004, 1, 1, CE]
+ latestDate: [2004, 2, 29, CE]
+
+ 'Jan-Feb, 1900': # hyphenatedRange, monthInYearRange (not a leap year: divisible by 100)
+ earliestSingleDate: [1900, 1, 1, CE]
+ latestDate: [1900, 2, 28, CE]
+
+ 'january-feb, 2000': # hyphenatedRange, monthInYearRange (leap year: divisible by 400)
+ earliestSingleDate: [2000, 1, 1, CE]
+ latestDate: [2000, 2, 29, CE]
+
+ 'jan-february, 1100': # hyphenatedRange, monthInYearRange (leap year: divisible by 100, but before Gregorian calendar)
+ earliestSingleDate: [1100, 1, 1, CE]
+ latestDate: [1100, 2, 29, CE]
+
+ 'Jan-Mar 100 B.C.': # hyphenatedRange, monthInYearRange
+ earliestSingleDate: [ 100, 1, 1, BCE]
+ latestDate: [ 100, 3, 31, BCE]
+
+ 'December 3-9, 2004': # hyphenatedRange, strDayInMonthRange
+ earliestSingleDate: [2004, 12, 3, CE]
+ latestDate: [2004, 12, 9, CE]
+
+ 'Jan 5 - 29 1850': # hyphenatedRange, strDayInMonthRange
+ earliestSingleDate: [1850, 1, 5, CE]
+ latestDate: [1850, 1, 29, CE]
+
+ 'Jan 5 - 29 100 BCE': # hyphenatedRange, strDayInMonthRange
+ earliestSingleDate: [ 100, 1, 5, BCE]
+ latestDate: [ 100, 1, 29, BCE]
+
+ '1/5-29/1850': # hyphenatedRange, numDayInMonthRange
+ earliestSingleDate: [1850, 1, 5, CE]
+ latestDate: [1850, 1, 29, CE]
+
+ '09/05 - 18/1776': # hyphenatedRange, numDayInMonthRange
+ earliestSingleDate: [1776, 9, 5, CE]
+ latestDate: [1776, 9, 18, CE]
+
+ '09/05 - 18/300 BC': # hyphenatedRange, numDayInMonthRange
+ earliestSingleDate: [ 300, 9, 5, BCE]
+ latestDate: [ 300, 9, 18, BCE]
+
+ # 'circa 2013': # uncertainDate, year
+ # earliestSingleDate: [2013, 1, 1, CE, null, MINUS, 4, YEARS]
+ # latestDate: [2013, 12, 31, CE, null, PLUS, 4, YEARS]
+
+ 'circa 2013': # uncertainDate, year
+ earliestSingleDate: [2009, 1, 1, CE]
+ latestDate: [2017, 12, 31, CE]
+
+ # 'ca 2013': # uncertainDate, year - using qualifier/value/unit fields
+ # earliestSingleDate: [2013, 1, 1, CE, null, MINUS, 4, YEARS]
+ # latestDate: [2013, 12, 31, CE, null, PLUS, 4, YEARS]
+
+ 'ca 2013': # uncertainDate, year
+ earliestSingleDate: [2009, 1, 1, CE]
+ latestDate: [2017, 12, 31, CE]
+
+ # 'ca. 2013': # uncertainDate, year - using qualifier/value/unit fields
+ # earliestSingleDate: [2013, 1, 1, CE, null, MINUS, 4, YEARS]
+ # latestDate: [2013, 12, 31, CE, null, PLUS, 4, YEARS]
+
+ 'ca. 2013': # uncertainDate, year - calculating the uncertainty into the year field
+ earliestSingleDate: [2009, 1, 1, CE]
+ latestDate: [2017, 12, 31, CE]
+
+ # 'c. 1950': # uncertainDate, year - using qualifier/value/unit fields
+ # earliestSingleDate: [1950, 1, 1, CE, null, MINUS, 8, YEARS]
+ # latestDate: [1950, 12, 31, CE, null, PLUS, 8, YEARS]
+
+ 'c. 1950': # uncertainDate, year - calculating the uncertainty into the year field
+ earliestSingleDate: [1942, 1, 1, CE]
+ latestDate: [1958, 12, 31, CE]
+
+ # 'c1950': # uncertainDate, year - using qualifier/value/unit fields
+ # earliestSingleDate: [1950, 1, 1, CE, null, MINUS, 8, YEARS]
+ # latestDate: [1950, 12, 31, CE, null, PLUS, 8, YEARS]
+
+ 'c1950': # uncertainDate, year - calculating the uncertainty into the year field
+ earliestSingleDate: [1942, 1, 1, CE]
+ latestDate: [1958, 12, 31, CE]
+
+ # 'Circa 10 BC': # uncertainDate, year - using qualifier/value/unit fields
+ # earliestSingleDate: [ 10, 1, 1, BCE, null, MINUS, 106, YEARS]
+ # latestDate: [ 10, 12, 31, BCE, null, PLUS, 106, YEARS]
+
+ 'Circa 10 BC': # uncertainDate, year - calculating the uncertainty into the year field
+ earliestSingleDate: [ 115, 1, 1, BCE]
+ latestDate: [ 96, 12, 31, CE]
+
+ # 'Circa 10': # uncertainDate, year - using qualifier/value/unit fields
+ # earliestSingleDate: [ 10, 1, 1, CE, null, MINUS, 105, YEARS]
+ # latestDate: [ 10, 12, 31, CE, null, PLUS, 105, YEARS]
+
+ 'Circa 10': # uncertainDate, year - calculating the uncertainty into the year field
+ earliestSingleDate: [ 96, 1, 1, BCE]
+ latestDate: [ 115, 12, 31, CE]
+
+ # 'circa 2100': # uncertainDate, year - using qualifier/value/unit fields
+ # earliestSingleDate: [2100, 1, 1, CE, null, MINUS, 0, YEARS]
+ # latestDate: [2100, 12, 31, CE, null, PLUS, 0, YEARS]
+
+ 'circa 2100': # uncertainDate, year - calculating the uncertainty into the year field
+ earliestSingleDate: [2100, 1, 1, CE]
+ latestDate: [2100, 12, 31, CE]
+
+ # 'c 1950-2013': # uncertainDate, hyphenatedRange, year - using qualifier/value/unit fields
+ # earliestSingleDate: [1950, 1, 1, CE, null, MINUS, 8, YEARS]
+ # latestDate: [2013, 12, 31, CE, null, PLUS, 4, YEARS]
+
+ 'c 1950-2013': # uncertainDate, hyphenatedRange, year - calculating the uncertainty into the year field
+ earliestSingleDate: [1942, 1, 1, CE]
+ latestDate: [2017, 12, 31, CE]
+
+ "ca. 1730's": # uncertainDate, decade - calculating the uncertainty into the year field
+ earliestSingleDate: [1711, 1, 1, CE]
+ latestDate: [1757, 12, 31, CE]
+
+ "3/4/2000.": # oneDisplayDate - with period
+ earliestSingleDate: [2000, 3, 4, CE]
+
+ "3/4/2000?": # oneDisplayDate - with question mark
+ earliestSingleDate: [2000, 3, 4, CE]
+
+# -------------------------------------------------------------------------------------------------------
+# Invalid dates
+# -------------------------------------------------------------------------------------------------------
+
+ '': # empty
+ null
+
+ 'Octo 19, 1821': # invalid month
+ null
+
+ 'October. 19, 1821': # long month with a dot
+ null
+
+ 'May-Foo 2000': # invalid month
+ null
+
+ 'Something else': # totally not a date
+ null
+
+ 'BC 12, Sumer': # invalid season
+ null
+
+ '5th quarter 1900': # invalid quarter number
+ null
+
+ '0th quarter 1900': # invalid quarter number
+ null
+
+ 'third half 1900': # invalid half number
+ null
+
+ '0th half 1900': # invalid half number
+ null
+
+ '00s': # invalid century
+ null
+
+ '3/4/2005 (something)': # parentheses
+ null
+
+ '3/4?/2005': # question mark
+ null
+
\ No newline at end of file
--- /dev/null
+package org.collectionspace.services.taxonomy.nuxeo;
+
+import org.collectionspace.services.client.CollectionSpaceClient;
+import org.collectionspace.services.client.TaxonomyAuthorityClient;
+
+public class TaxonBotGardenConstants {
+ public static final String NATURALHISTORY_SCHEMA_NAME = TaxonomyAuthorityClient.SERVICE_ITEM_NAME + CollectionSpaceClient.PART_LABEL_SEPARATOR + CollectionSpaceClient.NATURALHISTORY_EXTENSION_NAME;
+
+ public final static String CONSERVATION_CATEGORY_SCHEMA_NAME = NATURALHISTORY_SCHEMA_NAME;
+ public final static String CONSERVATION_CATEGORY_FIELD_NAME = "plantAttributesGroupList/plantAttributesGroup/conservationCategory";
+
+ public final static String ACCESS_CODE_SCHEMA_NAME = NATURALHISTORY_SCHEMA_NAME;
+ public final static String ACCESS_CODE_FIELD_NAME = "accessRestrictions";
+ public final static String ACCESS_CODE_DEAD_VALUE = "Dead";
+ public final static String ACCESS_CODE_UNRESTRICTED_VALUE = "Unrestricted";
+
+ public final static String COMMON_VOCABULARY_SHORTID = "common";
+}
*/
package org.collectionspace.services.taxonomy.nuxeo;
+import org.collectionspace.services.client.CollectionSpaceClient;
+import org.collectionspace.services.client.TaxonomyAuthorityClient;
+
/**
* TaxonConstants provides constants for Taxonomy documents
*
public final static String NUXEO_DOCTYPE = "Taxon";
public final static String NUXEO_SCHEMA_NAME = "taxon";
public final static String NUXEO_DC_TITLE = "CollectionSpace-Taxon";
+
+ public final static String CORE_SCHEMA_NAME = CollectionSpaceClient.COLLECTIONSPACE_CORE_SCHEMA;
+ public final static String COMMON_SCHEMA_NAME = TaxonomyAuthorityClient.SERVICE_ITEM_COMMON_PART_NAME; //"taxon_common";
+
+ public final static String IN_AUTHORITY_SCHEMA_NAME = COMMON_SCHEMA_NAME;
+ public final static String IN_AUTHORITY_FIELD_NAME = "inAuthority";
+
+ public final static String DISPLAY_NAME_SCHEMA_NAME = COMMON_SCHEMA_NAME;
+ public final static String DISPLAY_NAME_FIELD_NAME = "taxonTermGroupList/taxonTermGroup/termDisplayName";
+
+ public final static String FORMATTED_DISPLAY_NAME_SCHEMA_NAME = COMMON_SCHEMA_NAME;
+ public final static String FORMATTED_DISPLAY_NAME_FIELD_NAME = "taxonTermGroupList/taxonTermGroup/termFormattedDisplayName";
+
+ public final static String REFNAME_SCHEMA_NAME = COMMON_SCHEMA_NAME;
+ public final static String REFNAME_FIELD_NAME = "refName";
+
+ public static final String WORKFLOW_STATE_SCHEMA_NAME = CORE_SCHEMA_NAME;
+ public static final String WORKFLOW_STATE_FIELD_NAME = CollectionSpaceClient.COLLECTIONSPACE_CORE_WORKFLOWSTATE; //"workflowState";
}