release 4.3

git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/branches/data-access/spd-plugin-fwk/3.1@142009 82a268e6-3cf1-43bd-a215-b396298e98cf
Lucio Lelii 2017-02-01 16:59:14 +00:00
commit 50c8d4ac0e
44 changed files with 1532 additions and 0 deletions

10
.classpath Normal file
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java"/>
<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources"/>
<classpathentry kind="src" output="target/test-classes" path="src/test/java"/>
<classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>

23
.project Normal file
@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>spd-plugin-framework</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
</natures>
</projectDescription>

@@ -0,0 +1,7 @@
#Fri Jul 06 17:13:26 CEST 2012
eclipse.preferences.version=1
encoding//src/main/java=UTF-8
encoding//src/main/resources=UTF-8
encoding//src/test/java=UTF-8
encoding//src/test/resources=UTF-8
encoding/<project>=UTF-8

@@ -0,0 +1,6 @@
#Fri Jul 06 17:13:26 CEST 2012
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.source=1.6

@@ -0,0 +1,5 @@
#Fri Jul 06 17:04:03 CEST 2012
activeProfiles=
eclipse.preferences.version=1
resolveWorkspaceProjects=true
version=1

1
distro/INSTALL Normal file
@@ -0,0 +1 @@
Used as a Library in the gCube Framework

6
distro/LICENSE Normal file
@@ -0,0 +1,6 @@
gCube System - License
------------------------------------------------------------
The gCube/gCore software is licensed as Free Open Source software conveying to the EUPL (http://ec.europa.eu/idabc/eupl).
The software and documentation is provided by its authors/distributors "as is" and no expressed or
implied warranty is given for its use, quality or fitness for a particular case.

2
distro/MAINTAINERS Normal file
@@ -0,0 +1,2 @@
Lucio Lelii (lucio.lelii@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo".

47
distro/README Normal file
@@ -0,0 +1,47 @@
The gCube System - spd client library
------------------------------------------------------------
This work has been partially supported by the following European projects: DILIGENT (FP6-2003-IST-2),
D4Science (FP7-INFRA-2007-1.2.2), D4Science-II (FP7-INFRA-2008-1.2.2), iMarine (FP7-INFRASTRUCTURES-2011-2),
and EUBrazilOpenBio (FP7-ICT-2011-EU-Brazil).
Authors
-------
* Lucio Lelii (lucio.lelii@isti.cnr.it), CNR Pisa,
Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo".
Version and Release Date
------------------------
v. 1.0.0 (04-05-2012)
* First release
Description
-----------
Download information
--------------------
Documentation
-------------
Documentation is available on-line from the Projects Documentation Wiki:
https://gcube.wiki.gcube-system.org/gcube/index.php/Biodiversity_Access
Licensing
---------
This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

22
distro/changelog.xml Normal file
@@ -0,0 +1,22 @@
<ReleaseNotes>
<Changeset component="org.gcube.data-access.spd-client-library.1-0-0" date="2012-09-14">
<Change>species products discovery client library release</Change>
</Changeset>
<Changeset component="org.gcube.data-access.spd-client-library.1-1-0" date="2012-10-18">
<Change>new calls added</Change>
</Changeset>
<Changeset component="org.gcube.data-access.spd-plugin-framework.1-2-0" date="2012-11-30">
<Change>a bug on the Writer has been fixed</Change>
</Changeset>
<Changeset component="org.gcube.data-access.spd-plugin-framework.2-0-0" date="2013-01-17">
<Change>model part split into a separate library</Change>
</Changeset>
<Changeset component="org.gcube.data-access.spd-plugin-framework.2-2-0" date="2013-01-17">
<Change>bug fixing</Change>
</Changeset>
<Changeset component="org.gcube.data-access.spd-plugin-framework.2-3-0" date="2013-07-18">
<Change>Unfold Capability for plugin added</Change>
</Changeset>
<Changeset component="org.gcube.data-access.spd-plugin-framework.3-1-0" date="2017-02-01">
<Change>gCore dependency removed</Change>
</Changeset>
</ReleaseNotes>

42
distro/descriptor.xml Normal file
@@ -0,0 +1,42 @@
<assembly
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
<id>servicearchive</id>
<formats>
<format>tar.gz</format>
</formats>
<baseDirectory>/</baseDirectory>
<fileSets>
<fileSet>
<directory>${distroDirectory}</directory>
<outputDirectory>/</outputDirectory>
<useDefaultExcludes>true</useDefaultExcludes>
<includes>
<include>README</include>
<include>LICENSE</include>
<include>INSTALL</include>
<include>MAINTAINERS</include>
<include>changelog.xml</include>
</includes>
<fileMode>755</fileMode>
<filtered>true</filtered>
</fileSet>
</fileSets>
<files>
<file>
<source>${distroDirectory}/profile.xml</source>
<outputDirectory>/</outputDirectory>
<filtered>true</filtered>
</file>
<file>
<source>target/${build.finalName}.jar</source>
<outputDirectory>/${artifactId}</outputDirectory>
</file>
<file>
<source>${distroDirectory}/svnpath.txt</source>
<outputDirectory>/${artifactId}</outputDirectory>
<filtered>true</filtered>
</file>
</files>
</assembly>

29
distro/profile.xml Normal file
@@ -0,0 +1,29 @@
<?xml version="1.0" encoding="UTF-8"?>
<Resource>
<ID></ID>
<Type>Service</Type>
<Profile>
<Description>${description}</Description>
<Class>DataAccess</Class>
<Name>${artifactId}</Name>
<Version>1.0.0</Version>
<Packages>
<Software>
<Description>${description}</Description>
<Name>${artifactId}</Name>
<Version>${version}</Version>
<MavenCoordinates>
<groupId>${groupId}</groupId>
<artifactId>${artifactId}</artifactId>
<version>${version}</version>
</MavenCoordinates>
<Type>library</Type>
<Files>
<File>${build.finalName}.jar</File>
</Files>
</Software>
</Packages>
</Profile>
</Resource>

0
distro/svnpath.txt Normal file

107
pom.xml Normal file
@@ -0,0 +1,107 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.gcube.tools</groupId>
<artifactId>maven-parent</artifactId>
<version>1.0.0</version>
</parent>
<groupId>org.gcube.data.spd</groupId>
<artifactId>spd-plugin-framework</artifactId>
<version>3.1.0-SNAPSHOT</version>
<name>spd-plugin-framework</name>
<description>SPD plugin framework</description>
<dependencies>
<dependency>
<groupId>org.gcube.resources</groupId>
<artifactId>common-gcore-resources</artifactId>
<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.data.spd</groupId>
<artifactId>spd-model</artifactId>
<version>[3.0.0-SNAPSHOT,4.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.execution</groupId>
<artifactId>grs2library</artifactId>
<version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
</dependency>
<dependency>
<groupId>org.gcube.data.access</groupId>
<artifactId>streams</artifactId>
<version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
</dependency>
</dependencies>
<properties>
<distroDirectory>${project.basedir}/distro</distroDirectory>
</properties>
<build>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.6</source>
<target>1.6</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>2.5</version>
<executions>
<execution>
<id>copy-profile</id>
<phase>install</phase>
<goals>
<goal>copy-resources</goal>
</goals>
<configuration>
<outputDirectory>target</outputDirectory>
<resources>
<resource>
<directory>${distroDirectory}</directory>
<filtering>true</filtering>
<includes>
<include>profile.xml</include>
</includes>
</resource>
</resources>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.2</version>
<configuration>
<descriptors>
<descriptor>${distroDirectory}/descriptor.xml</descriptor>
</descriptors>
</configuration>
<executions>
<execution>
<id>servicearchive</id>
<phase>install</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

@@ -0,0 +1,105 @@
package org.gcube.data.spd.plugin.fwk;
import java.util.Collections;
import java.util.Set;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.data.spd.model.Conditions;
import org.gcube.data.spd.model.PropertySupport;
import org.gcube.data.spd.model.RepositoryInfo;
import org.gcube.data.spd.model.products.ResultItem;
import org.gcube.data.spd.model.util.Capabilities;
import org.gcube.data.spd.plugin.fwk.capabilities.ClassificationCapability;
import org.gcube.data.spd.plugin.fwk.capabilities.ExpansionCapability;
import org.gcube.data.spd.plugin.fwk.capabilities.MappingCapability;
import org.gcube.data.spd.plugin.fwk.capabilities.OccurrencesCapability;
import org.gcube.data.spd.plugin.fwk.capabilities.UnfoldCapability;
public abstract class AbstractPlugin implements PropertySupport, Searchable<ResultItem>{
private boolean initialized = false;
private boolean useCache= false;
public void initialize(ServiceEndpoint resource) throws Exception{
initialized= true;
}
public void update(ServiceEndpoint resource) throws Exception{}
public void shutdown() throws Exception{}
public ClassificationCapability getClassificationInterface(){return null;}
public MappingCapability getMappingInterface(){return null;}
public ExpansionCapability getExpansionInterface(){return null;}
public OccurrencesCapability getOccurrencesInterface(){return null;}
public UnfoldCapability getUnfoldInterface(){return null;}
public Set<Conditions> getSupportedProperties(){
return Collections.emptySet();
}
public Set<Capabilities> getSupportedCapabilities() {
return Collections.emptySet();
}
public abstract RepositoryInfo getRepositoryInfo();
@Override
public Class<ResultItem> getHandledClass() {
return ResultItem.class;
}
/*
@Override
public abstract void searchByScientificName(String word,
ObjectWriter<ResultItem> writer, Condition... properties) throws ExternalRepositoryException;
*/
public abstract String getRepositoryName();
public abstract String getDescription();
public boolean isUseCache() {
return useCache;
}
public void setUseCache(boolean useCache) {
this.useCache = useCache;
}
public boolean isInitialized() {
return initialized;
}
public boolean isRemote(){
return false;
}
@Override
public String toString() {
return getRepositoryName()+"(use-cache="+isUseCache()+")";
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof AbstractPlugin)) return false;
AbstractPlugin ap = (AbstractPlugin) obj;
if (ap.getRepositoryName()==null || ap.getRepositoryName().equals("")) return false;
return ap.getRepositoryName().equals(this.getRepositoryName());
}
@Override
public int hashCode() {
return this.getRepositoryName().hashCode();
}
}
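For reference, a minimal plugin built on the contract above might look like the sketch below. This is illustrative only and not part of this commit: the class name, package and returned strings are hypothetical, RepositoryInfo construction is omitted because its API is not shown here, and it assumes PropertySupport is already satisfied by the getSupportedProperties() default above.

package org.gcube.data.spd.plugin.fwk.examples;

import org.gcube.data.spd.model.Condition;
import org.gcube.data.spd.model.RepositoryInfo;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.model.products.ResultItem;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;

// Hypothetical example plugin: implements only the abstract part of AbstractPlugin.
public class ExamplePlugin extends AbstractPlugin {

    @Override
    public String getRepositoryName() {
        return "ExampleRepository";
    }

    @Override
    public String getDescription() {
        return "example plugin illustrating the framework contract";
    }

    @Override
    public RepositoryInfo getRepositoryInfo() {
        return null; // a real plugin returns its repository metadata here
    }

    @Override
    public void searchByScientificName(String word, ObjectWriter<ResultItem> writer,
            Condition... properties) throws ExternalRepositoryException {
        // query the external repository for 'word' and stream each match to the writer,
        // stopping early when !writer.isAlive()
    }
}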

@@ -0,0 +1,30 @@
package org.gcube.data.spd.plugin.fwk;
public class Labels {
public static final String RANK_LABEL="rank";
public static final String ACCORDINGTO_LABEL="accordingTo";
public static final String SCIENTIFICNAME_LABEL="scientificName";
public static final String COMMONNAME_LABEL="commonName";
public static final String COMMONNAMES_LABEL="commonNames";
public static final String PROVIDER_LABEL="provider";
public static final String CITATION_LABEL="citation";
public static final String PARENT_TAG="parent";
public static final String PRODUCTS_LABEL="products";
public static final String PRODUCT_LABEL="product";
public static final String TYPE_LABEL="type";
public static final String KEY_LABEL="key";
public static final String DATASET_TAG="dataSet";
public static final String DATAPROVIDER_TAG="dataProvider";
public static final String NAME_TAG = "name";
public static final String LANGUAGE_TAG = "language";
public static final String COUNT_LABEL = "count";
public static final String CREDITS_LABEL = "credits";
}

@@ -0,0 +1,15 @@
package org.gcube.data.spd.plugin.fwk;
import org.gcube.data.spd.model.Condition;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
public interface Searchable<T extends ResultElement>{
public void searchByScientificName(String word, ObjectWriter<T> writer, Condition ... properties) throws ExternalRepositoryException;
public Class<T> getHandledClass();
}

@@ -0,0 +1,60 @@
package org.gcube.data.spd.plugin.fwk.capabilities;
import java.util.Iterator;
import java.util.List;
import org.gcube.data.spd.model.PropertySupport;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.model.exceptions.IdNotValidException;
import org.gcube.data.spd.model.exceptions.MethodNotSupportedException;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.spd.plugin.fwk.Searchable;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
public abstract class ClassificationCapability implements PropertySupport, Searchable<TaxonomyItem>{
/**
* retrieves all children given a taxon id
*
* @param taxonId the taxon id
* @return the list of child taxa
*/
public abstract List<TaxonomyItem> retrieveTaxonChildrenByTaxonId(String taxonId) throws IdNotValidException, ExternalRepositoryException;
/**
* retrieves the taxa for the given ids and streams them to the writer
*
* @param ids the taxon ids
* @param writer the writer the retrieved taxa are written to
*/
public abstract void retrieveTaxonByIds(Iterator<String> ids, ClosableWriter<TaxonomyItem> writer) throws ExternalRepositoryException;
/**
* retrieves a taxon by id
*
* @param id the taxon id
* @return the taxon
*/
public abstract TaxonomyItem retrieveTaxonById(String id) throws IdNotValidException, ExternalRepositoryException;
/**
*
* retrieves the synonyms of the taxon with the given id
*
* @param writer the writer the synonyms are written to
* @param id the taxon id
*/
public void getSynonymnsById(ObjectWriter<TaxonomyItem> writer, String id) throws IdNotValidException, MethodNotSupportedException, ExternalRepositoryException{
throw new MethodNotSupportedException();
}
@Override
public Class<TaxonomyItem> getHandledClass() {
return TaxonomyItem.class;
}
}

@@ -0,0 +1,10 @@
package org.gcube.data.spd.plugin.fwk.capabilities;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
public interface ExpansionCapability {
public void getSynonyms(ObjectWriter<String> writer, String scientificName) throws ExternalRepositoryException;
}
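A hypothetical implementation (not part of this commit) just streams each synonym it finds to the writer, checking isAlive() to stop early; the lookupSynonyms helper below is a placeholder for the plugin's own repository query.

import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.plugin.fwk.capabilities.ExpansionCapability;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;

// Hypothetical sketch: a real plugin would query its repository for synonyms.
public class ExampleExpansion implements ExpansionCapability {
    @Override
    public void getSynonyms(ObjectWriter<String> writer, String scientificName)
            throws ExternalRepositoryException {
        String[] synonyms = lookupSynonyms(scientificName); // hypothetical lookup
        for (String synonym : synonyms) {
            if (!writer.isAlive()) return; // the consumer is gone, stop writing
            writer.write(synonym);
        }
    }

    private String[] lookupSynonyms(String scientificName) {
        return new String[0]; // placeholder
    }
}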

@@ -0,0 +1,11 @@
package org.gcube.data.spd.plugin.fwk.capabilities;
import org.gcube.data.spd.model.products.Image;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
public interface ImagesCapability {
public void getImagesById(ObjectWriter<Image> writer, String ... ids) throws Exception;
}

@@ -0,0 +1,17 @@
package org.gcube.data.spd.plugin.fwk.capabilities;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
public interface MappingCapability{
/**
*
* writes to the given writer the scientific names related to the common name passed as input
*
* @param writer the writer the scientific names are written to
* @param commonName the common name to map
* @throws ExternalRepositoryException if the external repository cannot be queried
*/
public void getRelatedScientificNames(ObjectWriter<String> writer, String commonName) throws ExternalRepositoryException;
}

@@ -0,0 +1,38 @@
package org.gcube.data.spd.plugin.fwk.capabilities;
import java.util.Iterator;
import org.gcube.data.spd.model.PropertySupport;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.plugin.fwk.Searchable;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
public abstract class OccurrencesCapability implements PropertySupport, Searchable<OccurrencePoint>{
/**
* retrieves all occurrences for the given product keys
*
* @param writer the stream the retrieved occurrences are written to
* @param keys an iterator over product keys
* @throws ExternalRepositoryException if the external repository cannot be queried
*/
public abstract void getOccurrencesByProductKeys(ClosableWriter<OccurrencePoint> writer, Iterator<String> keys) throws ExternalRepositoryException;
/**
* retrieves all occurrences for the given ids
*
* @param writer the stream the retrieved occurrences are written to
* @param ids an iterator over occurrence ids
* @throws ExternalRepositoryException if the external repository cannot be queried
*/
public abstract void getOccurrencesByIds(ClosableWriter<OccurrencePoint> writer, Iterator<String> ids) throws ExternalRepositoryException;
@Override
public Class<OccurrencePoint> getHandledClass() {
return OccurrencePoint.class;
}
}

@@ -0,0 +1,10 @@
package org.gcube.data.spd.plugin.fwk.capabilities;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
public interface UnfoldCapability {
public void unfold(ObjectWriter<String> writer, String scientificName) throws ExternalRepositoryException;
}

@@ -0,0 +1,45 @@
package org.gcube.data.spd.plugin.fwk.readers;
import java.util.concurrent.BlockingQueue;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.AbstractLocalWrapper;
import org.gcube.data.streams.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class AbstractLocalReader<T> implements Stream<T>{
protected static final Logger logger = LoggerFactory.getLogger(AbstractLocalReader.class);
protected BlockingQueue<T> queue;
protected T element = null;
protected int timeoutInSeconds= 2;
AbstractLocalWrapper<T> wrapper;
public AbstractLocalReader(AbstractLocalWrapper<T> wrapper) {
queue = wrapper.getQueue();
this.wrapper = wrapper;
}
public void setTimeoutInSeconds(int timeoutInSeconds) {
this.timeoutInSeconds = timeoutInSeconds;
}
@Override
public T next() {
return element;
}
@Override
public void remove() {}
}

@@ -0,0 +1,48 @@
package org.gcube.data.spd.plugin.fwk.readers;
import java.net.URI;
import java.util.concurrent.TimeUnit;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.AbstractLocalWrapper;
public class LocalReader<T> extends AbstractLocalReader<T> {
public LocalReader(AbstractLocalWrapper<T> wrapper) {
super(wrapper);
}
@Override
public boolean hasNext() {
if (queue==null) return false;
else{
try {
element=null;
while (!this.wrapper.isClosed() && element==null )
element= queue.poll(timeoutInSeconds, TimeUnit.SECONDS);
if (element ==null) element=queue.poll();
} catch (InterruptedException e) {
logger.warn("the queue is empty",e);
}
return element!=null;
}
}
@Override
public URI locator() {
return null;
}
@Override
public void close() {
logger.debug("closing reader");
if (wrapper.isClosed())
wrapper.disposeBuffer();
else wrapper.close();
}
@Override
public boolean isClosed() {
return wrapper.isClosed();
}
}

@@ -0,0 +1,19 @@
package org.gcube.data.spd.plugin.fwk.readers;
import org.gcube.data.spd.model.binding.Bindings;
import org.gcube.data.spd.model.products.OccurrencePoint;
public class OccurrencesReader extends RSReader<OccurrencePoint>{
public OccurrencesReader(String locator) throws Exception {
super(locator);
}
@Override
public OccurrencePoint transform(String serializedItem) throws Exception {
return Bindings.fromXml(serializedItem);
}
}

@@ -0,0 +1,51 @@
package org.gcube.data.spd.plugin.fwk.readers;
import gr.uoa.di.madgik.grs.reader.ForwardReader;
import gr.uoa.di.madgik.grs.reader.GRS2ReaderException;
import gr.uoa.di.madgik.grs.record.GenericRecord;
import gr.uoa.di.madgik.grs.record.field.StringField;
import java.net.URI;
import java.util.Iterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class RSReader<T> implements Iterator<T>{
private static final Logger logger = LoggerFactory.getLogger(RSReader.class);
private Iterator<GenericRecord> it;
private ForwardReader<GenericRecord> reader;
public RSReader(String locator) throws Exception{
reader=new ForwardReader<GenericRecord>(new URI(locator));
reader.setIteratorTimeout(3000);
it =reader.iterator();
}
@Override
public boolean hasNext() {
if (it.hasNext()) return true;
else {
try {
reader.close();
} catch (GRS2ReaderException e) {
logger.error("error closing reader",e);
}
return false;
}
}
@Override
public T next() {
try {
return transform(((StringField)it.next().getField("result")).getPayload());
} catch (Exception e) {
logger.error("error getting tree",e);
return null;
}
}
public abstract T transform(String serializedItem) throws Exception;
@Override
public void remove() {}
}

@@ -0,0 +1,19 @@
package org.gcube.data.spd.plugin.fwk.readers;
import org.gcube.data.spd.model.binding.Bindings;
import org.gcube.data.spd.model.products.ResultItem;
public class ResultItemReader extends RSReader<ResultItem>{
public ResultItemReader(String locator) throws Exception {
super(locator);
}
@Override
public ResultItem transform(String serializedItem) throws Exception {
return Bindings.fromXml(serializedItem);
}
}

@@ -0,0 +1,16 @@
package org.gcube.data.spd.plugin.fwk.readers;
public class StringReader extends RSReader<String>{
public StringReader(String locator) throws Exception {
super(locator);
}
@Override
public String transform(String serializedItem) throws Exception {
return serializedItem;
}
}
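Client code consumes a gRS2 result set through one of these readers; a minimal sketch follows (not part of this commit; the locator value is whatever gRS2 URI the service hands back).

import org.gcube.data.spd.plugin.fwk.readers.StringReader;

public class ReaderExample {
    // prints every string record behind the given gRS2 locator
    public static void dump(String locator) throws Exception {
        StringReader reader = new StringReader(locator);
        while (reader.hasNext())      // hasNext() closes the underlying reader once exhausted
            System.out.println(reader.next());
    }
}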

@@ -0,0 +1,46 @@
package org.gcube.data.spd.plugin.fwk.util;
import java.io.Serializable;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
public class ElementProperty implements Serializable{
/**
*
*/
private static final long serialVersionUID = 1L;
@XmlAttribute
private String name;
@XmlAttribute
private String value;
protected ElementProperty() {
super();
}
public ElementProperty(String name, String value) {
super();
this.name = name;
this.value = value;
}
public String getName() {
return name;
}
public String getValue() {
return value;
}
}
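Since the class is JAXB-annotated, it can be marshalled directly; a quick sketch (not part of this commit, with hypothetical name/value strings):

import javax.xml.bind.JAXB;
import org.gcube.data.spd.plugin.fwk.util.ElementProperty;

public class ElementPropertyExample {
    public static void main(String[] args) {
        // prints something like <elementProperty name="rank" value="species"/>
        JAXB.marshal(new ElementProperty("rank", "species"), System.out);
    }
}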

@@ -0,0 +1,122 @@
package org.gcube.data.spd.plugin.fwk.util;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import javax.xml.bind.JAXBException;
import org.gcube.data.spd.model.binding.Bindings;
import org.gcube.data.spd.model.exceptions.IdNotValidException;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.plugin.fwk.readers.OccurrencesReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Util {
private static final Logger logger = LoggerFactory.getLogger(Util.class);
public static String keyEnrichment(String provider, String key){
return provider+":"+key;
}
public static String getProviderFromKey(String key) throws IdNotValidException{
int index = key.indexOf(":");
if (index==-1) throw new IdNotValidException();
return key.substring(0, index);
}
public static String getIdFromKey(String key) throws IdNotValidException {
int index = key.indexOf(":");
if (index==-1) throw new IdNotValidException();
return key.substring(index+1, key.length());
}
public static <T extends ResultElement> T copy(T obj) throws JAXBException {
return Bindings.<T>fromXml(Bindings.<T>toXml(obj));
}
public static File getDarwinCoreFile(OccurrencesReader reader) throws Exception{
DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
FileWriter writer = null;
try{
File returnFile = File.createTempFile("darwinCore", ".xml");
writer = new FileWriter(returnFile);
writer.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
writer.append("<SimpleDarwinRecordSet xmlns=\"http://rs.tdwg.org/dwc/xsd/simpledarwincore/\" xmlns:dc=\"http://purl.org/dc/terms/\" xmlns:dwc=\"http://rs.tdwg.org/dwc/terms/\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://rs.tdwg.org/dwc/xsd/simpledarwincore/ http://rs.tdwg.org/dwc/xsd/tdwg_dwc_simple.xsd\">");
while (reader.hasNext()){
writer.append("<SimpleDarwinRecord>");
writer.append("<dc:language>en</dc:language>");
OccurrencePoint occurrence= reader.next();
if (occurrence.getModified() != null)
writer.append("<dc:modified>" + df.format(occurrence.getModified().getTime()) + "</dc:modified>");
if (occurrence.getBasisOfRecord() != null)
writer.append("<dwc:basisOfRecord>" + occurrence.getBasisOfRecord().name() + "</dwc:basisOfRecord>");
if (occurrence.getInstitutionCode() != null)
writer.append("<dwc:institutionCode>" + occurrence.getInstitutionCode() + "</dwc:institutionCode>");
if (occurrence.getCollectionCode() != null)
writer.append("<dwc:collectionCode>" + occurrence.getCollectionCode() + "</dwc:collectionCode>");
if (occurrence.getCatalogueNumber() != null)
writer.append("<dwc:catalogNumber>" + occurrence.getCatalogueNumber() + "</dwc:catalogNumber>");
if (occurrence.getRecordedBy() != null)
writer.append("<dwc:recordedBy>" + occurrence.getRecordedBy() + "</dwc:recordedBy>");
if (occurrence.getScientificName() != null)
writer.append("<dwc:scientificName>" + occurrence.getScientificName() + "</dwc:scientificName>");
if (occurrence.getKingdom() != null)
writer.append("<dwc:kingdom>" + occurrence.getKingdom() + "</dwc:kingdom>");
if (occurrence.getFamily() != null)
writer.append("<dwc:family>" + occurrence.getFamily() + "</dwc:family>");
if (occurrence.getLocality() != null)
writer.append("<dwc:locality>" + occurrence.getLocality() + "</dwc:locality>");
if (occurrence.getEventDate() != null)
{
writer.append("<dwc:eventDate>" + df.format(occurrence.getEventDate().getTime()) + "</dwc:eventDate>");
writer.append("<dwc:year>" + occurrence.getEventDate().get(Calendar.YEAR) + "</dwc:year>");
}
if (occurrence.getDecimalLatitude() != 0.0)
writer.append("<dwc:decimalLatitude>" + occurrence.getDecimalLatitude() + "</dwc:decimalLatitude>");
if (occurrence.getDecimalLongitude() != 0.0)
writer.append("<dwc:decimalLongitude>" + occurrence.getDecimalLongitude() + "</dwc:decimalLongitude>");
if (occurrence.getCoordinateUncertaintyInMeters() != null)
writer.append("<dwc:coordinateUncertaintyInMeters>" + occurrence.getCoordinateUncertaintyInMeters() + "</dwc:coordinateUncertaintyInMeters>");
if (occurrence.getMaxDepth() != 0.0)
writer.append("<dwc:maximumDepthInMeters>" + occurrence.getMaxDepth() + "</dwc:maximumDepthInMeters>");
if (occurrence.getMinDepth() != 0.0)
writer.append("<dwc:minimumDepthInMeters>" + occurrence.getMinDepth() + "</dwc:minimumDepthInMeters>");
writer.append("</SimpleDarwinRecord>");
}
writer.append("</SimpleDarwinRecordSet>");
writer.flush();
writer.close();
return returnFile;
}catch (Exception e) {
logger.error("error writeing occurrences as darwin core",e);
throw e;
}finally{
if (writer != null) {
try {
writer.close();
} catch (IOException e) {
logger.warn("error closing the output stream",e);
}
}
}
}
}
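The key helpers simply prefix an id with the provider name and split it back out; for example (illustrative only, the provider string is hypothetical):

import org.gcube.data.spd.model.exceptions.IdNotValidException;
import org.gcube.data.spd.plugin.fwk.util.Util;

public class KeyExample {
    public static void main(String[] args) throws IdNotValidException {
        String key = Util.keyEnrichment("ExampleRepository", "17589157"); // "ExampleRepository:17589157"
        System.out.println(Util.getProviderFromKey(key)); // ExampleRepository
        System.out.println(Util.getIdFromKey(key));       // 17589157
    }
}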

@@ -0,0 +1,46 @@
package org.gcube.data.spd.plugin.fwk.writers;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.AbstractWrapper;
public abstract class AbstractWriter<T> implements ClosableWriter<T>{
//private Logger logger= LoggerFactory.getLogger(AbstractWriter.class);
protected boolean closed;
private int links =0;
private AbstractWrapper<T> wrapper;
protected int wrote;
protected AbstractWriter(AbstractWrapper<T> wrapper) {
this.wrapper = wrapper;
this.wrapper.register();
}
protected AbstractWrapper<T> getWrapper() {
return wrapper;
}
public synchronized void register(){
links++;
}
public synchronized void register(int links){
this.links+=links;
}
public synchronized void close() {
links--;
if (links<=0){
if (!closed){
if (!this.wrapper.isClosed())this.wrapper.unregister();
closed=true;
} else throw new IllegalStateException("writer already closed");
}
}
}

@@ -0,0 +1,8 @@
package org.gcube.data.spd.plugin.fwk.writers;
public interface ClosableWriter<T> extends ObjectWriter<T> {
public void close();
}

@@ -0,0 +1,17 @@
package org.gcube.data.spd.plugin.fwk.writers;
import org.gcube.data.spd.model.exceptions.StreamException;
public interface ObjectWriter<T> {
public boolean write(T t);
public boolean write(StreamException error);
public boolean isAlive();
}

@@ -0,0 +1,13 @@
package org.gcube.data.spd.plugin.fwk.writers;
public interface RecordWriter<T> {
boolean put(T element);
boolean put(Exception error);
void close();
boolean isClosed();
}

@@ -0,0 +1,32 @@
package org.gcube.data.spd.plugin.fwk.writers;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.plugin.fwk.util.Util;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class ResultElementWriterManager<T extends ResultElement> extends WriterManager<T> {
private static final Logger logger = LoggerFactory.getLogger(ResultElementWriterManager.class);
protected String provider;
public ResultElementWriterManager(String provider) {
super();
this.provider = provider;
}
@Override
public T enrich(T obj) {
try{
return _enrich(Util.<T>copy(obj));
}catch (Exception e) {
logger.error("error enriching object",e);
return null;
}
}
protected abstract T _enrich(T obj);
}

@@ -0,0 +1,79 @@
package org.gcube.data.spd.plugin.fwk.writers;
import org.gcube.data.spd.model.exceptions.InvalidRecordException;
import org.gcube.data.spd.model.exceptions.StreamBlockingException;
import org.gcube.data.spd.model.exceptions.StreamException;
import org.gcube.data.spd.model.exceptions.WrapperAlreadyDisposedException;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.AbstractWrapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Writer<F> extends AbstractWriter<F>{
private static final Logger logger = LoggerFactory.getLogger(Writer.class);
private WriterManager<F> manager = new DefaultWriter();
public Writer(AbstractWrapper<F> wrapper) {
super(wrapper);
}
public Writer(AbstractWrapper<F> wrapper, WriterManager<F> manager) {
super(wrapper);
this.manager = manager;
}
public class DefaultWriter extends WriterManager<F>{}
@Override
public synchronized boolean write(F t) {
if(this.getWrapper().isClosed()) return false;
if (manager.filter(t)){
try{
getWrapper().add(manager.enrich(t));
}catch (InvalidRecordException e) {
logger.warn("error putting a result in the Writer",e);
return false;
}catch (WrapperAlreadyDisposedException e) {
logger.warn("wrapper already disposed",e);
return false;
}
} else{
logger.debug("a result has been filtered");
return false;
}
wrote++;
return true;
}
@Override
public synchronized boolean write(StreamException error) {
try{
getWrapper().add(error);
}catch (InvalidRecordException e) {
logger.warn("error putting execption in the Writer",e);
return false;
}catch (WrapperAlreadyDisposedException e) {
logger.warn("wrapper already disposed",e);
return false;
}
if (error instanceof StreamBlockingException ){
getWrapper().close();
return false;
}
return true;
}
@Override
public boolean isAlive() {
return !getWrapper().isClosed();
}
}

@@ -0,0 +1,30 @@
package org.gcube.data.spd.plugin.fwk.writers;
import org.gcube.data.streams.exceptions.StreamSkipSignal;
import org.gcube.data.streams.generators.Generator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class WriterManager<T> implements Generator<T, T> {
private static final Logger logger = LoggerFactory.getLogger(WriterManager.class);
public T enrich(T obj){ return obj;}
public boolean filter(T obj){return true;}
@Override
public T yield(T element) {
try {
T enrichedElement = this.enrich(element);
if (enrichedElement==null) throw new Exception("error enriching element");
return enrichedElement;
} catch (Exception e) {
logger.debug("skipping the result", e);
throw new StreamSkipSignal();
}
}
}
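Plugins can plug their own manager into a Writer to filter and enrich records before they reach the wrapper; a minimal sketch (not part of this commit, class name hypothetical):

import org.gcube.data.spd.plugin.fwk.writers.WriterManager;

// Hypothetical manager: drops null/blank strings and trims the rest.
public class TrimmingManager extends WriterManager<String> {
    @Override
    public boolean filter(String obj) {
        return obj != null && obj.trim().length() > 0;
    }
    @Override
    public String enrich(String obj) {
        return obj.trim();
    }
}

Such a manager would be passed to the second Writer constructor, e.g. new Writer<String>(wrapper, new TrimmingManager()).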

@@ -0,0 +1,36 @@
package org.gcube.data.spd.plugin.fwk.writers.rswrapper;
import java.util.UUID;
import java.util.concurrent.BlockingQueue;
public abstract class AbstractLocalWrapper<T> extends AbstractWrapper<T> {
private String locator;
protected boolean closed = false;
public AbstractLocalWrapper() {
super();
this.locator = UUID.randomUUID().toString();
}
public AbstractLocalWrapper(int queueSize) {
super();
this.locator = UUID.randomUUID().toString();
}
public abstract BlockingQueue<T> getQueue();
@Override
public String getLocator() {
return this.locator;
}
@Override
public boolean isClosed() {
return closed;
}
public abstract void disposeBuffer();
}

@@ -0,0 +1,42 @@
package org.gcube.data.spd.plugin.fwk.writers.rswrapper;
import org.gcube.data.spd.model.exceptions.InvalidRecordException;
import org.gcube.data.spd.model.exceptions.StreamException;
import org.gcube.data.spd.model.exceptions.WrapperAlreadyDisposedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class AbstractWrapper<T> {
Logger logger= LoggerFactory.getLogger(AbstractWrapper.class);
protected int links;
public abstract String getLocator();
public abstract boolean add(T result) throws InvalidRecordException, WrapperAlreadyDisposedException;
public abstract boolean add(StreamException result) throws InvalidRecordException, WrapperAlreadyDisposedException;
public abstract void close();
public synchronized void unregister(){
logger.info(Thread.currentThread().getId()+" - closing wrapper");
links--;
if (links<=0){
if (!this.isClosed()){
this.close();
}
else throw new IllegalStateException("wrapper already closed");
}
}
public synchronized void register(){
this.links++;
}
public abstract boolean isClosed();
}

@@ -0,0 +1,118 @@
package org.gcube.data.spd.plugin.fwk.writers.rswrapper;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.TimeUnit;
import org.gcube.data.spd.model.exceptions.InvalidRecordException;
import org.gcube.data.spd.model.exceptions.StreamException;
import org.gcube.data.spd.model.exceptions.WrapperAlreadyDisposedException;
import java.util.Collections;
public class LocalWrapper<T> extends AbstractLocalWrapper<T> {
private static Map<String, LocalWrapper<?>> wrapperLocatorMap = new HashMap<String, LocalWrapper<?>>();
public static LocalWrapper<?> getWrapper(String locator){
return wrapperLocatorMap.get(locator);
}
private String locator;
private ArrayBlockingQueue<T> queue;
private List<StreamException> errorList = new ArrayList<StreamException>();
private boolean forceOpen = false;
private int timeoutTimeInMinutes =1;
public LocalWrapper() {
super();
this.locator = UUID.randomUUID().toString();
this.queue = new ArrayBlockingQueue<T>(100);
}
public LocalWrapper(int queueSize) {
super();
this.locator = UUID.randomUUID().toString();
this.queue = new ArrayBlockingQueue<T>(queueSize);
}
@Override
public String getLocator() {
return this.locator;
}
@Override
public synchronized boolean add(T input) throws InvalidRecordException, WrapperAlreadyDisposedException {
if (this.closed) throw new WrapperAlreadyDisposedException("the local wrapper has been disposed");
try{
return this.queue.offer(input, timeoutTimeInMinutes,TimeUnit.MINUTES);
}catch (InterruptedException e) {
this.close();
this.queue= null;
throw new WrapperAlreadyDisposedException("the local wrapper has been disposed");
}
}
@Override
public void close(){
if (!isForceOpen()){
this.closed= true;
}
else logger.warn("cannot close the Wrapper, forceOpen enabled");
}
@Override
public ArrayBlockingQueue<T> getQueue(){
return queue;
}
public void setTimeoutTimeInMinutes(int timeoutTimeInMinutes) {
this.timeoutTimeInMinutes = timeoutTimeInMinutes;
}
public boolean isForceOpen() {
return forceOpen;
}
public void forceOpen() {
this.forceOpen = true;
}
public void disableForceOpen() {
this.forceOpen = false;
}
public void disableForceOpenAndClose() {
this.forceOpen = false;
this.close();
}
@Override
public void disposeBuffer() {
queue = null;
}
@Override
public boolean add(StreamException result) throws InvalidRecordException,
WrapperAlreadyDisposedException {
errorList.add(result);
return true;
}
public List<StreamException> getErrors(){
return Collections.unmodifiableList(this.errorList);
}
}
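Putting the local pieces together, a producer thread can feed a LocalWrapper through a Writer while a consumer drains it through a LocalReader; a sketch under those assumptions (not part of this commit, class name and record values hypothetical):

import org.gcube.data.spd.plugin.fwk.readers.LocalReader;
import org.gcube.data.spd.plugin.fwk.writers.Writer;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.LocalWrapper;

public class LocalStreamExample {
    public static void main(String[] args) {
        final LocalWrapper<String> wrapper = new LocalWrapper<String>(10);

        // producer: writes a few records, then closes the writer
        new Thread(new Runnable() {
            public void run() {
                Writer<String> writer = new Writer<String>(wrapper);
                for (int i = 0; i < 5; i++) writer.write("record-" + i);
                writer.close(); // unregisters from the wrapper, which closes it
            }
        }).start();

        // consumer: polls the queue until the wrapper is closed and drained
        LocalReader<String> reader = new LocalReader<String>(wrapper);
        while (reader.hasNext())
            System.out.println(reader.next());
        reader.close(); // the wrapper is closed, so this disposes the buffer
    }
}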

@@ -0,0 +1,75 @@
package org.gcube.data.spd.plugin.fwk.writers.rswrapper;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.gcube.data.spd.model.exceptions.InvalidRecordException;
import org.gcube.data.spd.model.exceptions.StreamException;
import org.gcube.data.spd.model.exceptions.WrapperAlreadyDisposedException;
import org.gcube.data.spd.plugin.fwk.writers.RecordWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ResultWrapper<T> extends AbstractWrapper<T>{
private static final Logger logger = LoggerFactory.getLogger(ResultWrapper.class);
private static Map<String, ResultWrapper<?>> wrapperLocatorMap = new HashMap<String, ResultWrapper<?>>();
public static ResultWrapper<?> getWrapper(String locator){
return wrapperLocatorMap.get(locator);
}
private String locator;
private RecordWriter<T> writer=null;
public ResultWrapper(RecordWriter<T> rw) {
this.writer = rw;
this.locator = UUID.randomUUID().toString();
}
public synchronized boolean add(T input) throws InvalidRecordException, WrapperAlreadyDisposedException{
try {
return writer.put(input);
}catch (Exception e) {
logger.trace("the writer is already disposed (trying to write something when it is closed)");
throw new WrapperAlreadyDisposedException(e);
}
}
public void close(){
this.writer.close();
}
@Override
public boolean isClosed() {
return this.writer.isClosed();
}
@Override
public boolean add(StreamException result) throws InvalidRecordException,
WrapperAlreadyDisposedException {
try {
return writer.put(result);
}catch (Exception e) {
logger.trace("the writer is already disposed (trying to write something when it is closed)");
throw new WrapperAlreadyDisposedException(e);
}
}
@Override
public String getLocator() {
return this.locator;
}
}
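Any RecordWriter implementation can back a ResultWrapper; an in-memory sketch that is handy in tests (not part of this commit, class name hypothetical):

import java.util.ArrayList;
import java.util.List;
import org.gcube.data.spd.plugin.fwk.writers.RecordWriter;

// Hypothetical in-memory RecordWriter collecting records and errors in lists.
public class InMemoryRecordWriter<T> implements RecordWriter<T> {
    private final List<T> records = new ArrayList<T>();
    private final List<Exception> errors = new ArrayList<Exception>();
    private boolean closed = false;

    public boolean put(T element) { return !closed && records.add(element); }
    public boolean put(Exception error) { return !closed && errors.add(error); }
    public void close() { closed = true; }
    public boolean isClosed() { return closed; }
    public List<T> getRecords() { return records; }
}

A wrapper over it is then just new ResultWrapper<String>(new InMemoryRecordWriter<String>()).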

@@ -0,0 +1,49 @@
package org.gcube.data.spd.plugin;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.gcube.data.spd.model.KeyValue;
import org.gcube.data.spd.model.PointInfo;
import org.gcube.data.spd.model.binding.Bindings;
import org.gcube.data.spd.model.products.OccurrencePoint;
public class BindingTest {
public static void main(String[] args) throws Exception{
occurrenceTest();
}
static private String occurrencePoint = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?><occurrencePoint basisOfRecord=\"PreservedSpecimen\" minDepth=\"0.0\" maxDepth=\"0.0\" decimalLongitude=\"-59.167\" decimalLatitude=\"50.617\" country=\"Canada\" locality=\"\" scientificName=\"Carcharodon carcharias\" recordedBy=\"Mareoux, A.; Mareoux, C.\" catalogueNumber=\"CMNFI 1989-0126.1\" collectionCode=\"CMNFI\" institutionCode=\"CMN\" credits=\"Biodiversity occurrence data published by: Ocean Biogeographic Information System (Accessed through GBIF Data Portal, data.gbif.org, 2013-08-30)\" author=\"\" provider=\"\" id=\"17589157\"><dataSet id=\"344\"><citation>Canadian Museum of Nature - Fish Collection</citation><name>Canadian Museum of Nature - Fish Collection (OBIS Canada)</name><dataProvider id=\"82\"><name>Ocean Biogeographic Information System</name></dataProvider></dataSet></occurrencePoint>";
public static void pointInfoTest() throws Exception{
PointInfo pi= new PointInfo(12.3, 15.5);
List<KeyValue> keyvaluelist= new ArrayList<KeyValue>();
keyvaluelist.add(new KeyValue("test", "value"));
//System.out.println(Arrays.toString(pi.getPropertiesList().toArray(new KeyValue[0])));
pi.setPropertiesList(keyvaluelist);
System.out.println(Arrays.toString(pi.getPropertiesList().toArray(new KeyValue[0])));
String xml =Bindings.toXml(pi);
System.out.println(xml);
PointInfo resPi = Bindings.fromXml(xml);
System.out.println(Arrays.toString(resPi.getPropertiesList().toArray(new KeyValue[0])));
}
public static void occurrenceTest() throws Exception{
OccurrencePoint point = (OccurrencePoint)Bindings.fromXml(occurrencePoint);
System.out.println(Bindings.toXml(point));
}
}

@@ -0,0 +1,18 @@
# Set root category priority to TRACE and its only appender to A1.
log4j.rootCategory=TRACE, A1
log4j.appender.A1=org.apache.log4j.ConsoleAppender
# A1 uses PatternLayout.
log4j.appender.A1.layout=org.apache.log4j.PatternLayout
log4j.appender.A1.layout.ConversionPattern=[CodelistManager] %d{HH:mm:ss,SSS} %-5p %c{2} [%t,%M:%L] %m%n
# Per-package log levels for gCube code
log4j.category.org.gcube.data.spd=TRACE,A1
log4j.category.org.gcube=INFO, A1
log4j.additivity.org.gcube.data.spd=false