release 4.3

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/branches/data-access/species-products-discovery/3.0@142430 82a268e6-3cf1-43bd-a215-b396298e98cf

commit eb87c90a65
@@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
	<classpathentry kind="src" output="target/classes" path="src/main/java">
		<attributes>
			<attribute name="optional" value="true"/>
			<attribute name="maven.pomderived" value="true"/>
		</attributes>
	</classpathentry>
	<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
		<attributes>
			<attribute name="maven.pomderived" value="true"/>
		</attributes>
	</classpathentry>
	<classpathentry kind="src" output="target/test-classes" path="src/test/java">
		<attributes>
			<attribute name="optional" value="true"/>
			<attribute name="maven.pomderived" value="true"/>
		</attributes>
	</classpathentry>
	<classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources">
		<attributes>
			<attribute name="maven.pomderived" value="true"/>
		</attributes>
	</classpathentry>
	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7">
		<attributes>
			<attribute name="maven.pomderived" value="true"/>
		</attributes>
	</classpathentry>
	<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
		<attributes>
			<attribute name="maven.pomderived" value="true"/>
		</attributes>
	</classpathentry>
	<classpathentry kind="output" path="target/classes"/>
</classpath>
@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
	<name>species-product-discovery</name>
	<comment></comment>
	<projects>
	</projects>
	<buildSpec>
		<buildCommand>
			<name>org.eclipse.jdt.core.javabuilder</name>
			<arguments>
			</arguments>
		</buildCommand>
		<buildCommand>
			<name>org.eclipse.m2e.core.maven2Builder</name>
			<arguments>
			</arguments>
		</buildCommand>
	</buildSpec>
	<natures>
		<nature>org.eclipse.jdt.core.javanature</nature>
		<nature>org.eclipse.m2e.core.maven2Nature</nature>
	</natures>
</projectDescription>
@@ -0,0 +1,5 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
org.eclipse.jdt.core.compiler.compliance=1.7
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.source=1.7
@@ -0,0 +1,4 @@
activeProfiles=
eclipse.preferences.version=1
resolveWorkspaceProjects=true
version=1
@@ -0,0 +1,4 @@
gCube System - License
------------------------------------------------------------

${gcube.license}
@@ -0,0 +1,66 @@
The gCube System - ${name}
--------------------------------------------------

${description}

${gcube.description}

${gcube.funding}


Version
--------------------------------------------------

${version} (${buildDate})

Please see the file named "changelog.xml" in this directory for the release notes.


Authors
--------------------------------------------------

* Lucio Lelii (lucio.lelii@isti.cnr.it), CNR, Italy


Maintainers
-----------

* Lucio Lelii (lucio.lelii@isti.cnr.it), CNR, Italy


Download information
--------------------------------------------------

Source code is available from SVN:
   ${scm.url}

Binaries can be downloaded from the gCube website:
   ${gcube.website}


Installation
--------------------------------------------------

Installation documentation is available on-line in the gCube Wiki:
   ${gcube.wikiRoot}


Documentation
--------------------------------------------------

Documentation is available on-line in the gCube Wiki:
   ${gcube.wikiRoot}


Support
--------------------------------------------------

Bugs and support requests can be reported in the gCube issue tracking tool:
   ${gcube.issueTracking}


Licensing
--------------------------------------------------

This software is licensed under the terms you may find in the file named "LICENSE" in this directory.
@@ -0,0 +1,30 @@
<ReleaseNotes>
	<Changeset component="org.gcube.data-access.species-products-discovery.1-0-0" date="2012-05-04">
		<Change>species product discovery release</Change>
	</Changeset>
	<Changeset component="org.gcube.data-access.species-products-discovery.1-1-0" date="2012-09-14">
		<Change>added caching for slow external repositories</Change>
		<Change>added use of the SPQL language</Change>
	</Changeset>
	<Changeset component="org.gcube.data-access.species-products-discovery.1-3-0" date="2012-11-30">
		<Change>added a new port-type for job execution</Change>
	</Changeset>
	<Changeset component="org.gcube.data-access.species-products-discovery.2-0-0" date="2013-05-02">
		<Change>integration with the spql parser version 2.0.0</Change>
	</Changeset>
	<Changeset component="org.gcube.data-access.species-products-discovery.2-1-0" date="2013-05-02">
		<Change>integration with the spql parser version 2.1.0</Change>
		<Change>added a Worker for unfold</Change>
		<Change>added an error file for jobs</Change>
		<Change>added retries for every call in case of external repository errors</Change>
	</Changeset>
	<Changeset component="org.gcube.data-access.species-products-discovery.2-2-0" date="2013-09-13">
		<Change>support ticket #688 [http://support.d4science.research-infrastructures.eu/ticket/688]</Change>
	</Changeset>
	<Changeset component="org.gcube.data-access.species-products-discovery.2-2-1" date="2013-10-24">
		<Change>task #2299 [https://issue.imarine.research-infrastructures.eu/ticket/2299]</Change>
	</Changeset>
	<Changeset component="org.gcube.data-access.species-products-discovery.3-0-0" date="2017-02-09">
		<Change>service moved to SmartGears</Change>
	</Changeset>
</ReleaseNotes>
@@ -0,0 +1,32 @@
<assembly
	xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
	<id>servicearchive</id>
	<formats>
		<format>tar.gz</format>
	</formats>
	<baseDirectory>/</baseDirectory>
	<fileSets>
		<fileSet>
			<directory>${distroDirectory}</directory>
			<outputDirectory>/</outputDirectory>
			<useDefaultExcludes>true</useDefaultExcludes>
			<includes>
				<include>README</include>
				<include>LICENSE</include>
				<include>changelog.xml</include>
				<include>profile.xml</include>
			</includes>
			<fileMode>755</fileMode>
			<filtered>true</filtered>
		</fileSet>
	</fileSets>
	<files>
		<file>
			<source>target/${build.finalName}.${project.packaging}</source>
			<outputDirectory>/${artifactId}</outputDirectory>
		</file>
	</files>
</assembly>
@@ -0,0 +1,13 @@
<application mode='online'>
	<name>SpeciesProductsDiscovery</name>
	<group>DataAccess</group>
	<version>${version}</version>
	<description>SpeciesProductsDiscovery service</description>
	<local-persistence location='target' />

	<exclude>/gcube/service/resultset/*</exclude>
	<exclude handlers='request-accounting'>/gcube/service/*</exclude>
</application>
@@ -0,0 +1,30 @@
<?xml version="1.0" encoding="UTF-8"?>
<Resource>
	<ID></ID>
	<Type>Service</Type>
	<Profile>
		<Description>${description}</Description>
		<Class>DataAccess</Class>
		<Name>${artifactId}</Name>
		<Version>1.0.0</Version>
		<Packages>
			<Software>
				<Description>${description}</Description>
				<Name>${artifactId}</Name>
				<Version>${version}</Version>
				<MavenCoordinates>
					<groupId>${groupId}</groupId>
					<artifactId>${artifactId}</artifactId>
					<version>${version}</version>
				</MavenCoordinates>
				<Type>library</Type>
				<Files>
					<File>${build.finalName}.war</File>
				</Files>
			</Software>
		</Packages>
	</Profile>
</Resource>
@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<web-app>
	<servlet>
		<servlet-name>org.gcube.data.spd.SpeciesProductsDiscovery</servlet-name>
	</servlet>
	<servlet-mapping>
		<servlet-name>org.gcube.data.spd.SpeciesProductsDiscovery</servlet-name>
		<url-pattern>/gcube/service/*</url-pattern>
	</servlet-mapping>
</web-app>
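Note that the servlet declaration above has no <servlet-class>: with Servlet 3.0 and Jersey, a <servlet-name> equal to the fully qualified name of a javax.ws.rs.core.Application subclass (here SpeciesProductsDiscovery, added later in this commit) is enough for the container to bind the mapping to that application. As a minimal sketch, any @Path-annotated class in the scanned package is then served under /gcube/service/*; the StatusResource below is hypothetical and not part of this commit:

    package org.gcube.data.spd.resources;

    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.Produces;
    import javax.ws.rs.core.MediaType;

    // hypothetical resource, discovered by the ResourceConfig package scan
    // and served at /gcube/service/status
    @Path("status")
    public class StatusResource {

        @GET
        @Produces(MediaType.TEXT_PLAIN)
        public String status() {
            return "alive";
        }
    }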
@@ -0,0 +1,281 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>
	<groupId>org.gcube.data.spd</groupId>
	<artifactId>species-products-discovery</artifactId>
	<version>3.0.0-SNAPSHOT</version>
	<packaging>war</packaging>
	<name>species product discovery</name>

	<dependencyManagement>
		<dependencies>
			<dependency>
				<groupId>org.gcube.distribution</groupId>
				<artifactId>maven-smartgears-bom</artifactId>
				<version>2.0.0-SNAPSHOT</version>
				<type>pom</type>
				<scope>import</scope>
			</dependency>
			<dependency>
				<groupId>org.glassfish.jersey</groupId>
				<artifactId>jersey-bom</artifactId>
				<version>2.23.2</version>
				<type>pom</type>
				<scope>import</scope>
			</dependency>
		</dependencies>
	</dependencyManagement>

	<properties>
		<webappDirectory>${project.basedir}/src/main/webapp/WEB-INF</webappDirectory>
		<distroDirectory>${project.basedir}/distro</distroDirectory>
	</properties>

	<dependencies>
		<dependency>
			<groupId>org.slf4j</groupId>
			<artifactId>slf4j-api</artifactId>
		</dependency>
		<dependency>
			<groupId>org.gcube.core</groupId>
			<artifactId>common-smartgears</artifactId>
		</dependency>
		<dependency>
			<groupId>org.gcube.core</groupId>
			<artifactId>common-smartgears-app</artifactId>
		</dependency>

		<dependency>
			<groupId>org.gcube.resources</groupId>
			<artifactId>registry-publisher</artifactId>
		</dependency>

		<dependency>
			<groupId>javax.servlet</groupId>
			<artifactId>javax.servlet-api</artifactId>
			<version>3.0.1</version>
			<scope>provided</scope>
		</dependency>

		<!-- GEO -->
		<dependency>
			<groupId>org.gcube.spatial.data</groupId>
			<artifactId>gis-interface</artifactId>
			<version>[2.3.0-SNAPSHOT,3.0.0-SNAPSHOT)</version>
			<exclusions>
				<exclusion>
					<artifactId>ehcache</artifactId>
					<groupId>net.sf.ehcache</groupId>
				</exclusion>
			</exclusions>
		</dependency>

		<dependency>
			<groupId>org.gcube.contentmanagement</groupId>
			<artifactId>storage-manager-core</artifactId>
			<version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
		</dependency>
		<dependency>
			<groupId>org.gcube.contentmanagement</groupId>
			<artifactId>storage-manager-wrapper</artifactId>
			<version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
		</dependency>

		<dependency>
			<groupId>org.gcube.common</groupId>
			<artifactId>csv4j</artifactId>
			<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
		</dependency>

		<dependency>
			<groupId>org.gcube.data.spd</groupId>
			<artifactId>spql-parser</artifactId>
			<version>[2.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version>
		</dependency>
		<dependency>
			<groupId>org.mockito</groupId>
			<artifactId>mockito-all</artifactId>
			<version>1.9.5</version>
			<scope>test</scope>
		</dependency>

		<dependency>
			<groupId>org.glassfish.jersey.containers</groupId>
			<artifactId>jersey-container-servlet-core</artifactId>
		</dependency>
		<dependency>
			<groupId>org.glassfish.jersey.containers</groupId>
			<!-- if your container implements a Servlet API older than 3.0, use "jersey-container-servlet-core" -->
			<artifactId>jersey-container-servlet</artifactId>
		</dependency>

		<dependency>
			<groupId>javax.websocket</groupId>
			<artifactId>javax.websocket-api</artifactId>
			<version>1.1</version>
			<scope>provided</scope>
		</dependency>

		<dependency>
			<groupId>net.sf.ehcache</groupId>
			<artifactId>ehcache-core</artifactId>
			<version>2.5.1</version>
		</dependency>
		<dependency>
			<groupId>org.gcube.resources</groupId>
			<artifactId>common-gcore-resources</artifactId>
		</dependency>
		<dependency>
			<groupId>org.gcube.resources.discovery</groupId>
			<artifactId>ic-client</artifactId>
		</dependency>
		<dependency>
			<groupId>org.gcube.data.spd</groupId>
			<artifactId>having-engine</artifactId>
			<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
		</dependency>
		<dependency>
			<groupId>org.gcube.data.spd</groupId>
			<artifactId>spd-plugin-framework</artifactId>
			<version>[3.0.0-SNAPSHOT,4.0.0-SNAPSHOT)</version>
		</dependency>
		<dependency>
			<groupId>org.gcube.data.spd</groupId>
			<artifactId>spd-model</artifactId>
			<version>[3.0.0-SNAPSHOT,4.0.0-SNAPSHOT)</version>
		</dependency>

		<dependency>
			<groupId>org.gcube.data.spd</groupId>
			<artifactId>gbif-spd-plugin</artifactId>
			<version>[1.8.2-SNAPSHOT,)</version>
		</dependency>
		<dependency>
			<groupId>org.gcube.data.spd</groupId>
			<artifactId>obis-spd-plugin</artifactId>
			<version>[1.8.2-SNAPSHOT,)</version>
		</dependency>
		<dependency>
			<groupId>org.gcube.data.spd</groupId>
			<artifactId>worms-spd-plugin</artifactId>
			<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
		</dependency>
		<dependency>
			<groupId>org.gcube.data.spd</groupId>
			<artifactId>wordss-spd-plugin</artifactId>
			<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
		</dependency>
		<dependency>
			<groupId>org.gcube.data.spd</groupId>
			<artifactId>brazilian-flora-spd-plugin</artifactId>
			<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
		</dependency>
		<dependency>
			<groupId>org.gcube.data.spd</groupId>
			<artifactId>catalogue-of-life-spd-plugin</artifactId>
			<version>[1.0.0-SNAPSHOT,2.0.0-SNAPSHOT)</version>
		</dependency>

		<!-- https://mvnrepository.com/artifact/postgresql/postgresql -->
		<dependency>
			<groupId>postgresql</groupId>
			<artifactId>postgresql</artifactId>
			<version>8.4-702.jdbc4</version>
		</dependency>

		<dependency>
			<groupId>junit</groupId>
			<artifactId>junit</artifactId>
			<version>4.10</version>
			<scope>test</scope>
		</dependency>
	</dependencies>

	<build>
		<finalName>${artifactId}</finalName>
		<plugins>
			<plugin>
				<artifactId>maven-compiler-plugin</artifactId>
				<version>2.3.2</version>
				<configuration>
					<source>1.7</source>
					<target>1.7</target>
				</configuration>
			</plugin>
			<plugin>
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-resources-plugin</artifactId>
				<version>2.6</version>
				<executions>
					<execution>
						<id>copy-profile</id>
						<goals>
							<goal>copy-resources</goal>
						</goals>
						<phase>process-resources</phase>
						<configuration>
							<outputDirectory>${webappDirectory}</outputDirectory>
							<resources>
								<resource>
									<directory>${distroDirectory}</directory>
									<filtering>true</filtering>
								</resource>
							</resources>
						</configuration>
					</execution>
				</executions>
			</plugin>
			<plugin>
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-war-plugin</artifactId>
				<configuration>
					<warName>species-products-discovery</warName>
					<failOnMissingWebXml>false</failOnMissingWebXml>
				</configuration>
			</plugin>
			<plugin>
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-assembly-plugin</artifactId>
				<version>2.2</version>
				<configuration>
					<descriptors>
						<descriptor>${distroDirectory}/descriptor.xml</descriptor>
					</descriptors>
				</configuration>
				<executions>
					<execution>
						<id>servicearchive</id>
						<phase>install</phase>
						<goals>
							<goal>single</goal>
						</goals>
					</execution>
				</executions>
			</plugin>
		</plugins>
	</build>
</project>
@@ -0,0 +1,52 @@
package org.gcube.data.spd;

import javax.xml.namespace.QName;

public class Constants {

	/** Namespace. */
	public static final String NS = "http://gcube-system.org/namespaces/data/speciesproductsdiscovery";

	/** JNDI base name. */
	public static final String JNDI_NAME = "gcube/data/speciesproductsdiscovery";

	/** Relative endpoint of the Occurrences port-type. */
	public static final String OCCURRENCES_PT_NAME = JNDI_NAME+"/occurrences";
	/** Relative endpoint of the Manager port-type. */
	public static final String MANAGER_PT_NAME = JNDI_NAME+"/manager";
	/** Relative endpoint of the Classification port-type. */
	public static final String CLASSIFICATION_PT_NAME = JNDI_NAME+"/classification";

	/** Name of the plugin RP of the Binder resource. */
	public static final String PLUGIN_DESCRIPTION_RPNAME = "PluginMap";
	/** Fully qualified name of the plugin RP of the Binder resource. */
	public static final QName BINDER_PLUGIN_RP = new QName(NS, PLUGIN_DESCRIPTION_RPNAME);

	public static final String FACTORY_RESORCE_NAME = "manager";

	/** Service name. */
	public static final String SERVICE_NAME = "SpeciesProductsDiscovery";

	/** Service class. */
	public static final String SERVICE_CLASS = "DataAccess";

	public static final String TAXON_RETURN_TYPE = "taxon";

	public static final String OCCURRENCE_RETURN_TYPE = "occurrence";

	public static final String RESULITEM_RETURN_TYPE = "resultItem";

	public static final int JOB_CALL_RETRIES = 10;

	public static final long RETRY_JOBS_MILLIS = 2000;

	public static final int QUERY_CALL_RETRIES = 5;

	public static final long RETRY_QUERY_MILLIS = 1000;

}
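The retry constants above pair with the MaxRetriesReachedException added later in this commit. A sketch only (the real call sites are elsewhere in the service; withQueryRetries and the Callable-based remote call are illustrative):

    package org.gcube.data.spd;

    import java.util.concurrent.Callable;

    import org.gcube.data.spd.exception.MaxRetriesReachedException;

    public class RetrySketch {

        // retries a remote call up to QUERY_CALL_RETRIES times, backing off
        // RETRY_QUERY_MILLIS between attempts
        public static <T> T withQueryRetries(Callable<T> call)
                throws MaxRetriesReachedException, InterruptedException {
            for (int attempt = 1; attempt <= Constants.QUERY_CALL_RETRIES; attempt++) {
                try {
                    return call.call();
                } catch (Exception e) {
                    if (attempt == Constants.QUERY_CALL_RETRIES)
                        throw new MaxRetriesReachedException(e);
                    Thread.sleep(Constants.RETRY_QUERY_MILLIS);
                }
            }
            throw new MaxRetriesReachedException(); // unreachable
        }
    }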
@@ -0,0 +1,15 @@
package org.gcube.data.spd;

import javax.ws.rs.ApplicationPath;

import org.gcube.data.spd.model.Constants;
import org.glassfish.jersey.server.ResourceConfig;

@ApplicationPath(Constants.APPLICATION_ROOT_PATH)
public class SpeciesProductsDiscovery extends ResourceConfig {

	public SpeciesProductsDiscovery() {
		packages("org.gcube.data.spd.resources");
	}

}
@@ -0,0 +1,109 @@
package org.gcube.data.spd.caching;

import java.io.Serializable;

public class CacheKey implements Serializable {

	private static final long serialVersionUID = 1L;

	private String searchName;
	private Class<?> clazz;
	private String propsAsString;
	//TODO: properties

	public CacheKey(String searchName, String propsAsString, Class<?> clazz) {
		super();
		this.searchName = searchName;
		this.propsAsString = propsAsString;
		this.clazz = clazz;
	}

	public String getSearchName() {
		return searchName;
	}

	public void setSearchName(String searchName) {
		this.searchName = searchName;
	}

	public Class<?> getClazz() {
		return clazz;
	}

	public void setClazz(Class<?> clazz) {
		this.clazz = clazz;
	}

	public String getPropsAsString() {
		return propsAsString;
	}

	public void setPropsAsString(String propsAsString) {
		this.propsAsString = propsAsString;
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + ((clazz == null) ? 0 : clazz.getName().hashCode());
		result = prime * result + ((propsAsString == null) ? 0 : propsAsString.hashCode());
		result = prime * result + ((searchName == null) ? 0 : searchName.hashCode());
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		CacheKey other = (CacheKey) obj;
		if (clazz == null) {
			if (other.clazz != null)
				return false;
		} else if (!clazz.getName().equals(other.clazz.getName()))
			return false;
		if (propsAsString == null) {
			if (other.propsAsString != null)
				return false;
		} else if (!propsAsString.equals(other.propsAsString))
			return false;
		if (searchName == null) {
			if (other.searchName != null)
				return false;
		} else if (!searchName.equals(other.searchName))
			return false;
		return true;
	}

}
@@ -0,0 +1,71 @@
package org.gcube.data.spd.caching;

import javax.xml.bind.JAXBException;

import org.gcube.data.spd.model.exceptions.StreamBlockingException;
import org.gcube.data.spd.model.exceptions.StreamException;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.plugin.fwk.util.Util;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CacheWriter<T extends ResultElement> implements ObjectWriter<T>, ClosableWriter<T> {

	Logger logger = LoggerFactory.getLogger(CacheWriter.class);

	ObjectWriter<T> writer;

	QueryCache<T> cache;

	boolean error = false;

	boolean closed = false;

	public CacheWriter(ObjectWriter<T> writer, QueryCache<T> cache) {
		super();
		this.writer = writer;
		this.cache = cache;
	}

	@Override
	public void close() {
		logger.trace("closing cachewriter with error "+error);
		closed = true;
		if (!error) cache.closeStore();
		else cache.setValid(false);
	}

	@Override
	public boolean write(T t) {
		T copyObj = null;
		try {
			// copy the element before forwarding it, so the cached instance
			// cannot be modified downstream
			copyObj = Util.copy(t);
			boolean external = writer.write(t);
			if (!writer.isAlive()) error = true;
			if (!error) cache.store(copyObj);
			return external;
		} catch (JAXBException e) {
			logger.warn("error copying element "+t.getId()+" in "+t.getProvider(), e);
			return false;
		}
	}

	@Override
	public boolean write(StreamException error) {
		// a blocking error invalidates the cache being built
		if (error instanceof StreamBlockingException)
			this.error = true;
		return true;
	}

	@Override
	public boolean isAlive() {
		if (!closed && !writer.isAlive())
			error = true;
		return writer.isAlive();
	}

}
@@ -0,0 +1,74 @@
package org.gcube.data.spd.caching;

import net.sf.ehcache.CacheException;
import net.sf.ehcache.Ehcache;
import net.sf.ehcache.Element;
import net.sf.ehcache.event.CacheEventListener;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MyCacheEventListener implements CacheEventListener {

	private static Logger logger = LoggerFactory.getLogger(MyCacheEventListener.class);

	@Override
	public Object clone() throws CloneNotSupportedException {
		return super.clone();
	}

	@Override
	public void notifyElementRemoved(Ehcache cache, Element element) throws CacheException {
		CacheKey key = (CacheKey) element.getKey();
		QueryCache<?> value = (QueryCache<?>) element.getValue();
		logger.trace("event removed notified "+cache.getName()+" "+key.getSearchName()+" "+value.isValid());
	}

	@Override
	public void notifyElementPut(Ehcache cache, Element element) throws CacheException {
		CacheKey key = (CacheKey) element.getKey();
		QueryCache<?> value = (QueryCache<?>) element.getValue();
		logger.trace("event put notified "+cache.getName()+" "+key.getSearchName()+" "+value.isValid());
	}

	@Override
	public void notifyElementUpdated(Ehcache cache, Element element) throws CacheException {
		logger.trace("event update notified");
	}

	@Override
	public void notifyElementExpired(Ehcache cache, Element element) {
		CacheKey key = (CacheKey) element.getKey();
		QueryCache<?> value = (QueryCache<?>) element.getValue();
		logger.trace("event expired notified "+cache.getName()+" "+key.getSearchName()+" "+value.isValid());
		// remove the backing file of the expired cache entry
		value.dispose();
	}

	@Override
	public void notifyElementEvicted(Ehcache cache, Element element) {
		CacheKey key = (CacheKey) element.getKey();
		QueryCache<?> value = (QueryCache<?>) element.getValue();
		logger.trace("event evicted notified "+cache.getName()+" "+key.getSearchName()+" "+value.isValid());
		// remove the backing file of the evicted cache entry
		value.dispose();
	}

	@Override
	public void notifyRemoveAll(Ehcache cache) {
	}

	@Override
	public void dispose() {
	}

}
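A minimal wiring sketch (the cache name, sizing and TTL below are illustrative assumptions, not taken from this commit) showing how MyCacheEventListener would be registered on an Ehcache 2.x cache so that expired or evicted QueryCache entries get their backing files deleted:

    package org.gcube.data.spd.caching;

    import net.sf.ehcache.Cache;
    import net.sf.ehcache.CacheManager;
    import net.sf.ehcache.config.CacheConfiguration;

    public class CacheSetupSketch {

        public static Cache createQueryCache() {
            CacheManager manager = CacheManager.create();
            // 100 in-memory entries, expiring one hour after creation (illustrative values)
            Cache cache = new Cache(new CacheConfiguration("spd-queries", 100)
                    .timeToLiveSeconds(3600));
            manager.addCache(cache);
            // expiry/eviction notifications reach MyCacheEventListener, which
            // calls QueryCache.dispose() on the removed value
            cache.getCacheEventNotificationService().registerListener(new MyCacheEventListener());
            return cache;
        }
    }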
@@ -0,0 +1,179 @@
package org.gcube.data.spd.caching;

import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.UUID;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.Inflater;
import java.util.zip.InflaterInputStream;

import org.gcube.data.spd.model.binding.Bindings;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class QueryCache<T> implements Serializable {

	public static Lock lock = new ReentrantLock(true);

	private static final long serialVersionUID = 1L;

	private static Logger logger = LoggerFactory.getLogger(QueryCache.class);

	private String tableId;
	private boolean valid;
	private boolean tableCreated;
	private boolean empty;
	private boolean error;
	private File file;
	private File persistencePath;
	private transient ObjectOutputStream writer;

	public QueryCache(String pluginName, String persistencePath) {
		this.persistencePath = new File(persistencePath);
		this.tableId = pluginName+"_"+UUID.randomUUID().toString().replace("-", "_");
	}

	public String getTableId() {
		return tableId;
	}

	public void setTableId(String tableId) {
		this.tableId = tableId;
	}

	public boolean isTableCreated() {
		return tableCreated;
	}

	public void setTableCreated(boolean tableCreated) {
		this.tableCreated = tableCreated;
	}

	public void setValid(boolean valid) {
		this.valid = valid;
	}

	public void setEmpty(boolean empty) {
		this.empty = empty;
	}

	public boolean isError() {
		return error;
	}

	public void setError(boolean error) {
		this.error = error;
	}

	public boolean store(T obj) {
		// lazily create the compressed cache file on the first stored element
		if (file==null) {
			file = new File(this.persistencePath, tableId);
			FileOutputStream fos = null;
			DeflaterOutputStream deflater = null;
			try {
				file.createNewFile();
				fos = new FileOutputStream(file);
				deflater = new DeflaterOutputStream(fos, new Deflater(Deflater.BEST_COMPRESSION, true));
				writer = new ObjectOutputStream(deflater);
			} catch (Exception e) {
				if (file!=null)
					file.delete();
				if (deflater!=null)
					try {
						deflater.close();
					} catch (IOException e1) { }
				if (fos!=null)
					try {
						fos.close();
					} catch (IOException e1) { }
				this.error = true;
				logger.error("error initializing storage", e);
				return false;
			}
		}
		try {
			// elements are serialized as their XML binding
			writer.writeObject(Bindings.toXml(obj));
			return true;
		} catch (Exception e) {
			logger.warn("error storing cache", e);
			return false;
		}
	}

	@SuppressWarnings("unchecked")
	public void getAll(ObjectWriter<T> writer) {
		try (FileInputStream fis = new FileInputStream(file);
				InflaterInputStream iis = new InflaterInputStream(fis, new Inflater(true));
				ObjectInputStream ois = new ObjectInputStream(iis)) {
			String obj = null;
			while ((obj = (String) ois.readObject())!=null && writer.isAlive())
				writer.write((T) Bindings.fromXml(obj));
		} catch (EOFException eof) {
			logger.debug("EOF error reading the cache, it should not be a problem", eof);
		} catch (Exception e) {
			logger.warn("error getting element from cache", e);
		}
	}

	public void closeStore() {
		try {
			if (writer!=null) writer.close();
			if (file==null) empty = true;
			this.valid = true;
		} catch (IOException e) {
			logger.warn("error closing outputStream", e);
		}
	}

	public boolean isValid() {
		return this.valid;
	}

	public boolean isEmpty() {
		return empty;
	}

	public void dispose() {
		try {
			this.valid = false;
			this.file.delete();
		} catch (Exception e) {
			logger.warn("error disposing cache", e);
		}
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + (empty ? 1231 : 1237);
		result = prime * result + (tableCreated ? 1231 : 1237);
		result = prime * result + ((tableId == null) ? 0 : tableId.hashCode());
		result = prime * result + (valid ? 1231 : 1237);
		return result;
	}

	@Override
	public String toString() {
		return "QueryCache [tableId=" + tableId + ", valid=" + valid
				+ ", empty=" + empty + ", error=" + error + "]";
	}

}
@@ -0,0 +1,15 @@
package org.gcube.data.spd.caching;

public class QueryCacheFactory<T> {

	String persistencePath;

	public QueryCacheFactory(String persistencePath) {
		this.persistencePath = persistencePath;
	}

	public QueryCache<T> create(String pluginName) {
		return new QueryCache<T>(pluginName, persistencePath);
	}

}
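Taken together, the caching pieces above compose as follows. A sketch only (method and variable names are illustrative, and the persistence path would come from the application configuration), assuming an ObjectWriter implementation from the plugin framework:

    package org.gcube.data.spd.caching;

    import org.gcube.data.spd.model.products.ResultElement;
    import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;

    public class CachingFlowSketch {

        public static <T extends ResultElement> void searchWithCache(
                ObjectWriter<T> clientWriter, String pluginName, String persistencePath) {
            QueryCacheFactory<T> factory = new QueryCacheFactory<T>(persistencePath);
            QueryCache<T> cache = factory.create(pluginName);
            CacheWriter<T> cacheWriter = new CacheWriter<T>(clientWriter, cache);
            // ... the plugin search writes each result through cacheWriter, which
            // copies it into the compressed cache file and forwards it to clientWriter ...
            cacheWriter.close();            // marks the cache valid, or invalid on error
            if (cache.isValid() && !cache.isEmpty())
                cache.getAll(clientWriter); // a later identical query can replay the results
        }
    }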
@@ -0,0 +1,29 @@
package org.gcube.data.spd.exception;

public class MaxRetriesReachedException extends Exception {

	private static final long serialVersionUID = 1L;

	public MaxRetriesReachedException() {
	}

	public MaxRetriesReachedException(String message) {
		super(message);
	}

	public MaxRetriesReachedException(Throwable cause) {
		super(cause);
	}

	public MaxRetriesReachedException(String message, Throwable cause) {
		super(message, cause);
	}

}
@@ -0,0 +1,30 @@
package org.gcube.data.spd.exception;

public class ServiceException extends Exception {

	private static final long serialVersionUID = 1L;

	public ServiceException() {
		super();
	}

	public ServiceException(String message, Throwable cause) {
		super(message, cause);
	}

	public ServiceException(String message) {
		super(message);
	}

	public ServiceException(Throwable cause) {
		super(cause);
	}

}
@@ -0,0 +1,24 @@
package org.gcube.data.spd.executor.jobs;

import java.io.Serializable;
import java.util.Calendar;

import org.gcube.data.spd.model.service.types.JobStatus;

public interface SpeciesJob extends Serializable, Runnable {

	public JobStatus getStatus();

	public void setStatus(JobStatus status);

	public String getId();

	public boolean validateInput(String input);

	public int getCompletedEntries();

	public Calendar getStartDate();

	public Calendar getEndDate();

}
@@ -0,0 +1,9 @@
package org.gcube.data.spd.executor.jobs;

public interface URLJob extends SpeciesJob {

	public String getResultURL();

	public String getErrorURL();

}
@@ -0,0 +1,34 @@
package org.gcube.data.spd.executor.jobs.csv;

import java.util.List;
import java.util.Map;

import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;

public class CSVCreator extends CSVJob {

	private static final long serialVersionUID = 1L;

	// static fields are not serialized: the converter is re-created lazily
	// in getConverter() after deserialization
	private static OccurrenceCSVConverter converter;

	public CSVCreator(Map<String, AbstractPlugin> plugins) {
		super(plugins);
		CSVCreator.converter = new OccurrenceCSVConverter();
	}

	@Override
	public Converter<OccurrencePoint, List<String>> getConverter() {
		if (CSVCreator.converter==null) converter = new OccurrenceCSVConverter();
		return CSVCreator.converter;
	}

	@Override
	public List<String> getHeader() {
		return OccurrenceCSVConverter.HEADER;
	}
}
@@ -0,0 +1,35 @@
package org.gcube.data.spd.executor.jobs.csv;

import java.util.List;
import java.util.Map;

import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;

public class CSVCreatorForOMJob extends CSVJob {

	private static final long serialVersionUID = 1L;

	// static fields are not serialized: the converter is re-created lazily
	// in getConverter() after deserialization
	private static OccurrenceCSVConverterOpenModeller converter;

	public CSVCreatorForOMJob(Map<String, AbstractPlugin> plugins) {
		super(plugins);
		converter = new OccurrenceCSVConverterOpenModeller();
	}

	@Override
	public Converter<OccurrencePoint, List<String>> getConverter() {
		if (CSVCreatorForOMJob.converter==null) converter = new OccurrenceCSVConverterOpenModeller();
		return CSVCreatorForOMJob.converter;
	}

	@Override
	public List<String> getHeader() {
		return OccurrenceCSVConverterOpenModeller.HEADER;
	}

}
@@ -0,0 +1,194 @@
package org.gcube.data.spd.executor.jobs.csv;

import static org.gcube.data.streams.dsl.Streams.convert;

import java.io.File;
import java.io.FileWriter;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import net.sf.csv4j.CSVWriter;

import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.gcube.contentmanager.storageclient.wrapper.AccessType;
import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
import org.gcube.data.spd.Constants;
import org.gcube.data.spd.executor.jobs.URLJob;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.spd.model.service.types.CompleteJobStatus;
import org.gcube.data.spd.model.service.types.JobStatus;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.readers.LocalReader;
import org.gcube.data.spd.plugin.fwk.writers.Writer;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.LocalWrapper;
import org.gcube.data.spd.utils.DynamicMap;
import org.gcube.data.streams.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

abstract class CSVJob implements URLJob {

	private static final long serialVersionUID = 1L;

	private static Logger logger = LoggerFactory.getLogger(CSVJob.class);

	private int completedEntries = 0;

	private String resultURL = null;

	private Calendar endDate, startDate;

	private JobStatus status;

	private String id;

	private Map<TaxonomyItem, CompleteJobStatus> mapSubJobs;

	private Map<String, AbstractPlugin> plugins;

	public CSVJob(Map<String, AbstractPlugin> plugins) {
		this.mapSubJobs = new HashMap<TaxonomyItem, CompleteJobStatus>();
		this.id = UUID.randomUUID().toString();
		this.status = JobStatus.PENDING;
		this.plugins = plugins;
	}

	@Override
	public void run() {
		File csvFile = null;
		try {
			this.startDate = Calendar.getInstance();
			this.status = JobStatus.RUNNING;

			csvFile = File.createTempFile(this.id.replace("-", ""), ".csv");
			logger.trace("outputfile "+csvFile.getAbsolutePath());

			LocalWrapper<OccurrencePoint> localWrapper = new LocalWrapper<OccurrencePoint>(1000);
			localWrapper.forceOpen();

			final LocalWrapper<String> errorWrapper = new LocalWrapper<String>(2000);
			errorWrapper.forceOpen();
			Writer<String> errorWriter = new Writer<String>(errorWrapper);
			errorWriter.register();

			// the product keys selected by the client are published in a DynamicMap under the job id
			Stream<String> ids = convert(DynamicMap.get(this.id));

			OccurrenceReaderByKey occurrenceReader = new OccurrenceReaderByKey(localWrapper, ids, plugins);

			new Thread(occurrenceReader).start();

			FileWriter fileWriter = new FileWriter(csvFile);
			CSVWriter csvWriter = new CSVWriter(fileWriter);

			csvWriter.writeLine(getHeader());

			LocalReader<OccurrencePoint> ocReader = new LocalReader<OccurrencePoint>(localWrapper);

			Converter<OccurrencePoint, List<String>> csvConverter = getConverter();

			logger.debug("starting to read from localReader");

			while (ocReader.hasNext()) {
				OccurrencePoint op = ocReader.next();
				csvWriter.writeLine(csvConverter.convert(op));
				completedEntries++;
			}

			if (completedEntries==0)
				throw new Exception("no record was written");

			logger.debug("closing file, writing it to the storage");

			csvWriter.close();
			fileWriter.close();

			IClient client = new StorageClient(Constants.SERVICE_CLASS, Constants.SERVICE_NAME, "CSV", AccessType.SHARED).getClient();

			String filePath = "/csv/"+this.id.replace("-", "")+".csv";

			client.put(true).LFile(csvFile.getAbsolutePath()).RFile(filePath);

			this.resultURL = client.getUrl().RFile(filePath);

			logger.debug("job completed");

			this.status = JobStatus.COMPLETED;
		} catch (Exception e) {
			logger.error("error executing CSVJob", e);
			this.status = JobStatus.FAILED;
			return;
		} finally {
			if (csvFile!=null)
				csvFile.delete();
			this.endDate = Calendar.getInstance();
			DynamicMap.remove(this.id);
		}
	}

	public JobStatus getStatus() {
		return status;
	}

	public void setStatus(JobStatus status) {
		this.status = status;
	}

	public String getId() {
		return id;
	}

	public Map<TaxonomyItem, CompleteJobStatus> getMapSubJobs() {
		return mapSubJobs;
	}

	public String getResultURL() {
		return resultURL;
	}

	@Override
	public String getErrorURL() {
		// CSV jobs do not produce an error file
		return null;
	}

	public abstract Converter<OccurrencePoint, List<String>> getConverter();

	public abstract List<String> getHeader();

	@Override
	public boolean validateInput(String input) {
		return true;
	}

	@Override
	public int getCompletedEntries() {
		return completedEntries;
	}

	public Calendar getEndDate() {
		return endDate;
	}

	public Calendar getStartDate() {
		return startDate;
	}

}
@@ -0,0 +1,7 @@
package org.gcube.data.spd.executor.jobs.csv;

public interface Converter<T, D> {

	public D convert(T input) throws Exception;
}
@@ -0,0 +1,104 @@
package org.gcube.data.spd.executor.jobs.csv;

import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

import org.gcube.data.spd.model.products.OccurrencePoint;

/**
 * @author "Federico De Faveri defaveri@isti.cnr.it"
 */
public class OccurrenceCSVConverter implements Converter<OccurrencePoint, List<String>> {

	protected static SimpleDateFormat dateFormatter = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss");

	public static final List<String> HEADER = Arrays.asList(new String[]{
			"institutionCode",
			"collectionCode",
			"catalogueNumber",
			"dataSet",
			"dataProvider",
			"dataSource",
			"scientificNameAuthorship",
			"identifiedBy",
			// "lsid",
			"credits",
			"recordedBy",
			"eventDate",
			"modified",
			"scientificName",
			"kingdom",
			"family",
			"locality",
			"country",
			"citation",
			"decimalLatitude",
			"decimalLongitude",
			"coordinateUncertaintyInMeters",
			"maxDepth",
			"minDepth",
			"basisOfRecord"});

	@Override
	public List<String> convert(OccurrencePoint input) throws Exception {

		List<String> fields = new LinkedList<String>();
		fields.add(cleanValue(input.getInstitutionCode()));
		fields.add(cleanValue(input.getCollectionCode()));
		fields.add(cleanValue(input.getCatalogueNumber()));

		if (input.getDataSet()!=null) {
			fields.add(cleanValue(input.getDataSet().getName()));
			if (input.getDataSet().getDataProvider()!=null)
				fields.add(cleanValue(input.getDataSet().getDataProvider().getName()));
			else
				fields.add("");
		} else {
			fields.add("");
			fields.add("");
		}

		fields.add(cleanValue(input.getProvider()));
		fields.add(cleanValue(input.getScientificNameAuthorship()));
		fields.add(cleanValue(input.getIdentifiedBy()));
		// fields.add(cleanValue(input.getLsid()));
		fields.add(cleanValue(input.getCredits()));

		fields.add(cleanValue(input.getRecordedBy()));

		if (input.getEventDate() != null)
			fields.add(cleanValue(dateFormatter.format(input.getEventDate().getTime())));
		else fields.add("");
		if (input.getModified() != null)
			fields.add(cleanValue(dateFormatter.format(input.getModified().getTime())));
		else fields.add("");

		fields.add(cleanValue(input.getScientificName()));
		fields.add(cleanValue(input.getKingdom()));
		fields.add(cleanValue(input.getFamily()));
		fields.add(cleanValue(input.getLocality()));
		fields.add(cleanValue(input.getCountry()));
		fields.add(cleanValue(input.getCitation()));
		fields.add(cleanValue(String.valueOf(input.getDecimalLatitude())));
		fields.add(cleanValue(String.valueOf(input.getDecimalLongitude())));
		fields.add(cleanValue(input.getCoordinateUncertaintyInMeters()));
		fields.add(cleanValue(String.valueOf(input.getMaxDepth())));
		fields.add(cleanValue(String.valueOf(input.getMinDepth())));
		fields.add(cleanValue(input.getBasisOfRecord().name()));
		return fields;
	}

	protected static String cleanValue(String value) {
		if (value==null) return "";
		return value;
	}

}
@@ -0,0 +1,44 @@
package org.gcube.data.spd.executor.jobs.csv;

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

import org.gcube.data.spd.model.products.OccurrencePoint;

/**
 * @author "Federico De Faveri defaveri@isti.cnr.it"
 */
public class OccurrenceCSVConverterOpenModeller implements Converter<OccurrencePoint, List<String>> {

	// id, label, longitude, latitude, abundance
	public static final List<String> HEADER = Arrays.asList(new String[]{
			"#id",
			"label",
			"long",
			"lat",
			"abundance"});

	protected final static String PRESENCE = "1"; // abundance should be 1 for a presence point

	@Override
	public List<String> convert(OccurrencePoint input) throws Exception {

		List<String> fields = new LinkedList<String>();
		fields.add(cleanValue(input.getId()));
		fields.add(cleanValue(input.getScientificName()));
		fields.add(cleanValue(String.valueOf(input.getDecimalLongitude())));
		fields.add(cleanValue(String.valueOf(input.getDecimalLatitude())));
		fields.add(PRESENCE);
		return fields;
	}

	protected static String cleanValue(String value) {
		if (value==null) return "";
		return value;
	}

}
@@ -0,0 +1,73 @@
package org.gcube.data.spd.executor.jobs.csv;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.gcube.data.spd.manager.OccurrenceWriterManager;
import org.gcube.data.spd.model.exceptions.StreamBlockingException;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.model.service.exceptions.UnsupportedCapabilityException;
import org.gcube.data.spd.model.service.exceptions.UnsupportedPluginException;
import org.gcube.data.spd.model.util.Capabilities;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.util.Util;
import org.gcube.data.spd.plugin.fwk.writers.Writer;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.LocalWrapper;
import org.gcube.data.streams.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class OccurrenceReaderByKey implements Runnable {

	private static Logger logger = LoggerFactory.getLogger(OccurrenceReaderByKey.class);

	private final LocalWrapper<OccurrencePoint> ocWrapper;
	private Stream<String> stream;
	private Map<String, AbstractPlugin> plugins;

	public OccurrenceReaderByKey(LocalWrapper<OccurrencePoint> ocWrapper, Stream<String> stream, Map<String, AbstractPlugin> plugins) {
		this.ocWrapper = ocWrapper;
		this.stream = stream;
		this.plugins = plugins;
	}

	@Override
	public void run() {
		// one writer manager per provider, created on demand
		HashMap<String, OccurrenceWriterManager> managerPerProvider = new HashMap<String, OccurrenceWriterManager>();
		try {
			while (this.stream.hasNext()) {
				String key = this.stream.next();
				try {
					// keys embed both the provider and the product id
					String provider = Util.getProviderFromKey(key);
					if (!managerPerProvider.containsKey(provider))
						managerPerProvider.put(provider, new OccurrenceWriterManager(provider));

					Writer<OccurrencePoint> ocWriter = new Writer<OccurrencePoint>(ocWrapper, managerPerProvider.get(provider));
					String id = Util.getIdFromKey(key);
					AbstractPlugin plugin = plugins.get(provider);
					if (plugin==null) throw new UnsupportedPluginException();
					if (!plugin.getSupportedCapabilities().contains(Capabilities.Occurrence))
						throw new UnsupportedCapabilityException();
					ocWriter.register();
					plugin.getOccurrencesInterface().getOccurrencesByProductKeys(ocWriter, Collections.singletonList(id).iterator());
				} catch (Exception e) {
					logger.warn("error getting occurrence points with key "+key, e);
				}
			}
		} catch (Exception e) {
			logger.error("error reading keys", e);
			try {
				ocWrapper.add(new StreamBlockingException(""));
			} catch (Exception e1) {
				logger.error("unexpected error", e1);
			}
		}
		try {
			ocWrapper.disableForceOpenAndClose();
		} catch (Exception e) {
			logger.warn("error closing the local reader", e);
		}
	}
}
@@ -0,0 +1,263 @@
package org.gcube.data.spd.executor.jobs.darwincore;

import static org.gcube.data.streams.dsl.Streams.convert;
import static org.gcube.data.streams.dsl.Streams.pipe;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Iterator;
import java.util.Map;
import java.util.UUID;

import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.gcube.contentmanager.storageclient.wrapper.AccessType;
import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
import org.gcube.data.spd.Constants;
import org.gcube.data.spd.executor.jobs.URLJob;
import org.gcube.data.spd.executor.jobs.csv.OccurrenceReaderByKey;
import org.gcube.data.spd.model.exceptions.StreamException;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.spd.model.service.types.JobStatus;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.readers.LocalReader;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.LocalWrapper;
import org.gcube.data.spd.utils.DynamicMap;
import org.gcube.data.spd.utils.Utils;
import org.gcube.data.streams.Stream;
import org.gcube.data.streams.exceptions.StreamSkipSignal;
import org.gcube.data.streams.exceptions.StreamStopSignal;
import org.gcube.data.streams.generators.Generator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DarwinCoreJob implements URLJob {

	private static final long serialVersionUID = 1L;

	private static Logger logger = LoggerFactory.getLogger(DarwinCoreJob.class);

	private int completedEntries = 0;

	private Calendar endDate, startDate;

	private String resultURL = null;

	private String errorFileURL = null;

	private JobStatus status;

	private String id;

	private Map<TaxonomyItem, JobStatus> mapSubJobs;

	private Map<String, AbstractPlugin> plugins;

	public DarwinCoreJob(Map<String, AbstractPlugin> plugins) {
		this.id = UUID.randomUUID().toString();
		this.status = JobStatus.PENDING;
		this.plugins = plugins;
	}

	@Override
	public void run() {
		File darwincoreFile = null;
		File errorFile = null;
		try {
			this.startDate = Calendar.getInstance();
			this.status = JobStatus.RUNNING;

			LocalWrapper<OccurrencePoint> localWrapper = new LocalWrapper<OccurrencePoint>(2000);
			localWrapper.forceOpen();

			Stream<String> ids = convert(DynamicMap.get(this.id));

			OccurrenceReaderByKey occurrenceReader = new OccurrenceReaderByKey(localWrapper, ids, plugins);

			new Thread(occurrenceReader).start();

			LocalReader<OccurrencePoint> ocReader = new LocalReader<OccurrencePoint>(localWrapper);

			IClient client = new StorageClient(Constants.SERVICE_CLASS, Constants.SERVICE_NAME, "DarwinCore", AccessType.SHARED).getClient();

			darwincoreFile = getDarwinCoreFile(ocReader);
			String resultPath = "/darwincore/"+this.id.replace("-", "");
			client.put(true).LFile(darwincoreFile.getAbsolutePath()).RFile(resultPath);
			this.resultURL = client.getUrl().RFile(resultPath);

			errorFile = Utils.createErrorFile(
					pipe(convert(localWrapper.getErrors())).through(new Generator<StreamException, String>() {

						@Override
						public String yield(StreamException element)
								throws StreamSkipSignal, StreamStopSignal {
							return element.getRepositoryName()+" "+element.getIdentifier();
						}
					}));

			if (errorFile!=null) {
				String errorFilePath = "/darwincore/"+this.id.replace("-", "")+"-ERRORS.txt";
				client.put(true).LFile(errorFile.getAbsolutePath()).RFile(errorFilePath);
				this.errorFileURL = client.getUrl().RFile(errorFilePath);
			}

			logger.trace("filePath is "+darwincoreFile.getAbsolutePath());
			this.status = JobStatus.COMPLETED;
		} catch (Exception e) {
			logger.error("error executing DWCAJob", e);
			this.status = JobStatus.FAILED;
			return;
		} finally {
			if (darwincoreFile!=null)
				darwincoreFile.delete();
			if (errorFile!=null)
				errorFile.delete();
			this.endDate = Calendar.getInstance();
			DynamicMap.remove(this.id);
		}
	}

	public JobStatus getStatus() {
		return status;
	}

	public void setStatus(JobStatus status) {
		this.status = status;
	}

	public Calendar getEndDate() {
		return endDate;
	}

	public Calendar getStartDate() {
		return startDate;
	}

	public String getId() {
		return id;
	}

	public Map<TaxonomyItem, JobStatus> getMapSubJobs() {
		return mapSubJobs;
	}

	public String getResultURL() {
		return resultURL;
	}

	@Override
	public String getErrorURL() {
		return this.errorFileURL;
	}

	@Override
	public boolean validateInput(String input) {
		return true;
	}

	@Override
	public int getCompletedEntries() {
		return completedEntries;
	}

	private File getDarwinCoreFile(Iterator<OccurrencePoint> reader) throws Exception {

		DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");

		OutputStreamWriter writer = new OutputStreamWriter(new ByteArrayOutputStream());

		try {
			File returnFile = File.createTempFile("darwinCore", "xml");
			writer = new FileWriter(returnFile);

			writer.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
			writer.append("<SimpleDarwinRecordSet xmlns=\"http://rs.tdwg.org/dwc/xsd/simpledarwincore/\" xmlns:dc=\"http://purl.org/dc/terms/\" xmlns:dwc=\"http://rs.tdwg.org/dwc/terms/\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://rs.tdwg.org/dwc/xsd/simpledarwincore/ http://rs.tdwg.org/dwc/xsd/tdwg_dwc_simple.xsd\">");

			while (reader.hasNext()) {

				writer.append("<SimpleDarwinRecord>");
				writer.append("<dc:language>en</dc:language>");

				OccurrencePoint occurrence = reader.next();

				if (occurrence.getModified() != null)
					writer.append("<dc:modified>" + df.format(occurrence.getModified().getTime()) + "</dc:modified>");
				if (occurrence.getBasisOfRecord() != null)
					writer.append("<dwc:basisOfRecord>" + occurrence.getBasisOfRecord().name() + "</dwc:basisOfRecord>");
				if (occurrence.getScientificNameAuthorship() != null)
					writer.append("<dwc:scientificNameAuthorship>" + occurrence.getScientificNameAuthorship() + "</dwc:scientificNameAuthorship>");
				if (occurrence.getInstitutionCode() != null)
					writer.append("<dwc:institutionCode>" + occurrence.getInstitutionCode() + "</dwc:institutionCode>");
				if (occurrence.getCollectionCode() != null)
					writer.append("<dwc:collectionCode>" + occurrence.getCollectionCode() + "</dwc:collectionCode>");
|
||||
writer.append("<dwc:catalogNumber>" + occurrence.getCatalogueNumber() + "</dwc:catalogNumber>");
|
||||
if (occurrence.getIdentifiedBy() != null)
|
||||
writer.append("<dwc:identifiedBy>" + occurrence.getIdentifiedBy() + "</dwc:identifiedBy>");
|
||||
if (occurrence.getRecordedBy() != null)
|
||||
writer.append("<dwc:recordedBy>" + occurrence.getRecordedBy() + "</dwc:recordedBy>");
|
||||
if (occurrence.getScientificName() != null)
|
||||
writer.append("<dwc:scientificName>" + occurrence.getScientificName() + "</dwc:scientificName>");
|
||||
if (occurrence.getKingdom() != null)
|
||||
writer.append("<dwc:kingdom>" + occurrence.getKingdom() + "</dwc:kingdom>");
|
||||
if (occurrence.getFamily() != null)
|
||||
writer.append("<dwc:family>" + occurrence.getFamily() + "</dwc:family>");
|
||||
if (occurrence.getLocality() != null)
|
||||
writer.append("<dwc:locality>" + occurrence.getLocality() + "</dwc:locality>");
|
||||
if (occurrence.getEventDate() != null)
|
||||
{
|
||||
writer.append("<dwc:eventDate>" + df.format(occurrence.getEventDate().getTime()) + "</dwc:eventDate>");
|
||||
writer.append("<dwc:year>" + occurrence.getEventDate().get(Calendar.YEAR) + "</dwc:year>");
|
||||
}
|
||||
if (occurrence.getDecimalLatitude() != 0.0)
|
||||
writer.append("<dwc:decimalLatitude>" + occurrence.getDecimalLatitude() + "</dwc:decimalLatitude>");
|
||||
if (occurrence.getDecimalLongitude() != 0.0)
|
||||
writer.append("<dwc:decimalLongitude>" + occurrence.getDecimalLongitude() + "</dwc:decimalLongitude>");
|
||||
if (occurrence.getCoordinateUncertaintyInMeters() != null)
|
||||
writer.append("<dwc:coordinateUncertaintyInMeters>" + occurrence.getCoordinateUncertaintyInMeters() + "</dwc:coordinateUncertaintyInMeters>");
|
||||
if (occurrence.getMaxDepth() != 0.0)
|
||||
writer.append("<dwc:maximumDepthInMeters>" + occurrence.getMaxDepth() + "</dwc:maximumDepthInMeters>");
|
||||
if (occurrence.getMinDepth() != 0.0)
|
||||
writer.append("<dwc:minimumDepthInMeters>" + occurrence.getMinDepth() + "</dwc:minimumDepthInMeters>");
|
||||
|
||||
writer.append("</SimpleDarwinRecord>");
|
||||
completedEntries++;
|
||||
}
|
||||
|
||||
writer.append("</SimpleDarwinRecordSet>");
|
||||
writer.flush();
|
||||
writer.close();
|
||||
return returnFile;
|
||||
}catch (Exception e) {
|
||||
logger.error("error writing occurrences as darwin core",e);
|
||||
throw e;
|
||||
}finally{
|
||||
try {
|
||||
writer.close();
|
||||
} catch (IOException e) {
|
||||
logger.warn("error closing the output stream",e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
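A minimal sketch of how a URLJob such as DarwinCoreJob appears to be driven, based only on the calls visible above; the DynamicMap registration call and the polling loop are assumptions, not part of this module:

    // Hypothetical driver: register the occurrence keys under the job id,
    // run the job on a thread, then poll its status.
    DarwinCoreJob job = new DarwinCoreJob(plugins);  // plugins: Map<String, AbstractPlugin>
    DynamicMap.put(job.getId(), occurrenceKeys);     // assumption: DynamicMap offers a put-like registration
    new Thread(job).start();
    while (job.getStatus() == JobStatus.PENDING || job.getStatus() == JobStatus.RUNNING)
        Thread.sleep(500);
    String resultUrl = job.getResultURL();           // non-null only when status is COMPLETED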
@ -0,0 +1,324 @@
package org.gcube.data.spd.executor.jobs.dwca;

import gr.uoa.di.madgik.commons.utils.FileUtils;

import java.io.File;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID;

import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.gcube.contentmanager.storageclient.wrapper.AccessType;
import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
import org.gcube.data.spd.Constants;
import org.gcube.data.spd.exception.MaxRetriesReachedException;
import org.gcube.data.spd.executor.jobs.URLJob;
import org.gcube.data.spd.manager.TaxonomyItemWriterManager;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.model.exceptions.IdNotValidException;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.spd.model.service.exceptions.UnsupportedCapabilityException;
import org.gcube.data.spd.model.service.exceptions.UnsupportedPluginException;
import org.gcube.data.spd.model.service.types.JobStatus;
import org.gcube.data.spd.model.util.Capabilities;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.readers.LocalReader;
import org.gcube.data.spd.plugin.fwk.util.Util;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.gcube.data.spd.plugin.fwk.writers.Writer;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.LocalWrapper;
import org.gcube.data.spd.utils.JobRetryCall;
import org.gcube.data.spd.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Job that builds a Darwin Core Archive for a taxon and all of its children:
 * the taxonomy tree is visited recursively, serialized through MapDwCA and
 * uploaded to the gCube storage together with an optional error report.
 */
public class DWCAJobByChildren implements URLJob{

	private static final long serialVersionUID = 1L;

	private static Logger logger = LoggerFactory.getLogger(DWCAJobByChildren.class);

	private int completedEntries = 0;

	private String resultURL = null;

	private String errorFileURL = null;

	private JobStatus status;

	private Calendar endDate, startDate;

	private Map<String, AbstractPlugin> plugins;

	private String id;

	private Map<TaxonomyItem, JobStatus> mapSubJobs;

	private String taxonKey;

	public DWCAJobByChildren(String taxonKey, Map<String, AbstractPlugin> plugins) {
		logger.trace("the Taxon Key is "+taxonKey);
		this.mapSubJobs = new HashMap<TaxonomyItem, JobStatus>();
		this.id = UUID.randomUUID().toString();
		this.taxonKey = taxonKey;
		this.status = JobStatus.PENDING;
		this.plugins = plugins;
	}

	private AbstractPlugin pluginToUse = null;

	private AbstractPlugin getPlugin(String key) throws Exception{
		if (pluginToUse==null){
			String pluginName = Util.getProviderFromKey(key);
			if (!plugins.containsKey(pluginName))
				throw new UnsupportedPluginException();
			return plugins.get(pluginName);
		} else return pluginToUse;
	}

	//ONLY FOR TEST PURPOSES
	public void setPluginToUse(AbstractPlugin plugin){
		this.pluginToUse = plugin;
	}

	@Override
	public void run() {
		File errorFile = null;
		File dwcaFile = null;
		try{
			this.startDate = Calendar.getInstance();
			this.status = JobStatus.RUNNING;

			AbstractPlugin plugin = getPlugin(this.taxonKey);

			logger.trace("plugin for this job is "+plugin.getRepositoryName());
			String id = Util.getIdFromKey(this.taxonKey);

			if (!plugin.getSupportedCapabilities().contains(Capabilities.Classification)) throw new UnsupportedCapabilityException();

			//TODO add ERROR on this method
			List<TaxonomyItem> taxa = getChildrenWithRetry(id, plugin);
			if (taxa==null) throw new Exception("failed contacting external repository");
			if (taxa.size()==0) throw new Exception("the taxon with key "+this.taxonKey+" has no children");

			TaxonomyItem rootItem = plugin.getClassificationInterface().retrieveTaxonById(id);

			for (TaxonomyItem taxon : taxa){
				taxon.setParent(rootItem);
				mapSubJobs.put(taxon, JobStatus.PENDING);
			}

			final LocalWrapper<TaxonomyItem> localWrapper = new LocalWrapper<TaxonomyItem>(2000);

			Writer<TaxonomyItem> writer = new Writer<TaxonomyItem>(localWrapper, new TaxonomyItemWriterManager(plugin.getRepositoryName()));
			writer.register();

			final LocalWrapper<String> errorWrapper = new LocalWrapper<String>(2000);
			Writer<String> errorWriter = new Writer<String>(errorWrapper);
			errorWriter.register();

			//write the root item and its ancestors before descending into the children
			do{
				writer.write(rootItem);
				rootItem = rootItem.getParent();
			} while (rootItem!=null);

			new TaxonReader(writer, errorWriter, plugin).start();

			LocalReader<TaxonomyItem> reader = new LocalReader<TaxonomyItem>(localWrapper);

			MapDwCA dwca = new MapDwCA();
			dwcaFile = dwca.createDwCA(reader);

			logger.trace("the file is null? "+(dwcaFile==null));

			logger.trace("filePath is "+dwcaFile.getAbsolutePath());

			IClient client = new StorageClient(Constants.SERVICE_CLASS, Constants.SERVICE_NAME, "DWCA", AccessType.SHARED).getClient();

			String resultPath = "/dwca/"+this.id.replace("-", "")+".zip";

			client.put(true).LFile(dwcaFile.getAbsolutePath()).RFile(resultPath);

			this.resultURL = client.getUrl().RFile(resultPath);

			LocalReader<String> errorReader = new LocalReader<String>(errorWrapper);
			errorFile = Utils.createErrorFile(errorReader);
			errorReader.close();

			if (errorFile!=null){
				String errorFilePath = "/dwca/"+this.id.replace("-", "")+"-ERRORS.txt";
				client.put(true).LFile(errorFile.getAbsolutePath()).RFile(errorFilePath);
				this.errorFileURL = client.getUrl().RFile(errorFilePath);
			}

			logger.trace("files stored");

			this.status = JobStatus.COMPLETED;
		}catch (Exception e) {
			logger.error("error executing DWCAJob",e);
			this.status = JobStatus.FAILED;
			return;
		} finally{
			if (dwcaFile!=null && dwcaFile.exists())
				FileUtils.CleanUp(dwcaFile.getParentFile());
			if (errorFile!=null && errorFile.exists())
				errorFile.delete();
			this.endDate = Calendar.getInstance();
		}
	}

	public JobStatus getStatus() {
		return status;
	}

	public void setStatus(JobStatus status) {
		this.status = status;
	}

	public String getId() {
		return id;
	}

	public Map<TaxonomyItem, JobStatus> getMapSubJobs() {
		return mapSubJobs;
	}

	public Calendar getEndDate() {
		return endDate;
	}

	public Calendar getStartDate() {
		return startDate;
	}

	public String getResultURL() {
		return resultURL;
	}

	@Override
	public String getErrorURL() {
		return errorFileURL;
	}

	public class TaxonReader extends Thread{

		private Writer<TaxonomyItem> writer;
		private Writer<String> errorWriter;
		private AbstractPlugin plugin;

		public TaxonReader(Writer<TaxonomyItem> writer, Writer<String> errorWriter, AbstractPlugin plugin) {
			this.writer = writer;
			this.plugin = plugin;
			this.errorWriter = errorWriter;
		}

		@Override
		public void run() {
			for (Entry<TaxonomyItem, JobStatus> entry: mapSubJobs.entrySet()){
				entry.setValue(JobStatus.RUNNING);
				try{
					retrieveTaxaTree(writer, errorWriter, entry.getKey(), plugin);
					entry.setValue(JobStatus.COMPLETED);
					completedEntries++;
				}catch (Exception e) {
					errorWriter.write(entry.getKey().getScientificName());
					entry.setValue(JobStatus.FAILED);
					logger.warn("failed computing job for taxon "+entry.getKey(),e);
				}
			}
			this.writer.close();
			this.errorWriter.close();
		}

		private void retrieveTaxaTree(ObjectWriter<TaxonomyItem> writer, ObjectWriter<String> errorWriter, TaxonomyItem taxon, AbstractPlugin plugin) throws Exception{
			writer.write(taxon);

			//retrieving references
			if (taxon.getStatus()!=null && taxon.getStatus().getRefId() != null){
				String id = taxon.getStatus().getRefId();
				try {
					TaxonomyItem tempTaxon = retrieveTaxonIdWithRetry(id, plugin);
					do{
						writer.write(tempTaxon);
						tempTaxon = tempTaxon.getParent();
					}while(tempTaxon!=null);
				} catch (Exception e) {
					logger.warn("refId "+id+" not retrieved for plugin "+plugin.getRepositoryName(),e);
					errorWriter.write(plugin.getRepositoryName()+" - "+taxon.getId()+" - "+taxon.getScientificName());
				}
			}

			List<TaxonomyItem> items = null;

			try{
				items = getChildrenWithRetry(taxon.getId(), plugin);
			}catch (MaxRetriesReachedException e) {
				logger.trace("error retrieving element with id {} and scientific name {} ",taxon.getId(),taxon.getScientificName());
				errorWriter.write(plugin.getRepositoryName()+" - "+taxon.getId()+" - "+taxon.getScientificName());
			}

			if (items!=null)
				for(TaxonomyItem item : items){
					item.setParent(taxon);
					logger.trace("sending request for item with id "+item.getId());
					retrieveTaxaTree(writer, errorWriter, item, plugin);
				}

		}
	}

	@Override
	public boolean validateInput(String input) {
		try{
			Util.getIdFromKey(input);
			Util.getProviderFromKey(input);
		}catch (Exception e) {
			return false;
		}
		return true;
	}

	private List<TaxonomyItem> getChildrenWithRetry(final String id, final AbstractPlugin plugin) throws IdNotValidException, MaxRetriesReachedException{
		return new JobRetryCall<List<TaxonomyItem>, IdNotValidException>() {

			@Override
			protected List<TaxonomyItem> execute() throws ExternalRepositoryException, IdNotValidException {
				return plugin.getClassificationInterface().retrieveTaxonChildrenByTaxonId(id);
			}
		}.call();
	}

	private TaxonomyItem retrieveTaxonIdWithRetry(final String id, final AbstractPlugin plugin) throws IdNotValidException, MaxRetriesReachedException{
		return new JobRetryCall<TaxonomyItem, IdNotValidException>() {

			@Override
			protected TaxonomyItem execute() throws ExternalRepositoryException, IdNotValidException {
				return plugin.getClassificationInterface().retrieveTaxonById(id);
			}
		}.call();
	}

	@Override
	public int getCompletedEntries() {
		return completedEntries;
	}

}
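The two *WithRetry helpers above rely on JobRetryCall, whose implementation is not part of this diff; from its use here, its contract is presumably a retry template along these lines (an inferred sketch, not the actual class):

    // Inferred shape only: call() repeats execute() on ExternalRepositoryException,
    // lets the typed exception E propagate immediately, and gives up with
    // MaxRetriesReachedException after a fixed number of attempts.
    public abstract class JobRetryCall<T, E extends Exception> {
        private static final int MAX_RETRIES = 3; // assumed limit
        protected abstract T execute() throws ExternalRepositoryException, E;
        public T call() throws E, MaxRetriesReachedException {
            for (int attempt = 0; attempt < MAX_RETRIES; attempt++)
                try {
                    return execute();
                } catch (ExternalRepositoryException e) {
                    // external repository failed: retry
                }
            throw new MaxRetriesReachedException();
        }
    }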
@ -0,0 +1,260 @@
package org.gcube.data.spd.executor.jobs.dwca;

import static org.gcube.data.streams.dsl.Streams.convert;

import gr.uoa.di.madgik.commons.utils.FileUtils;

import java.io.File;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import org.gcube.common.authorization.library.AuthorizedTasks;
import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.gcube.contentmanager.storageclient.wrapper.AccessType;
import org.gcube.contentmanager.storageclient.wrapper.StorageClient;
import org.gcube.data.spd.Constants;
import org.gcube.data.spd.exception.MaxRetriesReachedException;
import org.gcube.data.spd.executor.jobs.URLJob;
import org.gcube.data.spd.manager.TaxonomyItemWriterManager;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.model.exceptions.IdNotValidException;
import org.gcube.data.spd.model.exceptions.StreamNonBlockingException;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.spd.model.service.types.JobStatus;
import org.gcube.data.spd.model.util.Capabilities;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.readers.LocalReader;
import org.gcube.data.spd.plugin.fwk.util.Util;
import org.gcube.data.spd.plugin.fwk.writers.Writer;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.AbstractWrapper;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.LocalWrapper;
import org.gcube.data.spd.utils.DynamicMap;
import org.gcube.data.spd.utils.JobRetryCall;
import org.gcube.data.spd.utils.Utils;
import org.gcube.data.streams.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Job that builds a Darwin Core Archive from an explicit list of taxon keys:
 * the keys are read from the DynamicMap under this job id, resolved through
 * the owning plugins and serialized with MapDwCA.
 */
public class DWCAJobByIds implements URLJob{

	private static final long serialVersionUID = 1L;

	private static Logger logger = LoggerFactory.getLogger(DWCAJobByIds.class);

	private String resultURL = null;

	private String errorURL = null;

	private Calendar endDate, startDate;

	private int completedEntries = 0;

	private JobStatus status;

	private String id;

	private Map<String, AbstractPlugin> plugins;

	public DWCAJobByIds(Map<String, AbstractPlugin> plugins) {
		this.id = UUID.randomUUID().toString();
		this.plugins = plugins;
		this.status = JobStatus.PENDING;
	}

	@Override
	public void run() {
		File errorsFile = null;
		File dwcaFile = null;
		try{
			this.startDate = Calendar.getInstance();
			this.status = JobStatus.RUNNING;

			final LocalWrapper<TaxonomyItem> localWrapper = new LocalWrapper<TaxonomyItem>(2000);

			final LocalWrapper<String> errorWrapper = new LocalWrapper<String>(2000);
			Writer<String> errorWriter = new Writer<String>(errorWrapper);
			errorWriter.register();

			TaxonReader taxonReader = new TaxonReader(localWrapper, errorWriter, this.id);
			new Thread(AuthorizedTasks.bind(taxonReader)).start();

			LocalReader<TaxonomyItem> reader = new LocalReader<TaxonomyItem>(localWrapper);

			MapDwCA dwca = new MapDwCA();
			dwcaFile = dwca.createDwCA(reader);

			logger.trace("the file is null? "+(dwcaFile==null));

			IClient client = new StorageClient(Constants.SERVICE_CLASS, Constants.SERVICE_NAME, "DWCA", AccessType.SHARED).getClient();

			String resultPath = "/dwca/"+this.id.replace("-", "")+".zip";
			client.put(true).LFile(dwcaFile.getAbsolutePath()).RFile(resultPath);
			this.resultURL = client.getUrl().RFile(resultPath);

			LocalReader<String> errorReader = new LocalReader<String>(errorWrapper);
			errorsFile = Utils.createErrorFile(errorReader);
			errorReader.close();

			if (errorsFile!=null){
				String errorFilePath = "/dwca/"+this.id.replace("-", "")+"-ERRORS.txt";
				client.put(true).LFile(errorsFile.getAbsolutePath()).RFile(errorFilePath);
				this.errorURL = client.getUrl().RFile(errorFilePath);
			}

			logger.trace("filePath is "+dwcaFile.getAbsolutePath());
			this.status = JobStatus.COMPLETED;
		}catch (Exception e) {
			logger.error("error executing DWCAJob",e);
			this.status = JobStatus.FAILED;
			return;
		}finally{
			if (dwcaFile!=null)
				FileUtils.CleanUp(dwcaFile.getParentFile());

			if (errorsFile!=null)
				errorsFile.delete();
			this.endDate = Calendar.getInstance();
		}
	}

	public JobStatus getStatus() {
		return status;
	}

	public void setStatus(JobStatus status) {
		this.status = status;
	}

	public String getId() {
		return id;
	}

	public String getResultURL() {
		return resultURL;
	}

	@Override
	public String getErrorURL() {
		return this.errorURL;
	}

	public Calendar getEndDate() {
		return endDate;
	}

	public Calendar getStartDate() {
		return startDate;
	}

	public class TaxonReader implements Runnable{

		private AbstractWrapper<TaxonomyItem> wrapper;
		private String dynamicListId;
		private Writer<String> errorWriter;

		public TaxonReader(AbstractWrapper<TaxonomyItem> wrapper, Writer<String> errorWriter, String dynamicListId) {
			this.wrapper = wrapper;
			this.dynamicListId = dynamicListId;
			this.errorWriter = errorWriter;
		}

		@Override
		public void run() {
			try{
				Stream<String> ids = convert(DynamicMap.get(this.dynamicListId));
				//one registered writer per provider, keyed consistently by the provider name
				Map<String, Writer<TaxonomyItem>> pluginMap = new HashMap<String, Writer<TaxonomyItem>>();

				while(ids.hasNext()){
					String key = ids.next();
					String id = null;
					String provider = null;
					try{
						id = Util.getIdFromKey(key);
						provider = Util.getProviderFromKey(key);
						Writer<TaxonomyItem> writer;
						AbstractPlugin plugin = plugins.get(provider);
						if (plugin!=null && plugin.getSupportedCapabilities().contains(Capabilities.Classification)){
							if (!pluginMap.containsKey(provider)){
								writer = new Writer<TaxonomyItem>(wrapper, new TaxonomyItemWriterManager(plugin.getRepositoryName()));
								writer.register();
								pluginMap.put(provider, writer);
							}else
								writer = pluginMap.get(provider);
							TaxonomyItem item = retrieveByIdWithRetry(plugin, id);
							writer.write(item);
							//retrieving references
							if (item.getStatus()!=null && item.getStatus().getRefId() != null){
								String refId = item.getStatus().getRefId();
								try {
									TaxonomyItem tempTaxon = retrieveByIdWithRetry(plugin, refId);
									do{
										writer.write(tempTaxon);
										tempTaxon = tempTaxon.getParent();
									}while(tempTaxon!=null);
								} catch (IdNotValidException e) {
									logger.warn("refId "+refId+" not retrieved for plugin "+plugin.getRepositoryName(),e);
									writer.write(new StreamNonBlockingException(plugin.getRepositoryName(), refId));
								}
							}
							while (item.getParent()!=null)
								writer.write(item = item.getParent());
						}
						else logger.warn("taxon capability or plugin not found for key "+key);

						completedEntries++;
					}catch (IdNotValidException e) {
						logger.error("error retrieving key "+key,e);
						errorWriter.write(provider+" - "+id);
					}catch (MaxRetriesReachedException e) {
						logger.error("max retry reached for "+provider,e);
						errorWriter.write(provider+" - "+id);
					}

				}
				for(Writer<TaxonomyItem> writer: pluginMap.values())
					writer.close();
				this.errorWriter.close();
			}finally{
				DynamicMap.remove(this.dynamicListId);
			}
		}

	}

	@Override
	public boolean validateInput(String input) {
		return true;
	}

	@Override
	public int getCompletedEntries() {
		return completedEntries;
	}

	private TaxonomyItem retrieveByIdWithRetry(final AbstractPlugin plugin, final String id) throws MaxRetriesReachedException, IdNotValidException{
		return new JobRetryCall<TaxonomyItem, IdNotValidException>() {

			@Override
			protected TaxonomyItem execute()
					throws ExternalRepositoryException, IdNotValidException {
				return plugin.getClassificationInterface().retrieveTaxonById(id);
			}
		}.call();
	}

}
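Both DWCA jobs assume composite keys that embed the provider name; the encoding lives in Util, which is not shown in this diff. As an illustration of the round trip the code depends on (the "GBIF:2435099" form is invented for the example, the exact format is Util's business):

    String key = Util.keyEnrichment("GBIF", "2435099"); // e.g. "GBIF:2435099"
    assert Util.getProviderFromKey(key).equals("GBIF");
    assert Util.getIdFromKey(key).equals("2435099");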
@ -0,0 +1,463 @@
package org.gcube.data.spd.executor.jobs.dwca;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import org.gcube.data.spd.model.CommonName;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Builds a Darwin Core Archive (meta.xml, eml.xml, taxa.txt, VernacularName.txt,
 * zipped as archive-tax.zip) from a stream of TaxonomyItem objects.
 */
public class MapDwCA {

	static Logger logger = LoggerFactory.getLogger(MapDwCA.class);

	private BufferedWriter vernacularFile;
	private File tempFolder;
	private List<File> fileList = new ArrayList<File>();
	private String archiveZip = "archive-tax.zip";

	public MapDwCA() {
		super();
	}

	public synchronized File createDwCA(Iterator<TaxonomyItem> taxa) throws Exception{
		createMetaXml();
		createMetadata();
		createHeaders();
		createTaxaTxt(taxa);
		getAllFiles(tempFolder);
		return writeZipFile(tempFolder);
	}

	/**
	 * Create file meta.xml
	 */
	private void createMetaXml(){
		try {
			tempFolder = File.createTempFile("DwCA-folder", "");
			tempFolder.delete();
			tempFolder.mkdir();
			File output = new File(tempFolder + "/meta.xml");

			BufferedWriter bw = new BufferedWriter(new FileWriter(output));
			BufferedReader br = new BufferedReader(new InputStreamReader(MapDwCA.class.getResourceAsStream("/org/gcube/data/spd/dwca/meta.xml")));
			String line;

			while ((line = br.readLine()) != null) {
				bw.write(line);
				bw.write('\n');
			}
			bw.close();
			br.close();
		} catch (IOException e) {
			logger.error("IO Error", e);
		}
	}

	/**
	 * Create headers in taxa.txt and VernacularName.txt
	 */
	private void createHeaders(){
		try {
			BufferedWriter file = new BufferedWriter(new FileWriter(tempFolder + "/" + "taxa.txt", true));
			vernacularFile = new BufferedWriter(new FileWriter(tempFolder + "/" + "VernacularName.txt", true));

			//header taxa.txt
			file.write("taxonID\t");
			file.write("acceptedNameUsageID\t");
			file.write("parentNameUsageID\t");
			file.write("scientificName\t");
			file.write("scientificNameAuthorship\t");
			file.write("nameAccordingTo\t");
			file.write("kingdom\t");
			file.write("phylum\t");
			file.write("class\t");
			file.write("order\t");
			file.write("family\t");
			file.write("genus\t");
			file.write("subgenus\t");
			file.write("specificEpithet\t");
			file.write("infraspecificEpithet\t");
			file.write("verbatimTaxonRank\t");
			file.write("taxonRank\t");
			file.write("taxonomicStatus\t");
			file.write("modified\t");
			file.write("bibliographicCitation\t");
			file.write("taxonRemarks\t");
			file.write("scientificNameID\n");
			file.close();

			//header VernacularName.txt
			vernacularFile.write("taxonID\t");
			vernacularFile.write("vernacularName\t");
			vernacularFile.write("language\t");
			vernacularFile.write("locality\n");
			vernacularFile.close();

		} catch (IOException e) {
			logger.error("IO Error", e);
		}
	}

	/**
	 * Write taxa.txt
	 */
	public void createTaxaTxt(Iterator<TaxonomyItem> taxaReader){
		while (taxaReader.hasNext()) {
			TaxonomyItem item = taxaReader.next();
			writeLine(item);
		}
	}

	private void internalWriter(TaxonomyItem taxonomyItem, BufferedWriter file) throws IOException{
		String[] name = taxonomyItem.getScientificName().split(" ");

		// get the parent element
		TaxonomyItem tax = taxonomyItem.getParent();

		//create hashtable with the ancestors, keyed by rank
		Hashtable<String, String> hashTaxa = new Hashtable<String,String>();
		if (tax !=null)
			getTax(tax, hashTaxa);

		//taxonID
		file.write(taxonomyItem.getId());
		file.write("\t");

		//acceptedNameUsageID
		if (taxonomyItem.getStatus()==null){
			logger.trace("the status is null for "+taxonomyItem.getId());
		} else if (taxonomyItem.getStatus().getRefId() != null){
			String id = taxonomyItem.getStatus().getRefId();
			file.write(id);
		}

		file.write("\t");

		//parentNameUsageID
		if (tax !=null)
			file.write(tax.getId());
		file.write("\t");

		//scientificName
		file.write(taxonomyItem.getScientificName());
		file.write("\t");

		//scientificNameAuthorship
		if (taxonomyItem.getScientificNameAuthorship()!= null)
			file.write(taxonomyItem.getScientificNameAuthorship());
		file.write("\t");

		//nameAccordingTo
		if (taxonomyItem.getCitation()!= null)
			file.write(taxonomyItem.getCitation());
		file.write("\t");

		//kingdom
		String kingdom = (String)hashTaxa.get("kingdom");
		if (kingdom != null)
			file.write(kingdom);
		file.write("\t");

		//phylum
		String phylum = (String)hashTaxa.get("phylum");
		if (phylum != null)
			file.write(phylum);
		file.write("\t");

		//class
		String claz = (String)hashTaxa.get("class");
		if (claz != null)
			file.write(claz);
		file.write("\t");

		//order
		String order = (String)hashTaxa.get("order");
		if (order != null)
			file.write(order);
		file.write("\t");

		//family
		String family = (String)hashTaxa.get("family");
		if (family != null)
			file.write(family);
		file.write("\t");

		//genus
		String genus = (String)hashTaxa.get("genus");
		if (genus != null)
			file.write(genus);
		file.write("\t");

		//subgenus
		String subgenus = (String)hashTaxa.get("subgenus");
		if (subgenus != null)
			file.write(subgenus);
		file.write("\t");

		//specificEpithet
		if (name.length>1)
			file.write(name[1]);
		file.write("\t");

		//infraspecificEpithet
		if (name.length>2){
			file.write(name[name.length-1]);
		}
		file.write("\t");

		//verbatimTaxonRank
		if (name.length>2){
			file.write(name[name.length-2]);
		}
		file.write("\t");

		//taxonRank
		if (taxonomyItem.getRank()!= null)
			file.write(taxonomyItem.getRank().toLowerCase());
		file.write("\t");

		//taxonomicStatus (accepted, synonym, unknown)
		if (taxonomyItem.getStatus()!=null)
			file.write(taxonomyItem.getStatus().getStatus().toString().toLowerCase());
		file.write("\t");

		//modified
		if (taxonomyItem.getModified() !=null){
			DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
			Date date = taxonomyItem.getModified().getTime();
			String s = sdf.format(date);
			file.write(s);
		}
		file.write("\t");

		//bibliographicCitation
		if (taxonomyItem.getCredits() != null)
			file.write(taxonomyItem.getCredits());
		file.write("\t");

		//taxonRemarks
		if (taxonomyItem.getStatus()!=null && taxonomyItem.getStatus().getStatusAsString() != null)
			file.write(taxonomyItem.getStatus().getStatusAsString());
		file.write("\t");

		//scientificNameID
		if (taxonomyItem.getLsid() != null)
			file.write(taxonomyItem.getLsid());
		file.write("\n");

		//write vernacular names
		if (taxonomyItem.getCommonNames()!= null){
			createVernacularTxt(taxonomyItem.getId(), taxonomyItem.getCommonNames());
		}
	}

	/**
	 * Insert line in taxa.txt
	 */
	private void writeLine(TaxonomyItem taxonomyItem){
		BufferedWriter bufferedWriter = null;
		try {
			bufferedWriter = new BufferedWriter(new FileWriter(tempFolder + "/" + "taxa.txt", true));
			internalWriter(taxonomyItem, bufferedWriter);
		} catch (IOException e) {
			logger.error("IO Error", e);
		}finally{
			try {
				if (bufferedWriter!=null)
					bufferedWriter.close();
			} catch (IOException e) {
				logger.error("error closing bufferedWriter",e);
			}
		}
	}

	/**
	 * Write VernacularName.txt
	 */
	private void createVernacularTxt(String id, List<CommonName> list){
		try {
			vernacularFile = new BufferedWriter(new FileWriter(tempFolder + "/" + "VernacularName.txt", true));
			for (CommonName vernacular : list) {

				//taxonID
				vernacularFile.write(id);
				vernacularFile.write("\t");

				//vernacularName
				vernacularFile.write(vernacular.getName());
				vernacularFile.write("\t");

				//language
				if (vernacular.getLanguage()!= null)
					vernacularFile.write(vernacular.getLanguage());
				vernacularFile.write("\t");

				//locality
				if (vernacular.getLocality()!= null)
					vernacularFile.write(vernacular.getLocality());

				vernacularFile.write("\n");
			}
			vernacularFile.close();
		} catch (IOException e) {
			logger.error("IO Error", e);
		}
	}

	/**
	 * Create hashtable with taxonomy keys (rank -> scientific name), walking up the parents
	 */
	private void getTax(TaxonomyItem tax, Hashtable<String, String> taxa){
		taxa.put((tax.getRank()).toLowerCase(), tax.getScientificName());
		if (tax.getParent()!=null)
			getTax(tax.getParent(), taxa);
	}

	/**
	 * List files in directory
	 */
	private void getAllFiles(File dir) {
		try {
			File[] files = dir.listFiles();
			for (File file : files) {
				fileList.add(file);
				if (file.isDirectory()) {
					logger.trace("directory:" + file.getCanonicalPath());
					getAllFiles(file);
				} else {
					logger.trace(" file:" + file.getCanonicalPath());
				}
			}
		} catch (IOException e) {
			logger.error("error listing files",e);
		}
	}

	/**
	 * Create zip file
	 */
	private File writeZipFile(File directoryToZip) throws Exception {
		File zipFile = new File(directoryToZip + "/" + archiveZip);
		FileOutputStream fos = new FileOutputStream(zipFile);
		ZipOutputStream zos = new ZipOutputStream(fos);

		for (File file : fileList) {
			if (!file.isDirectory()) { // we only zip files, not directories
				addToZip(directoryToZip, file, zos);
			}
		}
		zos.close();
		fos.close();
		return zipFile;
	}

	/**
	 * Add files to zip
	 */
	private void addToZip(File directoryToZip, File file, ZipOutputStream zos) throws FileNotFoundException,
			IOException {

		FileInputStream fis = new FileInputStream(file);

		// we want the zipEntry's path to be a relative path that is relative
		// to the directory being zipped, so chop off the rest of the path
		String zipFilePath = file.getCanonicalPath().substring(directoryToZip.getCanonicalPath().length() + 1,
				file.getCanonicalPath().length());
		logger.trace("Writing '" + zipFilePath + "' to zip file");
		ZipEntry zipEntry = new ZipEntry(zipFilePath);
		zos.putNextEntry(zipEntry);

		byte[] bytes = new byte[1024];
		int length;
		while ((length = fis.read(bytes)) >= 0) {
			zos.write(bytes, 0, length);
		}

		zos.closeEntry();
		fis.close();
	}

	/**
	 * Create file eml.xml
	 */
	public void createMetadata() throws IOException {

		Calendar now = Calendar.getInstance();
		SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");

		File output = new File(tempFolder + "/eml.xml");
		//let a failure propagate instead of swallowing it and hitting a NullPointerException below
		BufferedWriter bw = new BufferedWriter(new FileWriter(output));
		BufferedReader br = new BufferedReader(new InputStreamReader(MapDwCA.class.getResourceAsStream("/org/gcube/data/spd/dwca/eml.xml")));
		String line;
		while ((line = br.readLine()) != null) {
			bw.write(line.replace("<pubDate></pubDate>", "<pubDate>" + format.format(now.getTime()) + "</pubDate>"));
			bw.write('\n');
		}
		bw.close();
		br.close();
	}

}
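For reference, the archive assembled by MapDwCA, as can be read directly off the methods above, is a zip named archive-tax.zip built in a temporary folder and containing:

    meta.xml             copied from the bundled resource, maps the columns of the files below
    eml.xml              bundled metadata with <pubDate> filled in at creation time
    taxa.txt             one tab-separated row per TaxonomyItem
    VernacularName.txt   one tab-separated row per common name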
@ -0,0 +1,150 @@
package org.gcube.data.spd.executor.jobs.layer;

import static org.gcube.data.streams.dsl.Streams.convert;

import java.util.ArrayList;
import java.util.Calendar;
import java.util.Map;
import java.util.UUID;

import org.gcube.data.spd.executor.jobs.URLJob;
import org.gcube.data.spd.executor.jobs.csv.OccurrenceReaderByKey;
import org.gcube.data.spd.model.PointInfo;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.model.service.types.JobStatus;
import org.gcube.data.spd.model.service.types.MetadataDetails;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.readers.LocalReader;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.LocalWrapper;
import org.gcube.data.spd.utils.DynamicMap;
import org.gcube.data.spd.utils.MapUtils;
import org.gcube.data.streams.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.thoughtworks.xstream.XStream;

/**
 * Job that collects the coordinates of the occurrence points registered under
 * this job id and publishes them as a GIS layer through MapUtils.
 */
public class LayerCreatorJob implements URLJob{

	private static final long serialVersionUID = -6560318170190865925L;

	private static Logger logger = LoggerFactory.getLogger(LayerCreatorJob.class);

	private String resultURL = null;

	private String errorFileURL = null;

	private JobStatus status;

	private Calendar endDate, startDate;

	private String id;

	private int completedEntries = 0;

	private Map<String, AbstractPlugin> plugins;

	private MetadataDetails metadata;

	public LayerCreatorJob(String metadataDetails, Map<String, AbstractPlugin> plugins) {
		this.id = UUID.randomUUID().toString();
		this.status = JobStatus.PENDING;
		this.plugins = plugins;
		this.metadata = (MetadataDetails) new XStream().fromXML(metadataDetails);
	}

	@Override
	public JobStatus getStatus() {
		return status;
	}

	@Override
	public void setStatus(JobStatus status) {
		this.status = status;
	}

	@Override
	public String getResultURL() {
		return resultURL;
	}

	@Override
	public String getErrorURL() {
		return errorFileURL;
	}

	@Override
	public String getId() {
		return id;
	}

	@Override
	public boolean validateInput(String input) {
		try{
			MetadataDetails md = (MetadataDetails) new XStream().fromXML(input);
			if (md!=null)
				return true;
		}catch(Throwable t){}
		return false;
	}

	@Override
	public int getCompletedEntries() {
		return completedEntries;
	}

	@Override
	public Calendar getStartDate() {
		return startDate;
	}

	@Override
	public Calendar getEndDate() {
		return endDate;
	}

	@Override
	public void run() {
		try{
			this.startDate = Calendar.getInstance();
			this.status = JobStatus.RUNNING;

			LocalWrapper<OccurrencePoint> localWrapper = new LocalWrapper<OccurrencePoint>(2000);
			localWrapper.forceOpen();

			Stream<String> ids = convert(DynamicMap.get(this.id));

			OccurrenceReaderByKey occurrenceReader = new OccurrenceReaderByKey(localWrapper, ids, plugins);

			new Thread(occurrenceReader).start();

			LocalReader<OccurrencePoint> ocReader = new LocalReader<OccurrencePoint>(localWrapper);

			ArrayList<PointInfo> points = new ArrayList<>();

			while (ocReader.hasNext()){
				OccurrencePoint op = ocReader.next();
				points.add(new PointInfo(op.getDecimalLongitude(), op.getDecimalLatitude()));
				completedEntries++;
			}

			org.gcube.data.spd.utils.MapUtils.Map map = MapUtils.publishLayerByCoords(this.metadata, points, false, true);
			this.resultURL = map.getLayerUUID();
			this.status = JobStatus.COMPLETED;
		}catch (Exception e) {
			logger.error("error executing Layer Job",e);
			this.status = JobStatus.FAILED;
			return;
		} finally{
			this.endDate = Calendar.getInstance();
			DynamicMap.remove(this.id);
		}

	}

}
@ -0,0 +1,38 @@
package org.gcube.data.spd.manager;

import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.spd.plugin.PluginManager;
import org.gcube.data.spd.utils.ExecutorsContainer;
import org.gcube.smartgears.ApplicationManager;
import org.gcube.smartgears.ContextProvider;
import org.gcube.smartgears.context.application.ApplicationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class AppInitializer implements ApplicationManager {

	private static final Logger log = LoggerFactory.getLogger(AppInitializer.class);

	private PluginManager pluginManager;

	private ApplicationContext ctx = ContextProvider.get();

	@Override
	public void onInit() {
		log.info("security token is "+SecurityTokenProvider.instance.get());
		pluginManager = new PluginManager(ctx);
	}

	@Override
	public void onShutdown() {
		pluginManager.shutdown();
		pluginManager = null;
		ExecutorsContainer.stopAll();
		log.info("App Initializer shut down on "+ScopeProvider.instance.get());
	}

	public PluginManager getPluginManager() {
		return pluginManager;
	}
}
@ -0,0 +1,58 @@
package org.gcube.data.spd.manager;

import java.util.HashSet;
import java.util.List;

import org.gcube.common.validator.ValidationError;
import org.gcube.common.validator.ValidatorFactory;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.plugin.fwk.util.Util;
import org.gcube.data.spd.plugin.fwk.writers.ResultElementWriterManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class OccurrenceWriterManager extends
		ResultElementWriterManager<OccurrencePoint> {

	private Logger logger = LoggerFactory.getLogger(OccurrenceWriterManager.class);

	private HashSet<String> idsSet;

	public OccurrenceWriterManager(String provider) {
		super(provider);
		this.idsSet = new HashSet<String>(400);
	}

	@Override
	protected OccurrencePoint _enrich(OccurrencePoint t) {
		t.setProvider(provider);
		t.setId(Util.keyEnrichment(this.provider, t.getId()));
		return t;
	}

	@Override
	public boolean filter(OccurrencePoint obj) {

		if (obj == null){
			logger.trace("("+this.provider+") null object discarded");
			return false;
		}

		List<ValidationError> errors = ValidatorFactory.validator().validate(obj);

		if (errors.size()>0){
			logger.warn("("+this.provider+") object discarded for the following reasons: "+errors);
			return false;
		}

		if (this.idsSet.contains(obj.getId())){
			logger.trace("("+this.provider+") an item with id "+obj.getId()+" already found");
			return false;
		}
		else {
			this.idsSet.add(obj.getId());
			return true;
		}
	}

}
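OccurrenceWriterManager and the two managers that follow share one contract inherited from ResultElementWriterManager: filter(obj) decides whether an element is kept (null check, bean validation, de-duplication), and _enrich(obj) prefixes ids with the provider key before the element is written. A sketch of how a Writer presumably consults a manager; the actual Writer implementation is not in this diff, and both emit() and the public enrich wrapper around _enrich are assumptions:

    // Assumed consumption order: validate/deduplicate first, then enrich and emit.
    if (manager.filter(element))
        emit(manager.enrich(element)); // enrich presumed to be the public wrapper of _enrich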
@ -0,0 +1,74 @@
package org.gcube.data.spd.manager;

import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.gcube.common.validator.ValidationError;
import org.gcube.common.validator.ValidatorFactory;
import org.gcube.data.spd.model.products.Product;
import org.gcube.data.spd.model.products.ResultItem;
import org.gcube.data.spd.model.products.Taxon;
import org.gcube.data.spd.plugin.fwk.util.Util;
import org.gcube.data.spd.plugin.fwk.writers.ResultElementWriterManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ResultItemWriterManager extends ResultElementWriterManager<ResultItem> {

	private Logger logger = LoggerFactory.getLogger(ResultItemWriterManager.class);
	private Set<String> idsSet;

	public ResultItemWriterManager(String repositoryProvider) {
		super(repositoryProvider);
		//keep the synchronized view returned by the wrapper (it was previously discarded)
		this.idsSet = Collections.synchronizedSet(new HashSet<String>(400));
	}

	@Override
	protected ResultItem _enrich(ResultItem t) {
		t.setProvider(this.provider);
		t.setId(Util.keyEnrichment(this.provider, t.getId()));
		Taxon parent = t.getParent();
		while (parent!=null){
			parent.setId(Util.keyEnrichment(this.provider, parent.getId()));
			parent = parent.getParent();
		}

		if (t.getProducts()!=null)
			for (Product prod: t.getProducts())
				prod.setKey(Util.keyEnrichment(this.provider, prod.getKey()));
		return t;
	}

	@Override
	public synchronized boolean filter(ResultItem obj) {

		if (obj == null){
			logger.trace("("+this.provider+") null object discarded");
			return false;
		}

		List<ValidationError> errors = ValidatorFactory.validator().validate(obj);

		if (errors.size()>0){
			logger.warn("("+this.provider+") object discarded for the following reasons: "+errors);
			return false;
		}

		String tempId = this.provider+"|"+obj.getId()+"|"+obj.getDataSet().getId()+"|"+obj.getDataSet().getDataProvider().getId();
		if (idsSet.contains(tempId)){
			logger.trace("("+this.provider+") an item with id "+obj.getId()+" already found");
			return false;
		}else{
			idsSet.add(tempId);
			return true;
		}
	}

}
@ -0,0 +1,71 @@
package org.gcube.data.spd.manager;

import java.util.HashSet;
import java.util.List;

import org.gcube.common.validator.ValidationError;
import org.gcube.common.validator.ValidatorFactory;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.spd.plugin.fwk.util.Util;
import org.gcube.data.spd.plugin.fwk.writers.ResultElementWriterManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TaxonomyItemWriterManager extends ResultElementWriterManager<TaxonomyItem> {

	private static Logger logger = LoggerFactory.getLogger(TaxonomyItemWriterManager.class);

	private HashSet<String> idsSet;

	public TaxonomyItemWriterManager(String repositoryProvider) {
		super(repositoryProvider);
		this.idsSet = new HashSet<String>(400);
	}

	@Override
	protected TaxonomyItem _enrich(TaxonomyItem t) {
		t.setProvider(this.provider);
		t.setId(Util.keyEnrichment(this.provider, t.getId()));
		String refId = t.getStatus().getRefId();
		if (refId!=null)
			t.getStatus().setRefId(Util.keyEnrichment(this.provider, refId));

		TaxonomyItem parent = t.getParent();
		while (parent!=null){
			parent.setId(Util.keyEnrichment(this.provider, parent.getId()));
			refId = parent.getStatus().getRefId();
			if (refId!=null)
				parent.getStatus().setRefId(Util.keyEnrichment(this.provider, refId));
			parent = parent.getParent();
		}
		return t;
	}

	@Override
	public boolean filter(TaxonomyItem obj) {
		if (obj == null){
			logger.trace("("+this.provider+") null object discarded");
			return false;
		}

		List<ValidationError> errors = ValidatorFactory.validator().validate(obj);

		if (errors.size()>0){
			logger.warn("("+this.provider+") object discarded for the following reasons: "+errors);
			return false;
		}

		if (this.idsSet.contains(obj.getId())){
			logger.trace("("+this.provider+") an item with id "+obj.getId()+" already found");
			return false;
		}
		else {
			this.idsSet.add(obj.getId());
			return true;
		}
	}

}
@ -0,0 +1,61 @@
package org.gcube.data.spd.manager.search;

import org.gcube.data.spd.manager.search.writers.ConsumerEventHandler;
import org.gcube.data.spd.model.exceptions.StreamException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public abstract class EventDispatcher<I> implements ConsumerEventHandler<I>{

	protected Logger logger = LoggerFactory.getLogger(EventDispatcher.class);

	private ConsumerEventHandler<I> standardWorker;
	private ConsumerEventHandler<I> alternativeWorker;

	boolean alternativeClosed = false, standardClosed = false;

	public EventDispatcher(ConsumerEventHandler<I> standardWorker,
			ConsumerEventHandler<I> alternativeWorker) {
		super();
		this.standardWorker = standardWorker;
		this.alternativeWorker = alternativeWorker;
	}

	@Override
	public boolean onElementReady(I element) {
		boolean sendToStandardWriter = sendToStandardWriter(element);
		if (sendToStandardWriter){
			if (!standardClosed){
				standardClosed = !standardWorker.onElementReady(element);
				return !standardClosed;
			}
		}else{
			if (!alternativeClosed){
				alternativeClosed = !alternativeWorker.onElementReady(element);
				return !alternativeClosed;
			}
		}
		return (!standardClosed && !alternativeClosed);
	}

	@Override
	public void onClose() {
		logger.trace("on close called in "+this.getClass().getSimpleName());
		standardWorker.onClose();
		alternativeWorker.onClose();
	}

	@Override
	public void onError(StreamException exception) {
		standardWorker.onError(exception);
		alternativeWorker.onError(exception);
	}

	public abstract boolean sendToStandardWriter(I input);

	@Override
	public boolean isConsumerAlive() {
		return (!standardClosed && !alternativeClosed);
	}

}
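A concrete dispatcher only has to supply the routing predicate. For instance, a hypothetical subclass that keeps non-cached elements on the standard path; standardWorker and alternativeWorker stand for any two ConsumerEventHandler<String> instances, and the "cached:" marker is invented for the example:

    EventDispatcher<String> dispatcher = new EventDispatcher<String>(standardWorker, alternativeWorker) {
        @Override
        public boolean sendToStandardWriter(String input) {
            return !input.startsWith("cached:"); // illustrative routing rule
        }
    };
    dispatcher.onElementReady("cached:some-id"); // routed to the alternative worker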
@ -0,0 +1,177 @@
package org.gcube.data.spd.manager.search;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import net.sf.ehcache.CacheManager;

import org.gcube.common.authorization.library.AuthorizedTasks;
import org.gcube.data.spd.caching.QueryCacheFactory;
import org.gcube.data.spd.manager.search.workers.CacheReaderWorker;
import org.gcube.data.spd.manager.search.workers.HavingFilterWorker;
import org.gcube.data.spd.manager.search.workers.ObjectManagerWorker;
import org.gcube.data.spd.manager.search.workers.SearchCachingEventDispatcher;
import org.gcube.data.spd.manager.search.workers.SearchWorker;
import org.gcube.data.spd.manager.search.writers.ConsumerEventHandler;
import org.gcube.data.spd.manager.search.writers.WorkerWriterPool;
import org.gcube.data.spd.model.Condition;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.model.service.exceptions.UnsupportedCapabilityException;
import org.gcube.data.spd.model.service.exceptions.UnsupportedPluginException;
import org.gcube.data.spd.plugin.PluginUtils;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.Searchable;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.gcube.data.spd.plugin.fwk.writers.ResultElementWriterManager;
import org.gcube.data.spd.plugin.fwk.writers.Writer;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.AbstractWrapper;
import org.gcube.data.spd.utils.ExecutorsContainer;
import org.gcube.dataaccess.spd.havingengine.HavingStatement;
import org.gcube.dataaccess.spd.havingengine.HavingStatementFactory;
import org.gcube.dataaccess.spd.havingengine.exl.HavingStatementFactoryEXL;
import org.gcube.dataaccess.spql.model.ExpandClause;
import org.gcube.dataaccess.spql.model.Query;
import org.gcube.dataaccess.spql.model.ResolveClause;
import org.gcube.dataaccess.spql.model.Term;
import org.gcube.dataaccess.spql.model.UnfoldClause;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Search<T extends ResultElement> {

    Logger logger = LoggerFactory.getLogger(Search.class);

    AbstractWrapper<T> wrapper;

    Map<String, AbstractPlugin> plugins;

    CacheManager cacheManager;

    QueryCacheFactory<T> queryCacheFactory;

    Class<? extends ResultElementWriterManager<T>> writerManagerClass;

    public Search(AbstractWrapper<T> wrapper, Map<String, AbstractPlugin> plugins,
            Class<? extends ResultElementWriterManager<T>> writerManagerClass, QueryCacheFactory<T> queryCacheFactory) {
        this.wrapper = wrapper;
        this.writerManagerClass = writerManagerClass;
        this.cacheManager = CacheManager.getInstance();
        this.plugins = plugins;
        this.queryCacheFactory = queryCacheFactory;
    }

    @SuppressWarnings("unchecked")
    public void search(Map<String, Searchable<T>> searchableMapping, Query parsedQuery, Condition ... properties) throws UnsupportedCapabilityException, UnsupportedPluginException, Exception {
        ClosableWriter<T> outputWriter = new Writer<T>(wrapper);
        //preparing the query (and checking semantics)
        List<Worker<?, ?>> workers = new ArrayList<Worker<?, ?>>();
        logger.info("HAVING expression is null? "+(parsedQuery.getHavingExpression()==null));
        //adding a HAVING filter if specified
        WorkerWriterPool<T> havingInputWriterPool = null;
        if (parsedQuery.getHavingExpression()!=null){
            HavingStatementFactory factory = new HavingStatementFactoryEXL();
            HavingStatement<T> havingFilter = factory.compile(parsedQuery.getHavingExpression().getExpression());
            ((Writer<T>)outputWriter).register();
            Worker<T,T> havingWorker = new HavingFilterWorker<T>(outputWriter, havingFilter);
            workers.add(havingWorker);
            havingInputWriterPool = new WorkerWriterPool<T>(havingWorker);
            logger.debug("adding HavingFilterWorker");
        }

        List<ConsumerEventHandler<String>> consumers = new ArrayList<ConsumerEventHandler<String>>();
        for (Entry<String, Searchable<T>> entry: searchableMapping.entrySet()){
            boolean cachablePlugin = plugins.get(entry.getKey()).isUseCache();
            if (havingInputWriterPool==null)
                ((Writer<T>)outputWriter).register();
            else
                outputWriter = havingInputWriterPool.get();
            ObjectManagerWorker<T> managerWorker = new ObjectManagerWorker<T>(outputWriter, writerManagerClass.getConstructor(String.class).newInstance(entry.getKey()));
            WorkerWriterPool<T> writerPool = new WorkerWriterPool<T>(managerWorker);
            logger.debug("("+entry.getKey()+") creating search worker");
            SearchWorker<T> searchWorker = new SearchWorker<T>(writerPool.get(), entry.getKey(), cachablePlugin,
                    entry.getValue(), cacheManager, queryCacheFactory, properties);
            workers.add(managerWorker);
            workers.add(searchWorker);
            if (cachablePlugin){
                logger.trace("key is "+entry.getKey()+" and value "+entry.getValue());
                CacheReaderWorker<T> cacheReaderWorker = new CacheReaderWorker<T>(writerPool.get(),
                        cacheManager, entry.getKey(), properties, entry.getValue().getHandledClass());
                workers.add(cacheReaderWorker);
                consumers.add(new SearchCachingEventDispatcher<ResultElement>(searchWorker, cacheReaderWorker,
                        cacheManager, entry.getKey(), properties, entry.getValue().getHandledClass()));
            }else
                consumers.add(searchWorker);
        }

        List<SearchFlow> searchFlows = extractFlows(parsedQuery);
        for (SearchFlow flow: searchFlows)
            workers.addAll(flow.createWorkers(consumers.toArray(new ConsumerEventHandler[consumers.size()])));

        //starting workers
        for (Worker<?, ?> worker: workers)
            ExecutorsContainer.execSearch(AuthorizedTasks.bind(worker));

        for (SearchFlow flow: searchFlows)
            flow.injectWords();
    }

    private List<SearchFlow> extractFlows(Query parsedQuery) throws UnsupportedCapabilityException, UnsupportedPluginException{
        List<SearchFlow> flows = new ArrayList<SearchFlow>();
        for (Term term: parsedQuery.getTerms()){
            List<String> words = term.getWords();

            Collection<AbstractPlugin> expanders = getExpanders(term.getExpandClause());

            Collection<AbstractPlugin> resolvers = getResolvers(term.getResolveClause());

            SearchFlow flow = new SearchFlow(words, expanders, resolvers);

            UnfoldClause unfoldClause = term.getUnfoldClause();
            if (unfoldClause!=null)
                flow.setUnfolder(getUnfolder(unfoldClause));

            flows.add(flow);
        }
        return flows;
    }

    private Collection<AbstractPlugin> getExpanders(ExpandClause expandClause) throws UnsupportedCapabilityException, UnsupportedPluginException{
        Collection<AbstractPlugin> expanders = Collections.emptyList();
        if (expandClause!=null){
            expanders = expandClause.getDatasources().size()>0?PluginUtils.getPluginsSubList(expandClause.getDatasources(), plugins):
                PluginUtils.getExtenderPlugins(plugins.values());
            if (expanders.size()==0) throw new UnsupportedCapabilityException();
        }
        return expanders;
    }

    private Collection<AbstractPlugin> getResolvers(ResolveClause resolveClause) throws UnsupportedCapabilityException, UnsupportedPluginException{
        Collection<AbstractPlugin> resolvers = Collections.emptyList();
        if (resolveClause!=null){
            resolvers = resolveClause.getDatasources().size()>0?PluginUtils.getPluginsSubList(resolveClause.getDatasources(), plugins):
                PluginUtils.getResolverPlugins(plugins.values());
            if (resolvers.size()==0) throw new UnsupportedCapabilityException();
        }
        return resolvers;
    }

    private AbstractPlugin getUnfolder(UnfoldClause unfoldClause) throws UnsupportedCapabilityException, UnsupportedPluginException{
        String datasource = unfoldClause.getDatasource();
        AbstractPlugin unfolder = plugins.get(datasource);
        if (unfolder==null){
            logger.error(datasource+" not found");
            throw new UnsupportedPluginException();
        }
        if (unfolder.getUnfoldInterface()==null)
            throw new UnsupportedCapabilityException();
        return unfolder;
    }

}
@ -0,0 +1,99 @@
package org.gcube.data.spd.manager.search;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.gcube.data.spd.manager.search.workers.CommonNameMapping;
import org.gcube.data.spd.manager.search.workers.SynonymsRetriever;
import org.gcube.data.spd.manager.search.workers.UnfolderWorker;
import org.gcube.data.spd.manager.search.writers.ConsumerEventHandler;
import org.gcube.data.spd.manager.search.writers.WorkerWriterPool;
import org.gcube.data.spd.model.service.exceptions.UnsupportedPluginException;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SearchFlow {

    private Logger logger = LoggerFactory.getLogger(SearchFlow.class);

    private Collection<AbstractPlugin> expanders;

    private Collection<AbstractPlugin> resolvers;

    private List<String> words;

    private ConsumerEventHandler<String>[] consumers;

    private AbstractPlugin unfolder = null;

    public SearchFlow(List<String> words, Collection<AbstractPlugin> expanders, Collection<AbstractPlugin> resolvers) {
        this.resolvers = resolvers;
        this.expanders = expanders;
        this.words = words;
    }

    public void setUnfolder(AbstractPlugin unfolder) {
        this.unfolder = unfolder;
    }

    @SuppressWarnings("unchecked")
    public List<Worker<?, ?>> createWorkers(ConsumerEventHandler<String> ... registeredConsumers) throws UnsupportedPluginException{
        ConsumerEventHandler<String>[] actualConsumers = registeredConsumers;
        List<Worker<?, ?>> workersToExecute = new ArrayList<Worker<?,?>>();

        if (expanders.size()>0){
            logger.trace("preparing "+expanders.size()+" expander(s)");
            List<Worker<?, ?>> workers = new ArrayList<Worker<?,?>>();

            WorkerWriterPool<String> writerPool = new WorkerWriterPool<String>(actualConsumers);
            for (AbstractPlugin expander : expanders)
                workers.add(new SynonymsRetriever(writerPool.get(), expander));
            actualConsumers = workers.toArray(new Worker[workers.size()]);
            workersToExecute.addAll(workers);
        }
        if (resolvers.size()>0){
            logger.trace("preparing "+resolvers.size()+" resolver(s)");
            List<Worker<?, ?>> workers = new ArrayList<Worker<?,?>>();

            WorkerWriterPool<String> writerPool = new WorkerWriterPool<String>(actualConsumers);
            for (AbstractPlugin resolver : resolvers)
                workers.add(new CommonNameMapping(writerPool.get(), resolver));
            actualConsumers = workers.toArray(new Worker[workers.size()]);
            workersToExecute.addAll(workers);
        }

        if (unfolder!=null){
            WorkerWriterPool<String> writerPool = new WorkerWriterPool<String>(actualConsumers);
            Worker<?,?> unfolderWorker = new UnfolderWorker(writerPool.get(), unfolder);
            actualConsumers = new Worker[]{unfolderWorker};
            workersToExecute.add(unfolderWorker);
        }

        this.consumers = actualConsumers;

        return workersToExecute;
    }

    public void injectWords() {

        if (consumers == null)
            throw new RuntimeException("search flow not started");

        for (String word: this.words){
            logger.trace("injecting "+word);
            for (ConsumerEventHandler<String> actualConsumer : this.consumers)
                actualConsumer.onElementReady(word);
        }

        for (ConsumerEventHandler<String> actualConsumer : this.consumers)
            actualConsumer.onClose();
    }

}
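A hedged usage sketch for SearchFlow, mirroring how Search.search(...) drives it above; searchConsumer is a hypothetical ConsumerEventHandler<String>:

SearchFlow flow = new SearchFlow(words, expanders, resolvers);
//wires expanders/resolvers/unfolder in front of the registered consumers
List<Worker<?, ?>> workers = flow.createWorkers(searchConsumer);
for (Worker<?, ?> worker : workers)
    ExecutorsContainer.execSearch(AuthorizedTasks.bind(worker)); //as done in Search
flow.injectWords(); //pushes each word into the head of the chain, then closes it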
@ -0,0 +1,96 @@
package org.gcube.data.spd.manager.search;

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import org.gcube.data.spd.manager.search.writers.ConsumerEventHandler;
import org.gcube.data.spd.model.exceptions.StreamBlockingException;
import org.gcube.data.spd.model.exceptions.StreamException;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public abstract class Worker<I,O> implements Runnable, ConsumerEventHandler<I> {

    protected Logger logger = LoggerFactory.getLogger(Worker.class);

    private LinkedBlockingQueue<I> queue = new LinkedBlockingQueue<I>();

    protected boolean stop = false;

    boolean producerClosed = false;

    boolean alive = true;

    private ClosableWriter<O> writer;

    public Worker(ClosableWriter<O> writer) {
        this.writer = writer;
    }

    public void run(){
        logger.trace(this.getClass().getSimpleName()+" - worker started");

        try{
            //consume queued elements until stopped, until the producer has
            //closed and the queue is drained, or until the writer dies
            while(!stop && (!producerClosed || !queue.isEmpty()) && writer.isAlive()){
                I element = null;
                try {
                    element = queue.poll(2, TimeUnit.SECONDS);
                } catch (InterruptedException e) {
                    logger.warn("interrupted while polling the worker queue", e);
                }
                if (element!=null)
                    execute(element, writer);
            }
        }catch (Throwable e) {
            logger.warn("unexpected error on worker ", e);
        }
        writer.close();
        this.alive = false;
        logger.trace(this.getClass().getSimpleName()+" - worker stopped");
    }

    protected abstract void execute(I input, ObjectWriter<O> outputWriter);

    @Override
    public synchronized boolean onElementReady(I element) {
        if (!stop && writer.isAlive()){
            try {
                return queue.offer(element, 1, TimeUnit.MINUTES);
            } catch (InterruptedException e) {
                logger.warn("error in event onElementReady", e);
            }
        }
        return false;
    }

    @Override
    public void onClose(){
        this.producerClosed = true;
    }

    public ClosableWriter<O> getWriter() {
        return writer;
    }

    @Override
    public synchronized void onError(StreamException exception) {
        logger.warn("error on stream ", exception);
        if (exception instanceof StreamBlockingException){
            this.stop = true;
        }
    }

    @Override
    public boolean isConsumerAlive() {
        return alive;
    }

    public String descriptor(){
        return this.getClass().getSimpleName();
    }

}
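A minimal sketch (assumed, not part of the sources) of a concrete Worker: elements queued through onElementReady are dequeued by run() and handed to execute, which writes the transformed value downstream. The class name and transformation are hypothetical.

package org.gcube.data.spd.manager.search.workers;

import org.gcube.data.spd.manager.search.Worker;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;

//hypothetical worker: upper-cases every string it consumes
public class UpperCaseWorker extends Worker<String, String> {

    public UpperCaseWorker(ClosableWriter<String> writer) {
        super(writer);
    }

    @Override
    protected void execute(String input, ObjectWriter<String> outputWriter) {
        outputWriter.write(input.toUpperCase());
    }
}

The instance must be submitted to an executor (its run() drains the queue) and fed via onElementReady; onClose() marks the end of the input.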
@ -0,0 +1,40 @@
package org.gcube.data.spd.manager.search.workers;

import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;

import org.gcube.data.spd.caching.CacheKey;
import org.gcube.data.spd.caching.QueryCache;
import org.gcube.data.spd.manager.search.Worker;
import org.gcube.data.spd.model.Condition;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.gcube.data.spd.utils.Utils;

public class CacheReaderWorker<T> extends Worker<String, T> {

    private CacheManager cacheManager;
    private String propertiesAsString;
    private Class<?> handledClass;
    private String pluginName;

    public CacheReaderWorker(ClosableWriter<T> writer, CacheManager cacheManager, String pluginName,
            Condition[] properties, Class<?> handledClass) {
        super(writer);
        this.cacheManager = cacheManager;
        this.propertiesAsString = Utils.getPropsAsString(properties);
        this.handledClass = handledClass;
        this.pluginName = pluginName;
    }

    @Override
    protected void execute(String input, ObjectWriter<T> outputWriter) {
        logger.trace("starting cache reader worker for "+input);
        CacheKey key = new CacheKey(input, propertiesAsString, handledClass);
        Cache cache = cacheManager.getCache(pluginName);
        @SuppressWarnings("unchecked")
        QueryCache<T> cacheReader = ((QueryCache<T>)cache.get(key).getValue());
        cacheReader.getAll(outputWriter);
    }

}
@ -0,0 +1,49 @@
package org.gcube.data.spd.manager.search.workers;

import org.gcube.data.spd.manager.search.Worker;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.model.exceptions.StreamNonBlockingException;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.gcube.data.spd.utils.QueryRetryCall;
import org.gcube.data.spd.utils.VOID;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CommonNameMapping extends Worker<String, String> {

    private AbstractPlugin plugin;

    Logger logger = LoggerFactory.getLogger(CommonNameMapping.class);

    public CommonNameMapping(ClosableWriter<String> writer, AbstractPlugin plugin){
        super(writer);
        this.plugin = plugin;
    }

    @Override
    protected void execute(final String input, final ObjectWriter<String> outputWriter) {
        logger.debug("retrieving mapping for "+input);

        try {
            new QueryRetryCall(){
                @Override
                protected VOID execute() throws ExternalRepositoryException {
                    plugin.getMappingInterface().getRelatedScientificNames(outputWriter, input);
                    return VOID.instance();
                }
            }.call();
        } catch (Exception e) {
            outputWriter.write(new StreamNonBlockingException(plugin.getRepositoryName(), input));
        }
    }

}
@ -0,0 +1,24 @@
package org.gcube.data.spd.manager.search.workers;

import org.gcube.data.spd.manager.search.Worker;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.gcube.dataaccess.spd.havingengine.HavingStatement;

public class HavingFilterWorker<T> extends Worker<T, T> {

    private HavingStatement<T> having;

    public HavingFilterWorker(ClosableWriter<T> writer, HavingStatement<T> having) {
        super(writer);
        this.having = having;
    }

    @Override
    protected void execute(T input, ObjectWriter<T> outputWriter) {
        if (having.accept(input))
            outputWriter.write(input);
        else logger.trace("object discarded by having clause");
    }

}
@ -0,0 +1,26 @@
package org.gcube.data.spd.manager.search.workers;

import org.gcube.data.spd.manager.search.Worker;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.gcube.data.spd.plugin.fwk.writers.ResultElementWriterManager;

public class ObjectManagerWorker<I extends ResultElement> extends Worker<I, I> {

    ResultElementWriterManager<I> writerManager;

    public ObjectManagerWorker(ClosableWriter<I> writer, ResultElementWriterManager<I> writerManager) {
        super(writer);
        this.writerManager = writerManager;
    }

    @Override
    protected void execute(I input, ObjectWriter<I> outputWriter) {
        if (writerManager.filter(input))
            outputWriter.write(writerManager.enrich(input));
    }

}
@ -0,0 +1,46 @@
package org.gcube.data.spd.manager.search.workers;

import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;

import org.gcube.data.spd.caching.CacheKey;
import org.gcube.data.spd.caching.QueryCache;
import org.gcube.data.spd.manager.search.EventDispatcher;
import org.gcube.data.spd.manager.search.writers.ConsumerEventHandler;
import org.gcube.data.spd.model.Condition;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SearchCachingEventDispatcher<T extends ResultElement> extends EventDispatcher<String> {

    private CacheManager cacheManager;
    private String propertiesAsString;
    private Class<?> handledClass;
    private String pluginName;

    private Logger logger = LoggerFactory.getLogger(SearchCachingEventDispatcher.class);

    public SearchCachingEventDispatcher(ConsumerEventHandler<String> standardWorker,
            ConsumerEventHandler<String> cacheReaderWorker, CacheManager cacheManager, String pluginName,
            Condition[] properties, Class<?> handledClass) {
        super(standardWorker, cacheReaderWorker);
        this.cacheManager = cacheManager;
        this.propertiesAsString = Utils.getPropsAsString(properties);
        this.handledClass = handledClass;
        this.pluginName = pluginName;
    }

    @Override
    public synchronized boolean sendToStandardWriter(String input) {
        //send to the search worker unless a valid cache entry already exists,
        //in which case the cache reader worker serves the request
        CacheKey key = new CacheKey(input, propertiesAsString, handledClass);
        Cache cache = cacheManager.getCache(pluginName);
        logger.trace("is key in cache? "+cache.isKeyInCache(key));
        logger.trace("QueryCacheEntry is "+key);
        boolean toReturn = !(cache.isKeyInCache(key) && cache.get(key)!=null && ((QueryCache<?>)cache.get(key).getValue()).isValid());
        logger.trace("sending it to the "+(toReturn?"standard":"alternative")+" worker");
        return toReturn;
    }

}
@ -0,0 +1,137 @@
package org.gcube.data.spd.manager.search.workers;

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Element;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.spd.caching.CacheKey;
import org.gcube.data.spd.caching.CacheWriter;
import org.gcube.data.spd.caching.QueryCache;
import org.gcube.data.spd.caching.QueryCacheFactory;
import org.gcube.data.spd.exception.MaxRetriesReachedException;
import org.gcube.data.spd.manager.search.Worker;
import org.gcube.data.spd.model.Condition;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.model.exceptions.StreamNonBlockingException;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.plugin.fwk.Searchable;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.gcube.data.spd.utils.QueryRetryCall;
import org.gcube.data.spd.utils.Utils;
import org.gcube.data.spd.utils.VOID;

public class SearchWorker<T extends ResultElement> extends Worker<String, T> {

    private Searchable<T> searchable;
    private String pluginName;
    private String propertiesAsString;
    private Condition[] properties;
    boolean cachable = false;
    private CacheManager cacheManager;
    private QueryCacheFactory<T> queryCacheFactory;
    Set<String> searchDone;

    public SearchWorker(ClosableWriter<T> writer, String pluginName, boolean cachable,
            Searchable<T> searchable, CacheManager cacheManager, QueryCacheFactory<T> queryCacheFactory, Condition ...properties) {
        super(writer);
        this.pluginName = pluginName;
        this.propertiesAsString = Utils.getPropsAsString(properties);
        this.properties = properties;
        this.searchable = searchable;
        this.cachable = cachable;
        this.cacheManager = cacheManager;
        searchDone = Collections.synchronizedSet(new HashSet<String>());
        this.queryCacheFactory = queryCacheFactory;
    }

    @Override
    protected void execute(final String input, final ObjectWriter<T> writer) {

        logger.debug("("+pluginName+") searching for "+input+" with outputWriter alive? "+writer.isAlive());

        logger.trace("("+pluginName+") searchDone.contains(input)? "+(searchDone.contains(input)));

        //skip terms this worker has already searched for
        if (searchDone.contains(input)) return;
        else searchDone.add(input);

        try {
            new QueryRetryCall(){

                @Override
                protected VOID execute() throws ExternalRepositoryException {
                    search(input, writer);
                    return VOID.instance();
                }

            }.call();
        } catch (MaxRetriesReachedException e) {
            logger.error("max retries reached for "+pluginName, e);
            writer.write(new StreamNonBlockingException(pluginName, input));
        }

    }

    private void search(String input, ObjectWriter<T> writer) throws ExternalRepositoryException{
        logger.trace("("+pluginName+") scope in search worker is set as "+ScopeProvider.instance.get());

        if (cachable){
            logger.debug("("+pluginName+") using cache");
            CacheKey key = new CacheKey(input, propertiesAsString, searchable.getHandledClass());
            Cache cache = cacheManager.getCache(pluginName);

            //decide, under the global cache lock, whether this worker has to
            //(re)fill the cache entry; the lock is always released in finally
            QueryCache<T> queryCache = null;
            QueryCache.lock.lock();
            try{
                if((cache.isKeyInCache(key) && cache.get(key)!=null && ((QueryCache<?>)cache.get(key).getValue()).isError())
                        || !cache.isKeyInCache(key)){
                    if (cache.isKeyInCache(key)){
                        logger.trace("removing invalid entry in cache ...");
                        try{
                            logger.trace("acquiring write lock "+pluginName);
                            cache.acquireWriteLockOnKey(key);
                            logger.trace("acquired write lock "+pluginName);
                            cache.remove(key);
                        }catch (Exception e) {
                            logger.warn("problem removing cache ", e);
                        }finally{
                            logger.trace("releasing write lock "+pluginName);
                            cache.releaseWriteLockOnKey(key);
                            logger.trace("released write lock "+pluginName);
                        }
                        logger.trace("cache removed ...");
                    }
                    queryCache = this.queryCacheFactory.create(pluginName);
                    cache.put(new Element(key, queryCache));
                }
            }finally{
                QueryCache.lock.unlock();
            }

            if (queryCache!=null){
                //this worker owns the entry: search and fill the cache
                CacheWriter<T> cacheWriter = new CacheWriter<T>(writer, queryCache);
                searchable.searchByScientificName(input, cacheWriter, properties);
                cacheWriter.close();
                cache.put(new Element(key, queryCache));
            }else{ //someone else is already filling this cache: execute a normal query
                logger.debug("("+pluginName+") executing normal query in cachable plugin");
                searchable.searchByScientificName(input, writer, properties);
            }
        } else{ //execute normal query
            logger.debug("("+this.pluginName+") executing normal query for "+input);
            searchable.searchByScientificName(input, writer, properties);
        }
        logger.debug("("+pluginName+") finished search for "+input);
    }

    @Override
    public String descriptor() {
        return super.descriptor()+" - "+pluginName;
    }

}
@ -0,0 +1,43 @@
package org.gcube.data.spd.manager.search.workers;

import org.gcube.data.spd.exception.MaxRetriesReachedException;
import org.gcube.data.spd.manager.search.Worker;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.model.exceptions.StreamBlockingException;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.gcube.data.spd.utils.QueryRetryCall;
import org.gcube.data.spd.utils.VOID;

public class SynonymsRetriever extends Worker<String, String> {

    private AbstractPlugin plugin;

    public SynonymsRetriever(ClosableWriter<String> writer, AbstractPlugin plugin) {
        super(writer);
        this.plugin = plugin;
    }

    @Override
    protected void execute(final String input, final ObjectWriter<String> outputWriter) {
        logger.debug("executing expander for "+input+" in plugin "+plugin.getRepositoryName());
        //the original word is always propagated, followed by its synonyms
        outputWriter.write(input);
        try {
            new QueryRetryCall() {

                @Override
                protected VOID execute() throws ExternalRepositoryException {
                    plugin.getExpansionInterface().getSynonyms(outputWriter, input);
                    return VOID.instance();
                }

            }.call();
        } catch (MaxRetriesReachedException e) {
            logger.error("error retrieving synonyms", e);
            outputWriter.write(new StreamBlockingException(plugin.getRepositoryName()));
        }
    }

}
@ -0,0 +1,44 @@
package org.gcube.data.spd.manager.search.workers;

import org.gcube.data.spd.exception.MaxRetriesReachedException;
import org.gcube.data.spd.manager.search.Worker;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.model.exceptions.StreamBlockingException;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.gcube.data.spd.utils.QueryRetryCall;
import org.gcube.data.spd.utils.VOID;

public class UnfolderWorker extends Worker<String, String>{

    private AbstractPlugin plugin;

    public UnfolderWorker(ClosableWriter<String> writer, AbstractPlugin plugin) {
        super(writer);
        this.plugin = plugin;
    }

    @Override
    protected void execute(final String item, final ObjectWriter<String> outputWriter) {
        //the original item is always propagated, followed by its unfolding
        outputWriter.write(item);
        try {
            new QueryRetryCall(){

                @Override
                protected VOID execute() throws ExternalRepositoryException {
                    plugin.getUnfoldInterface().unfold(outputWriter, item);
                    return VOID.instance();
                }

            }.call();
        } catch (MaxRetriesReachedException e) {
            logger.error("error executing unfolding", e);
            outputWriter.write(new StreamBlockingException(plugin.getRepositoryName()));
        }

    }

}
@ -0,0 +1,15 @@
package org.gcube.data.spd.manager.search.writers;

import org.gcube.data.spd.model.exceptions.StreamException;

public interface ConsumerEventHandler<T> {

    public boolean onElementReady(T element);

    public void onError(StreamException streamException);

    public void onClose();

    public boolean isConsumerAlive();

}
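A hedged sketch of a terminal ConsumerEventHandler implementation (hypothetical, for illustration): it collects elements until closed and refuses them afterwards.

//hypothetical in-memory sink
public class CollectingConsumer<T> implements ConsumerEventHandler<T> {

    private final java.util.List<T> collected = new java.util.ArrayList<T>();
    private volatile boolean closed = false;

    @Override
    public boolean onElementReady(T element) {
        if (closed) return false;
        collected.add(element);
        return true;
    }

    @Override
    public void onError(StreamException streamException) {
        //a real consumer would record or propagate the failure
    }

    @Override
    public void onClose() { closed = true; }

    @Override
    public boolean isConsumerAlive() { return !closed; }
}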
@ -0,0 +1,53 @@
package org.gcube.data.spd.manager.search.writers;

import org.gcube.data.spd.model.exceptions.StreamBlockingException;
import org.gcube.data.spd.model.exceptions.StreamException;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class WorkerWriter<O> implements ClosableWriter<O>{

    Logger logger = LoggerFactory.getLogger(WorkerWriter.class);

    boolean closed;

    private ConsumerEventHandler<O> consumer;

    protected WorkerWriter(ConsumerEventHandler<O> consumer){
        this.consumer = consumer;
    }

    @Override
    public boolean write(O t) {
        if (!consumer.onElementReady(t)){
            this.close();
            return false;
        }
        return true;
    }

    @Override
    public boolean write(StreamException error) {
        consumer.onError(error);
        if (error instanceof StreamBlockingException){
            this.close();
            return false;
        }
        else return true;
    }

    @Override
    public boolean isAlive() {
        return (!closed && consumer.isConsumerAlive());
    }

    @Override
    public void close() {
        closed = true;
        if (consumer!=null)
            consumer.onClose();
        else logger.trace("found null consumer");
    }

}
@ -0,0 +1,85 @@
package org.gcube.data.spd.manager.search.writers;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

import org.gcube.data.spd.model.exceptions.StreamException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class WorkerWriterPool<O> implements ConsumerEventHandler<O>{

    Logger logger = LoggerFactory.getLogger(WorkerWriterPool.class);

    private int createdWriters = 0;

    List<ConsumerEventHandler<O>> consumers;

    List<WorkerWriter<O>> writers;

    public WorkerWriterPool(@SuppressWarnings("unchecked") ConsumerEventHandler<O> ... consumers){
        this.consumers = new ArrayList<ConsumerEventHandler<O>>();
        Collections.addAll(this.consumers, consumers);
        writers = new ArrayList<WorkerWriter<O>>();
    }

    public WorkerWriter<O> get(){
        WorkerWriter<O> writer = new WorkerWriter<O>(this);
        this.createdWriters++;
        writers.add(writer);
        return writer;
    }

    @Override
    public void onClose() {
        //reference counting: forward the close only when the last writer closes
        this.createdWriters--;
        if (this.createdWriters == 0){
            for (ConsumerEventHandler<O> consumer : consumers){
                logger.trace("sending close to the consumer ("+consumer.getClass().getSimpleName()+")");
                consumer.onClose();
            }
            for (WorkerWriter<O> writer: writers)
                if (writer.isAlive())
                    writer.close();
        }
    }

    @Override
    public synchronized boolean onElementReady(O element) {
        //fan the element in to every consumer, dropping the ones that refuse it
        Iterator<ConsumerEventHandler<O>> it = consumers.iterator();
        while (it.hasNext()){
            ConsumerEventHandler<O> consumer = it.next();
            boolean onElementWorked = consumer.onElementReady(element);
            if (!onElementWorked)
                it.remove();
        }
        return consumers.size()>0;
    }

    @Override
    public void onError(StreamException exception) {
        for (ConsumerEventHandler<O> consumer : consumers)
            consumer.onError(exception);
    }

    @Override
    public boolean isConsumerAlive() {
        //alive as long as at least one consumer is alive
        for (ConsumerEventHandler<O> consumer : consumers)
            if (consumer.isConsumerAlive())
                return true;
        return false;
    }

}
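The pool above reference-counts the writers it hands out: get() increments the counter, each writer close decrements it, and only when the last one closes is onClose propagated to the wrapped consumers. A hedged sketch (downstream is a hypothetical ConsumerEventHandler<String>):

WorkerWriterPool<String> pool = new WorkerWriterPool<String>(downstream);
WorkerWriter<String> w1 = pool.get();
WorkerWriter<String> w2 = pool.get();
w1.write("Carcharodon carcharias"); //fans in to downstream.onElementReady(...)
w1.close();                         //counter 2 -> 1: downstream stays open
w2.close();                         //counter 1 -> 0: downstream.onClose() fires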
@ -0,0 +1,357 @@
package org.gcube.data.spd.plugin;

import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;

import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;

import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.ObjectExistsException;
import net.sf.ehcache.config.CacheConfiguration;
import net.sf.ehcache.store.MemoryStoreEvictionPolicy;

import org.gcube.common.resources.gcore.GCoreEndpoint;
import org.gcube.common.resources.gcore.GCoreEndpoint.Profile.Endpoint;
import org.gcube.common.resources.gcore.HostingNode;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.spd.Constants;
import org.gcube.data.spd.caching.MyCacheEventListener;
import org.gcube.data.spd.model.PluginDescription;
import org.gcube.data.spd.model.service.types.PluginDescriptions;
import org.gcube.data.spd.model.util.Capabilities;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.remoteplugin.RemotePlugin;
import org.gcube.data.spd.utils.Utils;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.gcube.smartgears.context.application.ApplicationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class PluginManager{

    private static Logger log = LoggerFactory.getLogger(PluginManager.class);

    private static final int CACHE_ENTRIES_PER_PLUGIN = 500;
    private static final String RESOURCE_CATEGORY = "BiodiversityRepository";

    private ServiceLoader<AbstractPlugin> loader;
    private Map<String,AbstractPlugin> plugins = new HashMap<String, AbstractPlugin>();

    private ApplicationContext ctx;

    private EnumMap<Capabilities, Set<AbstractPlugin>> pluginsPerCapability = new EnumMap<Capabilities, Set<AbstractPlugin>>(Capabilities.class);

    public Set<AbstractPlugin> getPluginsPerCapability(Capabilities capability, Collection<AbstractPlugin> plugins){
        Set<AbstractPlugin> returnSet = new HashSet<AbstractPlugin>();
        if (pluginsPerCapability.containsKey(capability)){
            for (AbstractPlugin plugin : plugins)
                if (pluginsPerCapability.get(capability).contains(plugin)) returnSet.add(plugin);
            return Collections.unmodifiableSet(returnSet);
        }else return Collections.emptySet();
    }

    public Set<AbstractPlugin> getPluginsPerCapability(Capabilities capability){
        if (pluginsPerCapability.containsKey(capability))
            return Collections.unmodifiableSet(pluginsPerCapability.get(capability));
        else return Collections.emptySet();
    }

    /**
     * Creates a new instance, installing all the plugins found on the classpath.
     */
    public PluginManager(ApplicationContext context) {
        log.debug("creating the plugin manager");
        this.ctx = context;
        initializePlugins();
    }

    //updates the plugin manager with a new plugin when a runtime resource is added in its scope
    public void addRemotePlugins(List<PluginDescription> remotePluginDescriptions, String gCoreEndpointId){
        for (PluginDescription description : remotePluginDescriptions)
            try{
                if (!plugins.containsKey(description.getName()) && !description.isRemote()){
                    RemotePlugin plugin = new RemotePlugin();
                    plugin.remoteIntitializer(description, gCoreEndpointId);
                    log.debug("found remote plugin for "+plugin.getRepositoryName());
                    checkPlugin(plugin);
                    //initializing cache per plugin
                    if (plugin.isUseCache()) createCache(plugin.getRepositoryName());
                    log.trace("created remote plugin "+plugin.getRepositoryName()+" with endpoints id "+plugin.getRemoteUris());
                }else {
                    AbstractPlugin plugin = plugins.get(description.getName());
                    if (plugin.isRemote()){
                        ((RemotePlugin) plugin).addUrl(gCoreEndpointId);
                        log.trace("added remote plugin "+plugin.getRepositoryName()+" from endpoint id "+gCoreEndpointId);
                    }
                }
            }catch (Exception e) {
                log.error("initialization failed for remote plugin "+description.getName(), e);
            }
    }

    /*public void update(ServiceEndpoint resource){
        try {
            if (!resource.scopes().contains(this.scope.toString()))
                this.removePlugin(resource.profile().name());
            else if (!plugins.containsKey(resource.profile().name())) {
                add(resource);
            }else
                plugins.get(resource.profile().name()).initialize(resource);
        } catch (Exception e) {
            log.error("error updating plugin "+resource.profile().name(),e);
        }
    }*/

    /**
     * Returns the installed plugins, indexed by name.
     * @return the plugins
     */
    public Map<String,AbstractPlugin> plugins() {
        return plugins;
    }

    private void retrievePlugins(Map<String, ServiceEndpoint> runtimeResourcePerPlugin){
        for (AbstractPlugin plugin : loader) {

            ServiceEndpoint resource = null;

            if ((resource = runtimeResourcePerPlugin.get(plugin.getRepositoryName()))==null)
                continue;

            log.debug("found a repo plugin for "+plugin.getRepositoryName());
            if (plugin.getRepositoryName()==null) {
                log.error("plugin "+plugin.getClass().getSimpleName()+" has a null repository name");
                continue;
            }

            if (plugin.getRepositoryName().contains(":")) {
                log.error("plugin "+plugin.getClass().getSimpleName()+" contains an invalid character");
                continue;
            }

            if (plugin.getDescription()==null) {
                log.warn("plugin "+plugin.getClass().getSimpleName()+" has a null description");
                continue;
            }
            try{
                if (!plugin.isInitialized()){
                    plugin.initialize(resource);
                    log.debug("initialization finished for plugin "+plugin.getRepositoryName());
                }

                checkPlugin(plugin);

                //initializing cache per plugin
                if (plugin.isUseCache()) createCache(plugin.getRepositoryName());
            }catch (Exception e) {
                log.error("initialization failed for plugin "+plugin.getRepositoryName(), e);
            }
        }

    }

    public void retrieveRemotePlugins(){

        List<PluginDescription> descriptions = new ArrayList<PluginDescription>(plugins.size());
        for (AbstractPlugin plugin : plugins.values())
            if (!plugin.isRemote())
                descriptions.add(Utils.getPluginDescription(plugin));
        PluginDescriptions myDescriptions = new PluginDescriptions(descriptions);

        for (GCoreEndpoint address: retrieveTwinServicesAddresses()) {
            String endpointId = ctx.profile(GCoreEndpoint.class).id();
            List<PluginDescription> pluginDescriptions = null;
            URI uri = null;
            try {
                for (Endpoint endpoint : address.profile().endpoints())
                    if (endpoint.name().equals("remote-dispatcher")){
                        uri = endpoint.uri();
                        break;
                    }
                if (uri!=null){
                    //TODO : call remote rest service
                    //RemoteDispatcher remoteDispatcher = org.gcube.data.spd.client.Constants.getRemoteDispatcherService(uri.toString());
                    //pluginDescriptions = remoteDispatcher.exchangePlugins(myDescriptions, endpointId).getDescriptions();
                }
            }catch (Throwable e) {
                log.warn("error contacting remote plugin hosted on WHN id "+address.profile().ghnId());
                continue;
            }

            if (pluginDescriptions==null) continue;

            log.trace("plugins in PluginManager are "+plugins.keySet());

            addRemotePlugins(pluginDescriptions, endpointId);
        }
    }

    private List<GCoreEndpoint> retrieveTwinServicesAddresses(){
        List<GCoreEndpoint> addresses = Collections.emptyList();
        log.info("retrieving twin services in context {} ", ScopeProvider.instance.get());
        try{
            SimpleQuery query = queryFor(GCoreEndpoint.class);

            query.addCondition("$resource/Profile/ServiceName/text() eq '"+Constants.SERVICE_NAME+"'")
                .addCondition("$resource/Profile/ServiceClass/text() eq '"+Constants.SERVICE_CLASS+"'")
                .addCondition("$resource/Profile/DeploymentData/Status/text() eq 'ready'")
                .addCondition("not($resource/Profile/GHN[@UniqueID='"+ctx.container().profile(HostingNode.class).id()+"'])");
            //gcube/data/speciesproductsdiscovery/manager
            DiscoveryClient<GCoreEndpoint> client = clientFor(GCoreEndpoint.class);

            addresses = client.submit(query);

        }catch(Exception e){
            log.warn("error discovering twin services", e);
        }

        log.trace("retrieved "+addresses.size()+" gcore endpoints");

        return addresses;
    }

    private void checkPlugin(AbstractPlugin plugin){
        plugins.put(plugin.getRepositoryName(), plugin);
        for (Capabilities capability : plugin.getSupportedCapabilities()){
            if (pluginsPerCapability.containsKey(capability))
                pluginsPerCapability.get(capability).add(plugin);
            else {
                HashSet<AbstractPlugin> pluginsSet = new HashSet<AbstractPlugin>();
                pluginsSet.add(plugin);
                pluginsPerCapability.put(capability, pluginsSet);
            }
        }

    }

    private void createCache(String pluginName){
        try{

            Cache pluginCache = new Cache(new CacheConfiguration(pluginName, CACHE_ENTRIES_PER_PLUGIN)
                    .memoryStoreEvictionPolicy(MemoryStoreEvictionPolicy.LFU)
                    .overflowToDisk(false)
                    .eternal(false)
                    .timeToLiveSeconds(60*60*24*7)
                    .timeToIdleSeconds(0)
                    .diskPersistent(true)
                    .diskExpiryThreadIntervalSeconds(0)
                    .diskStorePath(ctx.persistence().location()));

            pluginCache.getCacheEventNotificationService().registerListener(new MyCacheEventListener());

            CacheManager.getInstance().addCache(pluginCache);
            log.trace("cache created for plugin "+ pluginName);
        }catch (ObjectExistsException e) {
            log.warn("the cache for plugin "+pluginName+" already exists");
            log.trace("the size is "+ CacheManager.getInstance().getCache(pluginName).getSize());
        }
    }

    private void initializePlugins(){

        log.trace("initializing plugins");
        if (loader==null){
            log.warn("ServiceLoader is null, initializing plugins");
            loader = ServiceLoader.load(AbstractPlugin.class);
        }

        Map<String, ServiceEndpoint> runtimeResourcePerPlugin = new HashMap<String, ServiceEndpoint>();
        try{
            SimpleQuery query = queryFor(ServiceEndpoint.class);

            query.addCondition("$resource/Profile/Category/text() eq '"+RESOURCE_CATEGORY+"'");

            DiscoveryClient<ServiceEndpoint> client = clientFor(ServiceEndpoint.class);

            List<ServiceEndpoint> resources = client.submit(query);

            for (ServiceEndpoint resource: resources)
                runtimeResourcePerPlugin.put(resource.profile().name(), resource);
        }catch(Exception e){
            log.warn("error discovering runtime resources", e);
        }

        retrievePlugins(runtimeResourcePerPlugin);
        retrieveRemotePlugins();
    }

    public void removePlugin(String pluginName){
        AbstractPlugin plugin = this.plugins.get(pluginName);

        for (Capabilities capability : plugin.getSupportedCapabilities()){
            if (pluginsPerCapability.containsKey(capability)){
                pluginsPerCapability.get(capability).remove(plugin);
                if (pluginsPerCapability.get(capability).size()==0)
                    pluginsPerCapability.remove(capability);
            }
        }
        this.plugins.remove(pluginName);
    }

    public void removePlugins() {
        initializePlugins();
    }

    public void removeRemotePlugin(String gCoreEndpointId) {
        List<String> pluginToRemove = new ArrayList<String>();
        for (AbstractPlugin plugin : plugins.values())
            if (plugin.isRemote()){
                RemotePlugin rPlugin = (RemotePlugin) plugin;
                rPlugin.getRemoteUris().remove(gCoreEndpointId);
                if (rPlugin.getRemoteUris().isEmpty())
                    pluginToRemove.add(rPlugin.getRepositoryName());
            }
        for (String pluginName: pluginToRemove){
            log.info("removing remote plugin {}", pluginName);
            this.removePlugin(pluginName);
        }
    }

    public void shutdown(){
        notifyRemoteServicesOnShutdown();
    }

    private void notifyRemoteServicesOnShutdown(){
        for (GCoreEndpoint address: retrieveTwinServicesAddresses()) {
            String endpointId = ctx.profile(GCoreEndpoint.class).id();
            URI uri = null;
            try {
                for (Endpoint endpoint : address.profile().endpoints())
                    if (endpoint.name().equals("remote-dispatcher")){
                        uri = endpoint.uri();
                        break;
                    }
                if (uri!=null){
                    //TODO : call remote rest service
                    //RemoteDispatcher remoteDispatcher = org.gcube.data.spd.client.Constants.getRemoteDispatcherService(uri.toString());
                    //remoteDispatcher.removeAll(endpointId);
                }
            }catch (Throwable e) {
                log.warn("error contacting remote plugin hosted on WHN id "+address.profile().ghnId());
                continue;
            }
        }
    }
}
@ -0,0 +1,38 @@
package org.gcube.data.spd.plugin;

import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.gcube.data.spd.model.service.exceptions.UnsupportedCapabilityException;
import org.gcube.data.spd.model.service.exceptions.UnsupportedPluginException;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;

public class PluginUtils {

    public static Collection<AbstractPlugin> getPluginsSubList(Collection<String> pluginsName, Map<String, AbstractPlugin> plugins) throws UnsupportedPluginException{
        Set<AbstractPlugin> selectedPlugins = new HashSet<AbstractPlugin>();
        for (String pluginName: pluginsName)
            if (plugins.containsKey(pluginName)) selectedPlugins.add(plugins.get(pluginName));
            else throw new UnsupportedPluginException();
        return selectedPlugins;
    }

    public static Collection<AbstractPlugin> getExtenderPlugins(Collection<AbstractPlugin> plugins) throws UnsupportedCapabilityException{
        Set<AbstractPlugin> selectedPlugins = new HashSet<AbstractPlugin>();
        for (AbstractPlugin plugin: plugins)
            if (plugin.getExpansionInterface()!=null) selectedPlugins.add(plugin);
        if (selectedPlugins.size()==0) throw new UnsupportedCapabilityException();
        return selectedPlugins;
    }

    public static Collection<AbstractPlugin> getResolverPlugins(Collection<AbstractPlugin> plugins) throws UnsupportedCapabilityException{
        Set<AbstractPlugin> selectedPlugins = new HashSet<AbstractPlugin>();
        for (AbstractPlugin plugin: plugins)
            if (plugin.getMappingInterface()!=null) selectedPlugins.add(plugin);
        if (selectedPlugins.size()==0) throw new UnsupportedCapabilityException();
        return selectedPlugins;
    }

}
@ -0,0 +1,153 @@
package org.gcube.data.spd.remoteplugin;

import static org.gcube.data.streams.dsl.Streams.convert;
import static org.gcube.data.streams.dsl.Streams.publishStringsIn;

import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import org.gcube.data.spd.Constants;
import org.gcube.data.spd.model.Condition;
import org.gcube.data.spd.model.Conditions;
import org.gcube.data.spd.model.binding.Bindings;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.model.exceptions.IdNotValidException;
import org.gcube.data.spd.model.exceptions.MethodNotSupportedException;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.spd.model.service.exceptions.InvalidIdentifierException;
import org.gcube.data.spd.model.service.types.SearchCondition;
import org.gcube.data.spd.model.service.types.SearchRequest;
import org.gcube.data.spd.plugin.fwk.capabilities.ClassificationCapability;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.gcube.data.streams.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.thoughtworks.xstream.XStream;

public class RemoteClassificationCapability extends ClassificationCapability {

    private Set<Conditions> props = new HashSet<Conditions>();
    volatile Logger logger = LoggerFactory.getLogger(RemoteClassificationCapability.class);
    private String parentName;
    private Collection<String> uris;

    public RemoteClassificationCapability(Conditions[] properties, String parentName, Collection<String> uris){
        if (properties!=null)
            for (Conditions prop: properties)
                props.add(prop);
        this.parentName = parentName;
        this.uris = uris;
    }

    @Override
    public Set<Conditions> getSupportedProperties() {
        return props;
    }

    @Override
    public void searchByScientificName(String word,
            ObjectWriter<TaxonomyItem> writer, Condition... properties) throws ExternalRepositoryException {
        //transforming properties
        List<SearchCondition> props = Collections.emptyList();
        if (properties!=null && properties.length>0){
            props = new ArrayList<SearchCondition>(properties.length);
            for (int i = 0; i<properties.length; i++)
                props.add(new SearchCondition(properties[i].getType(), properties[i].getOp(), new XStream().toXML(properties[i].getValue())));
        }

        //TODO : call remote rest service
        String locator = "";// RemotePlugin.getRemoteDispatcher(uris).search(new SearchRequest(this.parentName, props, Constants.TAXON_RETURN_TYPE, word));
        Stream<String> items = convert(URI.create(locator)).ofStrings().withDefaults();
        while(items.hasNext())
            try{
                writer.write((TaxonomyItem) Bindings.fromXml(items.next()));
            }catch (Exception e) {
                logger.error("error binding result item", e);
            }
    }

    @Override
    public List<TaxonomyItem> retrieveTaxonChildrenByTaxonId(String taxonId)
            throws IdNotValidException, ExternalRepositoryException {
        List<TaxonomyItem> itemsList = new ArrayList<TaxonomyItem>();
        //TODO : call remote rest service
        String locator = "";// RemotePlugin.getRemoteDispatcher(uris).retrieveTaxonChildrenByTaxonId(taxonId, this.parentName);
        Stream<String> items = convert(URI.create(locator)).ofStrings().withDefaults();
        while(items.hasNext())
            try{
                itemsList.add((TaxonomyItem) Bindings.fromXml(items.next()));
            }catch (Exception e) {
                logger.error("error binding", e);
            }

        return itemsList;
    }

    @Override
    public TaxonomyItem retrieveTaxonById(String id)
            throws IdNotValidException, ExternalRepositoryException {
        try{
            //TODO : call remote rest service
            String item = "";// RemotePlugin.getRemoteDispatcher(uris).getTaxonById(id, parentName);
            return (TaxonomyItem) Bindings.fromXml(item);
        /*} catch (InvalidIdentifierException e) {
            logger.error("id not valid "+id+" for plugin "+parentName);
            throw new IdNotValidException("id not valid "+id+" for plugin "+parentName);*/
        } catch (Exception e) {
            logger.error("error retrieving taxon for plugin "+parentName);
            throw new ExternalRepositoryException("error retrieving taxon for plugin "+parentName);
        }

    }

    @Override
    public void getSynonymnsById(ObjectWriter<TaxonomyItem> writer, String id)
            throws IdNotValidException, MethodNotSupportedException, ExternalRepositoryException {
        //TODO : call remote rest service
        String locator = "";// RemotePlugin.getRemoteDispatcher(uris).getSynonymsById(id, this.parentName);
        Stream<String> items = convert(URI.create(locator)).ofStrings().withDefaults();
        while(items.hasNext())
            try{
                writer.write((TaxonomyItem) Bindings.fromXml(items.next()));
            }catch (Exception e) {
                logger.error("error binding", e);
            }

    }

    @Override
    public void retrieveTaxonByIds(Iterator<String> ids,
            ClosableWriter<TaxonomyItem> writer) throws ExternalRepositoryException {
        try{
            String inputIdsLocator = publishStringsIn(convert(ids)).withDefaults().toString();
            //TODO : call remote rest service
            String locator = ""; // RemotePlugin.getRemoteDispatcher(uris).retrieveTaxaByIds(inputIdsLocator, this.parentName);
            Stream<String> items = convert(URI.create(locator)).ofStrings().withDefaults();
            while(items.hasNext())
                try{
                    writer.write((TaxonomyItem) Bindings.fromXml(items.next()));
                }catch (Exception e) {
                    logger.error("error binding", e);
                }

        }finally{
            writer.close();
        }

    }

}
@ -0,0 +1,45 @@
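// RemoteExpandCapability: proxies the Expansion capability of a plugin hosted
// on a remote service node; synonyms are streamed back from the locator that
// the remote dispatcher will return (the REST call itself is still a TODO).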
package org.gcube.data.spd.remoteplugin;

import static org.gcube.data.streams.dsl.Streams.convert;

import java.net.URI;
import java.rmi.RemoteException;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

import org.gcube.data.spd.exception.ServiceException;
import org.gcube.data.spd.model.Conditions;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.plugin.fwk.capabilities.ExpansionCapability;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.gcube.data.streams.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class RemoteExpandCapability implements ExpansionCapability {

    volatile Logger logger = LoggerFactory.getLogger(RemoteExpandCapability.class);

    private Set<Conditions> props = new HashSet<Conditions>();
    private String parentName;
    private Collection<String> uris;

    public RemoteExpandCapability(Conditions[] properties, String parentName, Collection<String> uris){
        if (properties!=null)
            for (Conditions prop: properties)
                props.add(prop);
        this.parentName = parentName;
        this.uris = uris;
    }

    @Override
    public void getSynonyms(ObjectWriter<String> writer, String scientificName) throws ExternalRepositoryException {
        //TODO : call remote rest service
        String locator = ""; // RemotePlugin.getRemoteDispatcher(uris).expandWithSynonyms(scientificName, this.parentName);
        Stream<String> synonyms = convert(URI.create(locator)).ofStrings().withDefaults();
        while (synonyms.hasNext())
            writer.write(synonyms.next());
    }

}
@ -0,0 +1,49 @@
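// RemoteNamesMappingCapability: proxies the NamesMapping capability, resolving
// the scientific names related to a common name through the remote dispatcher.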
package org.gcube.data.spd.remoteplugin;

import static org.gcube.data.streams.dsl.Streams.convert;

import java.net.URI;
import java.rmi.RemoteException;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

import org.gcube.data.spd.exception.ServiceException;
import org.gcube.data.spd.model.Conditions;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.plugin.fwk.capabilities.MappingCapability;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.gcube.data.streams.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class RemoteNamesMappingCapability implements MappingCapability {

    volatile Logger logger = LoggerFactory.getLogger(RemoteNamesMappingCapability.class);

    private Set<Conditions> props = new HashSet<Conditions>();
    private String parentName;
    private Collection<String> uris;

    public RemoteNamesMappingCapability(Conditions[] properties, String parentName, Collection<String> uris){
        if (properties!=null)
            for (Conditions prop: properties)
                props.add(prop);
        this.parentName = parentName;
        this.uris = uris;
    }

    @Override
    public void getRelatedScientificNames(ObjectWriter<String> writer,
            String commonName) throws ExternalRepositoryException{
        //TODO : call remote rest service
        String locator = "";// RemotePlugin.getRemoteDispatcher(uris).namesMapping(commonName, this.parentName);
        Stream<String> names = convert(URI.create(locator)).ofStrings().withDefaults();
        while (names.hasNext())
            writer.write(names.next());
    }

}
@ -0,0 +1,118 @@
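// RemoteOccurrencesCapability: proxies the Occurrence capability; occurrence
// points are read as XML strings from a remote locator and bound locally.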
package org.gcube.data.spd.remoteplugin;

import static org.gcube.data.streams.dsl.Streams.convert;

import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import org.gcube.data.spd.model.Condition;
import org.gcube.data.spd.model.Conditions;
import org.gcube.data.spd.model.binding.Bindings;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.model.service.types.SearchCondition;
import org.gcube.data.spd.plugin.fwk.capabilities.OccurrencesCapability;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.gcube.data.streams.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.thoughtworks.xstream.XStream;

public class RemoteOccurrencesCapability extends OccurrencesCapability {

    private Set<Conditions> props = new HashSet<Conditions>();
    private String parentName;
    private Collection<String> uris;

    volatile Logger logger = LoggerFactory.getLogger(RemoteOccurrencesCapability.class);

    public RemoteOccurrencesCapability(Conditions[] properties, String parentName, Collection<String> uris) {
        if (properties!=null)
            for (Conditions prop: properties)
                props.add(prop);
        this.parentName = parentName;
        this.uris = uris;
    }

    @Override
    public Set<Conditions> getSupportedProperties() {
        return props;
    }

    @Override
    public void searchByScientificName(String word,
            ObjectWriter<OccurrencePoint> writer, Condition... properties) throws ExternalRepositoryException {
        //transforming properties
        List<SearchCondition> props = Collections.emptyList();
        if (properties!=null && properties.length>0){
            props = new ArrayList<SearchCondition>(properties.length);
            for (int i = 0; i<properties.length; i++)
                props.add(new SearchCondition(properties[i].getType(), properties[i].getOp(), new XStream().toXML(properties[i].getValue())));
        }

        //TODO : call remote rest service
        String locator = "";// RemotePlugin.getRemoteDispatcher(uris).search(new SearchRequest(this.parentName, props, Constants.OCCURRENCE_RETURN_TYPE, word));
        Stream<String> items = convert(URI.create(locator)).ofStrings().withDefaults();
        while(items.hasNext())
            try{
                writer.write((OccurrencePoint) Bindings.fromXml(items.next()));
            }catch (Exception e) {
                logger.error("error binding",e);
            }
    }

    @Override
    public void getOccurrencesByProductKeys(
            ClosableWriter<OccurrencePoint> writer, Iterator<String> keys) throws ExternalRepositoryException {

        logger.trace("remote getOccurrencesByProductKeys called in "+this.parentName);
        try{
            //TODO : call remote rest service
            String locator = ""; //RemotePlugin.getRemoteDispatcher(uris).getOccurrencesByProductKeys(publishStringsIn(convert(keys)).withDefaults().toString(), this.parentName);
            Stream<String> items = convert(URI.create(locator)).ofStrings().withDefaults();
            while(items.hasNext()){
                String item = items.next();
                try{
                    writer.write((OccurrencePoint) Bindings.fromXml(item));
                }catch (Exception e) {
                    logger.error("error binding the item:\n"+item+"\n",e);
                }
            }
        }finally{
            writer.close();
        }
    }

    @Override
    public void getOccurrencesByIds(ClosableWriter<OccurrencePoint> writer,
            Iterator<String> ids) throws ExternalRepositoryException{

        logger.trace("remote getOccurrencesByIds called in "+this.parentName);
        try{
            String locator = ""; //RemotePlugin.getRemoteDispatcher(uris).getOccurrencesByProductKeys(publishStringsIn(convert(ids)).withDefaults().toString(), this.parentName);
            Stream<String> items = convert(URI.create(locator)).ofStrings().withDefaults();
            while(items.hasNext())
                try{
                    writer.write((OccurrencePoint) Bindings.fromXml(items.next()));
                }catch (Exception e) {
                    logger.error("error binding",e);
                }
        }finally{
            writer.close();
        }
    }

}
@ -0,0 +1,256 @@
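// RemotePlugin: an AbstractPlugin facade over a plugin deployed on another
// service instance. It discovers the remote-dispatcher endpoint through the
// information system (caching it per endpoint id) and wires one Remote*
// capability per capability declared in the remote PluginDescription.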
package org.gcube.data.spd.remoteplugin;

import static org.gcube.data.streams.dsl.Streams.convert;
import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;

import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.gcube.common.resources.gcore.GCoreEndpoint;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.spd.Constants;
import org.gcube.data.spd.exception.ServiceException;
import org.gcube.data.spd.model.Condition;
import org.gcube.data.spd.model.Conditions;
import org.gcube.data.spd.model.PluginDescription;
import org.gcube.data.spd.model.RepositoryInfo;
import org.gcube.data.spd.model.binding.Bindings;
import org.gcube.data.spd.model.products.ResultItem;
import org.gcube.data.spd.model.service.types.SearchCondition;
import org.gcube.data.spd.model.util.Capabilities;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.capabilities.ClassificationCapability;
import org.gcube.data.spd.plugin.fwk.capabilities.ExpansionCapability;
import org.gcube.data.spd.plugin.fwk.capabilities.MappingCapability;
import org.gcube.data.spd.plugin.fwk.capabilities.OccurrencesCapability;
import org.gcube.data.spd.plugin.fwk.capabilities.UnfoldCapability;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.gcube.data.streams.Stream;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.thoughtworks.xstream.XStream;

public class RemotePlugin extends AbstractPlugin {

    volatile static Logger logger = LoggerFactory.getLogger(RemotePlugin.class);

    private ClassificationCapability classification;
    private MappingCapability mapping;
    private ExpansionCapability expand;
    private OccurrencesCapability occurrences;
    private UnfoldCapability unfold;
    private String name;
    private String description;
    private Set<String> remoteUris = new HashSet<String>();
    private RepositoryInfo info;
    private Set<Capabilities> supportedCapabilities = new HashSet<Capabilities>();

    private static Map<String, String> cacheGCoreEnpointsRemoteDispatcherPT = new HashMap<String, String>();

    @Override
    public RepositoryInfo getRepositoryInfo() {
        return info;
    }

    protected static String getRemoteDispatcher(Collection<String> endpointIds) throws ServiceException{

        if (endpointIds==null || endpointIds.size()==0)
            throw new ServiceException("remote service endpoints are empty");

        boolean notCachedFound = false;
        Set<RemoteUri> uris = new HashSet<RemoteUri>();

        StringBuffer inBuf = new StringBuffer("(");

        for (String endpointId : endpointIds){
            if (cacheGCoreEnpointsRemoteDispatcherPT.containsKey(endpointId))
                uris.add(new RemoteUri(endpointId, cacheGCoreEnpointsRemoteDispatcherPT.get(endpointId)));
            else{
                inBuf.append("'").append(endpointId).append("'").append(",");
                notCachedFound = true;
            }
        }

        if (notCachedFound){
            inBuf.replace(inBuf.lastIndexOf(","), inBuf.length(), ")");

            try{
                SimpleQuery query = queryFor(GCoreEndpoint.class);

                query.addCondition("$resource/Profile/ServiceName/text() eq '"+Constants.SERVICE_NAME+"'")
                    .addCondition("$resource/Profile/ServiceClass/text() eq '"+Constants.SERVICE_CLASS+"'")
                    .addCondition("$resource/Profile/DeploymentData/Status/text() eq 'ready'")
                    .addCondition("$resource/ID/text() in "+inBuf.toString());

                query.setResult("<RemoteUri><id>{$resource/ID/text()}</id>" +
                        "<uri>{$resource/Profile/AccessPoint/RunningInstanceInterfaces//Endpoint[@EntryName/text() eq 'remote-dispatcher'][0]}</uri></RemoteUri>");

                DiscoveryClient<RemoteUri> client = clientFor(RemoteUri.class);

                List<RemoteUri> discoveredUris = client.submit(query);

                for (RemoteUri discoveredUri: discoveredUris){
                    uris.add(discoveredUri);
                    cacheGCoreEnpointsRemoteDispatcherPT.put(discoveredUri.getEndpointId(), discoveredUri.getUri());
                }

            }catch(Exception e){
                logger.warn("error discovering remote gCoreEndpoints",e);
            }
        }

        for (RemoteUri uri : uris){
            try{
                return uri.getUri();
            }catch(Exception e){
                logger.warn("remote dispatcher at "+uri+" is unreachable, it'll be discarded and removed from cache");
                cacheGCoreEnpointsRemoteDispatcherPT.remove(uri.getEndpointId());
            }
        }

        throw new ServiceException("no valid uri found for this remote plugin");
    }

    public void addUrl(String url){
        this.remoteUris.add(url);
    }

    public void removeUrl(String url){
        this.remoteUris.remove(url);
    }

    @Override
    public void searchByScientificName(String word,
            ObjectWriter<ResultItem> writer, Condition... properties) {
        //transforming properties
        logger.trace("("+this.getRepositoryName()+" - REMOTE) call arrived in scope "+ScopeProvider.instance.get());
        List<SearchCondition> props = Collections.emptyList();
        if (properties!=null && properties.length>0){
            props = new ArrayList<SearchCondition>(properties.length);
            for (int i = 0; i<properties.length; i++)
                props.add(new SearchCondition(properties[i].getType(), properties[i].getOp(), new XStream().toXML(properties[i].getValue())));
        }

        logger.trace("properties retrieved");

        try{
            String locator = "";// getRemoteDispatcher(remoteUris).search(new SearchRequest(this.name, props, Constants.RESULITEM_RETURN_TYPE, word));
            Stream<String> items = convert(URI.create(locator)).ofStrings().withDefaults();
            while(items.hasNext())
                try{
                    writer.write((ResultItem) Bindings.fromXml(items.next()));
                }catch (Exception e) {
                    logger.error("error binding result item",e);
                }
        }catch (Exception e) {
            logger.error("error executing search",e);
        }
    }

    public void remoteIntitializer(PluginDescription pd, String uri) throws Exception{
        this.setUseCache(true);

        this.name = pd.getName();
        this.description = pd.getDescription();
        this.remoteUris.add(uri);
        this.info = pd.getInfo();

        //adding supported capabilities
        for (Entry<Capabilities, List<Conditions>> capabilityDescriptions: pd.getSupportedCapabilities().entrySet()){

            Conditions[] properties = capabilityDescriptions.getValue().toArray(new Conditions[capabilityDescriptions.getValue().size()]);

            switch (capabilityDescriptions.getKey()) {
            case Classification:
                this.classification = new RemoteClassificationCapability(properties, this.name, remoteUris);
                break;
            case NamesMapping:
                this.mapping = new RemoteNamesMappingCapability(properties, this.name, remoteUris);
                break;
            case Occurrence:
                this.occurrences = new RemoteOccurrencesCapability(properties, this.name, remoteUris);
                break;
            case Expansion:
                this.expand = new RemoteExpandCapability(properties, this.name, remoteUris);
                break;
            case Unfold:
                this.unfold = new RemoteUnfoldCapability(properties, this.name, remoteUris);
                break;
            default:
                break;
            }
            supportedCapabilities.add(capabilityDescriptions.getKey());
        }
    }

    @Override
    public ClassificationCapability getClassificationInterface() {
        return classification;
    }

    @Override
    public OccurrencesCapability getOccurrencesInterface() {
        return occurrences;
    }

    @Override
    public MappingCapability getMappingInterface() {
        return mapping;
    }

    @Override
    public ExpansionCapability getExpansionInterface() {
        return expand;
    }

    @Override
    public UnfoldCapability getUnfoldInterface() {
        return unfold;
    }

    @Override
    public Set<Capabilities> getSupportedCapabilities() {
        return this.supportedCapabilities;
    }

    public boolean isRemote(){
        return true;
    }

    @Override
    public String getRepositoryName() {
        return name;
    }

    @Override
    public String getDescription() {
        return description;
    }

    public Collection<String> getRemoteUris() {
        return remoteUris;
    }

}
@ -0,0 +1,46 @@
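// RemoteUnfoldCapability: proxies the Unfold capability, streaming the names
// a scientific name unfolds to from the locator returned by the dispatcher.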
package org.gcube.data.spd.remoteplugin;

import static org.gcube.data.streams.dsl.Streams.convert;

import java.net.URI;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

import org.gcube.data.spd.model.Conditions;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.plugin.fwk.capabilities.UnfoldCapability;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.gcube.data.streams.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class RemoteUnfoldCapability implements UnfoldCapability{

    volatile Logger logger = LoggerFactory.getLogger(RemoteUnfoldCapability.class);

    private Set<Conditions> props = new HashSet<Conditions>();
    private String parentName;
    private Collection<String> uris;

    public RemoteUnfoldCapability(Conditions[] properties, String parentName, Collection<String> uris){
        if (properties!=null)
            for (Conditions prop: properties)
                props.add(prop);
        this.parentName = parentName;
        this.uris = uris;
    }

    @Override
    public void unfold(ObjectWriter<String> writer, String scientificName)
            throws ExternalRepositoryException {
        String locator = null;
        //TODO : call remote rest service
        locator = ""; // RemotePlugin.getRemoteDispatcher(uris).namesMapping(scientificName, this.parentName);
        Stream<String> names = convert(URI.create(locator)).ofStrings().withDefaults();
        while (names.hasNext())
            writer.write(names.next());
    }

}
@ -0,0 +1,33 @@
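// RemoteUri: JAXB binding for the <RemoteUri> fragments built by the discovery
// query in RemotePlugin.getRemoteDispatcher. Illustrative serialized form
// (element names taken from the annotations below; values are placeholders):
// <RemoteUri><id>endpoint-id</id><uri>http://host:port/remote-dispatcher</uri></RemoteUri>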
package org.gcube.data.spd.remoteplugin;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

@XmlRootElement(name="RemoteUri")
@XmlAccessorType(XmlAccessType.FIELD)
public class RemoteUri {

    @XmlElement(name="id")
    private String endpointId;

    @XmlElement(name="uri")
    private String uri;

    public RemoteUri(){}

    public RemoteUri(String endpointId, String uri) {
        this.endpointId = endpointId;
        this.uri = uri;
    }

    public String getEndpointId() {
        return endpointId;
    }

    public String getUri() {
        return uri;
    }

}
@ -0,0 +1,311 @@
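// Classification: REST resource ("taxon") exposing children, children-tree and
// synonym retrieval; results are pushed to a ResultWrapper by a worker thread
// while the caller is redirected to the resultset locator.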
package org.gcube.data.spd.resources;

import static org.gcube.data.streams.dsl.Streams.convert;
import static org.gcube.data.streams.dsl.Streams.pipe;

import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Response;

import org.gcube.data.spd.exception.MaxRetriesReachedException;
import org.gcube.data.spd.manager.AppInitializer;
import org.gcube.data.spd.manager.TaxonomyItemWriterManager;
import org.gcube.data.spd.model.Constants;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.model.exceptions.IdNotValidException;
import org.gcube.data.spd.model.exceptions.MethodNotSupportedException;
import org.gcube.data.spd.model.exceptions.StreamBlockingException;
import org.gcube.data.spd.model.exceptions.StreamNonBlockingException;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.spd.model.service.exceptions.InvalidIdentifierException;
import org.gcube.data.spd.model.service.exceptions.UnsupportedCapabilityException;
import org.gcube.data.spd.model.service.exceptions.UnsupportedPluginException;
import org.gcube.data.spd.model.util.Capabilities;
import org.gcube.data.spd.plugin.PluginManager;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.readers.LocalReader;
import org.gcube.data.spd.plugin.fwk.util.Util;
import org.gcube.data.spd.plugin.fwk.writers.ObjectWriter;
import org.gcube.data.spd.plugin.fwk.writers.Writer;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.LocalWrapper;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.ResultWrapper;
import org.gcube.data.spd.utils.ExecutorsContainer;
import org.gcube.data.spd.utils.JobRetryCall;
import org.gcube.data.spd.utils.QueryRetryCall;
import org.gcube.data.spd.utils.ResultWrapperMantainer;
import org.gcube.data.spd.utils.VOID;
import org.gcube.data.streams.Stream;
import org.gcube.data.streams.delegates.PipedStream;
import org.gcube.smartgears.ApplicationManagerProvider;
import org.gcube.smartgears.ContextProvider;
import org.gcube.smartgears.context.application.ApplicationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Path("taxon")
public class Classification {

    private static Logger logger = LoggerFactory.getLogger(Classification.class);

    AppInitializer initializer = (AppInitializer)ApplicationManagerProvider.get(AppInitializer.class);

    ApplicationContext ctx = ContextProvider.get();

    @GET
    @Path("children/{key}")
    public Response retrieveTaxonChildrenByTaxonId(@PathParam("key") String key) throws UnsupportedPluginException, UnsupportedCapabilityException, InvalidIdentifierException {
        try{
            logger.trace("calling get taxon children by id");
            PluginManager manager = initializer.getPluginManager();
            String pluginName = Util.getProviderFromKey(key);
            String id = Util.getIdFromKey(key);
            if (!manager.plugins().containsKey(pluginName))
                throw new UnsupportedPluginException();
            AbstractPlugin plugin = manager.plugins().get(pluginName);
            if (!plugin.getSupportedCapabilities().contains(Capabilities.Classification)) throw new UnsupportedCapabilityException();
            try {
                logger.trace("retrieving list of taxon items");
                List<TaxonomyItem> taxonChildren = plugin.getClassificationInterface().retrieveTaxonChildrenByTaxonId(id);
                logger.trace("taxon items found are "+taxonChildren.size());
                Stream<TaxonomyItem> taxonStream = convert(taxonChildren);
                PipedStream<TaxonomyItem, TaxonomyItem> pipedTaxa = pipe(taxonStream).through(new TaxonomyItemWriterManager(plugin.getRepositoryName()));

                ResultWrapper<TaxonomyItem> wrapper = ResultWrapperMantainer.getWrapper(TaxonomyItem.class);

                while (pipedTaxa.hasNext())
                    wrapper.add(pipedTaxa.next());

                // the output will probably be returned even before
                // a first chunk is written by the new thread
                StringBuilder redirectUri = new StringBuilder();
                redirectUri.append("http://").append(ctx.container().configuration().hostname()).append(":").append(ctx.container().configuration().port());
                redirectUri.append(ctx.application().getContextPath()).append(Constants.APPLICATION_ROOT_PATH).append("/").append(Constants.RESULTSET_PATH).append("/").append(wrapper.getLocator());
                logger.trace("redirect uri is {} ",redirectUri.toString());
                try{
                    return Response.temporaryRedirect(new URI(redirectUri.toString())).build();
                }catch(Exception e){
                    logger.error("invalid redirect uri created",e);
                    return Response.serverError().build();
                }

            } catch (IdNotValidException e) {
                logger.error("the id "+id+" is not valid",e);
                throw new IdNotValidException();
            }
        }catch (Throwable e) {
            logger.error("error getting taxon by id",e);
            throw new RuntimeException(e);
        }
    }

    @GET
    @Path("tree/{key}")
    public Response retrieveChildrenTreeById(@PathParam("key") final String key) throws UnsupportedPluginException, UnsupportedCapabilityException, InvalidIdentifierException{
        PluginManager manager = initializer.getPluginManager();

        try{
            String pluginName = Util.getProviderFromKey(key);
            final String id = Util.getIdFromKey(key);
            if (!manager.plugins().containsKey(pluginName))
                throw new UnsupportedPluginException();
            final AbstractPlugin plugin = manager.plugins().get(pluginName);
            if (!plugin.getSupportedCapabilities().contains(Capabilities.Classification)) throw new UnsupportedCapabilityException();

            final ResultWrapper<TaxonomyItem> wrapper = ResultWrapperMantainer.getWrapper(TaxonomyItem.class);

            final TaxonomyItem taxon = plugin.getClassificationInterface().retrieveTaxonById(id);
            ExecutorsContainer.execSearch(new Runnable() {
                @Override
                public void run(){
                    Writer<TaxonomyItem> writer = new Writer<TaxonomyItem>(wrapper, new TaxonomyItemWriterManager(plugin.getRepositoryName()));
                    writer.register();
                    Classification.retrieveTaxaTree(writer, taxon, plugin);
                    writer.close();
                }
            });

            // the output will probably be returned even before
            // a first chunk is written by the new thread
            StringBuilder redirectUri = new StringBuilder();
            redirectUri.append("http://").append(ctx.container().configuration().hostname()).append(":").append(ctx.container().configuration().port());
            redirectUri.append(ctx.application().getContextPath()).append(Constants.APPLICATION_ROOT_PATH).append("/").append(Constants.RESULTSET_PATH).append("/").append(wrapper.getLocator());
            logger.trace("redirect uri is {} ",redirectUri.toString());
            try{
                return Response.temporaryRedirect(new URI(redirectUri.toString())).build();
            }catch(Exception e){
                logger.error("invalid redirect uri created",e);
                return Response.serverError().build();
            }

        }catch(IdNotValidException inve){
            logger.error("invalid id",inve);
            throw new InvalidIdentifierException(key);
        }catch (Exception e) {
            logger.error("error retrieving children tree by id",e);
            throw new RuntimeException(e);
        }
    }

    @GET
    @Path("synonyms/{key}")
    public Response retrieveSynonymsById(@PathParam("key") String key) throws UnsupportedPluginException, UnsupportedCapabilityException, InvalidIdentifierException{
        try{
            PluginManager manager = initializer.getPluginManager();
            String pluginName = Util.getProviderFromKey(key);
            final String id = Util.getIdFromKey(key);
            if (!manager.plugins().containsKey(pluginName))
                throw new UnsupportedPluginException();
            final AbstractPlugin plugin = manager.plugins().get(pluginName);
            if (!plugin.getSupportedCapabilities().contains(Capabilities.Classification)) throw new UnsupportedCapabilityException();

            final ResultWrapper<TaxonomyItem> wrapper = ResultWrapperMantainer.getWrapper(TaxonomyItem.class);

            ExecutorsContainer.execSearch(new Runnable() {
                @Override
                public void run(){
                    Writer<TaxonomyItem> writer = new Writer<TaxonomyItem>(wrapper, new TaxonomyItemWriterManager(plugin.getRepositoryName()));
                    writer.register();
                    try {
                        plugin.getClassificationInterface().getSynonymnsById(writer, id);
                    } catch (MethodNotSupportedException e) {
                        logger.error("error retrieving synonyms",e);
                    } catch (Exception e) {
                        logger.error("error retrieving synonyms",e);
                    }finally{
                        writer.close();
                    }
                }
            });

            // the output will probably be returned even before
            // a first chunk is written by the new thread
            StringBuilder redirectUri = new StringBuilder();
            redirectUri.append("http://").append(ctx.container().configuration().hostname()).append(":").append(ctx.container().configuration().port());
            redirectUri.append(ctx.application().getContextPath()).append(Constants.APPLICATION_ROOT_PATH).append("/").append(Constants.RESULTSET_PATH).append("/").append(wrapper.getLocator());
            logger.trace("redirect uri is {} ",redirectUri.toString());
            try{
                return Response.temporaryRedirect(new URI(redirectUri.toString())).build();
            }catch(Exception e){
                logger.error("invalid redirect uri created",e);
                return Response.serverError().build();
            }
        }catch (IdNotValidException e) {
            logger.error("error retrieving synonyms by id",e);
            throw new InvalidIdentifierException(key);
        }catch (Exception e1) {
            logger.error("error retrieving synonyms by id",e1);
            throw new RuntimeException(e1);
        }
    }

    /*TODO: move to the new system
    @GET
    @PathParam("taxon/list/{idsLocator}")
    public String getTaxaByIds(@PathParam("idsLocator") String idsLocator) {
        try{
            logger.trace("calling get taxon by id with locator "+idsLocator);
            Stream<String> reader = convert(URI.create(idsLocator)).ofStrings().withDefaults();
            ResultWrapper<TaxonomyItem> wrapper = new ResultWrapper<TaxonomyItem>();
            logger.trace("starting the thread");
            ExecutorsContainer.execSearch(AuthorizedTasks.bind(new RunnableTaxonomySearch(reader, wrapper)));
            return wrapper.getLocator();
        }catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
    */

    protected static void retrieveTaxaTree(final ObjectWriter<TaxonomyItem> writer, final TaxonomyItem taxon, final AbstractPlugin plugin) {
        try {
            new JobRetryCall<VOID, IdNotValidException>() {

                @Override
                protected VOID execute()
                        throws ExternalRepositoryException, IdNotValidException {
                    writer.write(taxon);
                    List<TaxonomyItem> items = plugin.getClassificationInterface().retrieveTaxonChildrenByTaxonId(taxon.getId());
                    for(TaxonomyItem item : items){
                        item.setParent(taxon);
                        retrieveTaxaTree(writer, item, plugin);
                    }
                    return VOID.instance();
                }

            }.call();

        } catch (IdNotValidException e) {
            writer.write(new StreamNonBlockingException(plugin.getRepositoryName(), taxon.getId()));
        } catch (MaxRetriesReachedException e) {
            logger.error("blocking error retrieving taxa tree",e);
            writer.write(new StreamBlockingException(plugin.getRepositoryName()));
        }
    }

    public class RunnableTaxonomySearch implements Runnable{

        Stream<String> reader;
        ResultWrapper<TaxonomyItem> wrapper;

        public RunnableTaxonomySearch(Stream<String> reader,
                ResultWrapper<TaxonomyItem> wrapper) {
            super();
            this.reader = reader;
            this.wrapper = wrapper;
        }

        public void run(){
            Map<String, Writer<String>> pluginMap = new HashMap<String, Writer<String>>();
            while (reader.hasNext()){
                String key = reader.next();
                try{
                    final String provider = Util.getProviderFromKey(key);
                    String id = Util.getIdFromKey(key);
                    if (!pluginMap.containsKey(provider)){
                        final LocalWrapper<String> localWrapper = new LocalWrapper<String>();
                        pluginMap.put(provider, new Writer<String>(localWrapper));
                        ExecutorsContainer.execSearch(new Runnable(){
                            public void run(){
                                final AbstractPlugin plugin = initializer.getPluginManager().plugins().get(provider);
                                final Writer<TaxonomyItem> writer = new Writer<TaxonomyItem>(wrapper, new TaxonomyItemWriterManager(plugin.getRepositoryName()));
                                writer.register();

                                try {
                                    new QueryRetryCall() {

                                        @Override
                                        protected VOID execute()
                                                throws ExternalRepositoryException {
                                            plugin.getClassificationInterface().retrieveTaxonByIds(new LocalReader<String>(localWrapper), writer);
                                            return VOID.instance();
                                        }

                                    }.call();
                                } catch (MaxRetriesReachedException e) {
                                    writer.write(new StreamBlockingException(plugin.getRepositoryName()));
                                }
                            }
                        });
                    }
                    pluginMap.get(provider).write(id);
                }catch (IdNotValidException e) {
                    logger.warn("the key "+key+" is not valid");
                }
            }
            for (Writer<String> writer : pluginMap.values())
                writer.close();
        }

    }

}
@ -0,0 +1,331 @@
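// Executor: REST resource ("job") that submits, feeds, inspects and removes
// long-running jobs (DwC-A, CSV, DarwinCore, layer creation); job keys have
// the form <hostingNodeId>||<jobId> so calls can be routed to the owning node.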
package org.gcube.data.spd.resources;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map.Entry;

import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.MediaType;

import org.gcube.common.resources.gcore.HostingNode;
import org.gcube.data.spd.executor.jobs.SpeciesJob;
import org.gcube.data.spd.executor.jobs.URLJob;
import org.gcube.data.spd.executor.jobs.csv.CSVCreator;
import org.gcube.data.spd.executor.jobs.csv.CSVCreatorForOMJob;
import org.gcube.data.spd.executor.jobs.darwincore.DarwinCoreJob;
import org.gcube.data.spd.executor.jobs.dwca.DWCAJobByChildren;
import org.gcube.data.spd.executor.jobs.dwca.DWCAJobByIds;
import org.gcube.data.spd.executor.jobs.layer.LayerCreatorJob;
import org.gcube.data.spd.manager.AppInitializer;
import org.gcube.data.spd.model.exceptions.IdNotValidException;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.spd.model.service.exceptions.InvalidIdentifierException;
import org.gcube.data.spd.model.service.exceptions.InvalidJobException;
import org.gcube.data.spd.model.service.types.CompleteJobStatus;
import org.gcube.data.spd.model.service.types.JobStatus;
import org.gcube.data.spd.model.service.types.NodeStatus;
import org.gcube.data.spd.model.service.types.SubmitJob;
import org.gcube.data.spd.model.util.SerializableList;
import org.gcube.data.spd.plugin.PluginManager;
import org.gcube.data.spd.utils.DynamicList;
import org.gcube.data.spd.utils.DynamicMap;
import org.gcube.data.spd.utils.ExecutorsContainer;
import org.gcube.smartgears.ApplicationManagerProvider;
import org.gcube.smartgears.ContextProvider;
import org.gcube.smartgears.context.application.ApplicationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Path("job")
public class Executor {

    private static Logger logger = LoggerFactory.getLogger(Executor.class);

    public static HashMap<String, SpeciesJob> jobMap = new HashMap<String, SpeciesJob>();

    private static final String jobMapFileName = "jobs.ser";

    AppInitializer initializer = (AppInitializer)ApplicationManagerProvider.get(AppInitializer.class);

    ApplicationContext cxt = ContextProvider.get();

    @GET
    @Path("result/{jobKey}")
    public String getResultLink(@PathParam("jobKey") String jobKey) throws InvalidIdentifierException {

        String node;
        String jobId;

        try{
            node = extractNode(jobKey);
            jobId = extractId(jobKey);
        }catch (IdNotValidException e) {
            logger.error("id not valid "+jobKey,e);
            throw new InvalidIdentifierException(jobKey);
        }

        if (node.equals(cxt.container().profile(HostingNode.class).id())){
            if (!jobMap.containsKey(jobId)) throw new InvalidIdentifierException(jobId);
            return ((URLJob)jobMap.get(jobId)).getResultURL();
        }else {
            //TODO
            return null; // remoteJobCall(node).getResultLink(jobKey);
        }
    }

    @GET
    @Path("error/{jobKey}")
    public String getErrorLink(@PathParam("jobKey") String jobKey) throws InvalidIdentifierException {

        String node;
        String jobId;

        try{
            node = extractNode(jobKey);
            jobId = extractId(jobKey);
        }catch (IdNotValidException e) {
            logger.error("id not valid "+jobKey,e);
            throw new InvalidIdentifierException();
        }

        if (node.equals(cxt.container().profile(HostingNode.class).id())){
            if (!jobMap.containsKey(jobId)) throw new InvalidIdentifierException();
            return ((URLJob)jobMap.get(jobId)).getErrorURL();
        }else{
            //TODO
            return null; // remoteJobCall(node).getErrorLink(jobKey);
        }
    }

    @GET
    @Path("status/{jobKey}")
    public CompleteJobStatus getStatus(@PathParam("jobKey") String jobKey) throws InvalidIdentifierException {

        String node;
        String jobId;

        try{
            node = extractNode(jobKey);
            jobId = extractId(jobKey);
        }catch (IdNotValidException e) {
            logger.error("id not valid "+jobKey,e);
            throw new InvalidIdentifierException(jobKey);
        }

        if (node.equals(cxt.container().profile(HostingNode.class).id())){

            if (!jobMap.containsKey(jobId)){
                logger.trace("id not found, throwing InvalidIdentifierException");
                throw new InvalidIdentifierException(jobId);
            }

            SpeciesJob job = jobMap.get(jobId);

            CompleteJobStatus status = new CompleteJobStatus();

            if (job instanceof DWCAJobByChildren){
                DWCAJobByChildren dwcaJob = (DWCAJobByChildren) job;

                List<NodeStatus> childrenStatus = new ArrayList<NodeStatus>();
                for (Entry<TaxonomyItem, JobStatus> entry : dwcaJob.getMapSubJobs().entrySet()){
                    NodeStatus childStatus = new NodeStatus(entry.getKey().getScientificName(), entry.getValue());
                    childrenStatus.add(childStatus);
                }
                status.setSubNodes(childrenStatus);
            }

            status.setStatus(job.getStatus());
            status.setStartDate(job.getStartDate());
            status.setEndDate(job.getEndDate());
            status.setCompletedEntries(job.getCompletedEntries());

            return status;
        }else{
            //TODO
            return null; //remoteJobCall(node).getStatus(jobKey);
        }
    }

    public static void storeJobMap(ApplicationContext context){
        logger.trace("calling store job map");
        ObjectOutputStream oos = null;
        File file = null;
        try {
            file = context.persistence().file(jobMapFileName);
            //if (file.exists()) file.delete();
            //file.createNewFile();
            oos = new ObjectOutputStream(new FileOutputStream(file));
            oos.writeObject(jobMap);

        } catch (Exception e) {
            logger.error("error writing jobMap of type "+jobMap.getClass().getName()+" on disk",e);
            if (file !=null && file.exists()) file.delete();
        }finally{
            if (oos!=null)
                try {
                    oos.close();
                } catch (IOException e) {
                    logger.warn("error closing stream",e);
                }
        }
    }

    @SuppressWarnings("unchecked")
    public static void loadJobMap(ApplicationContext context){
        logger.trace("calling load job map");
        ObjectInput ois;
        try {
            ois = new ObjectInputStream(new FileInputStream(context.persistence().file(jobMapFileName)));
            jobMap = (HashMap<String, SpeciesJob>) ois.readObject();
            for (Entry<String, SpeciesJob> entry : jobMap.entrySet())
                if (entry.getValue().getStatus().equals(JobStatus.RUNNING))
                    entry.getValue().setStatus(JobStatus.FAILED);
            ois.close();
        } catch (Exception e) {
            logger.trace("the file doesn't exist, creating an empty map");
            jobMap = new HashMap<String, SpeciesJob>();
        }
    }

    @PUT
    @Path("input/{jobKey}")
    @Consumes(MediaType.APPLICATION_XML)
    public boolean submitJob(@PathParam("jobKey") String jobKey, SerializableList<String> input) throws InvalidIdentifierException {
        //String node;
        String jobId;
        try{
            //node = extractNode(jobKey);
            jobId = extractId(jobKey);
        }catch (IdNotValidException e) {
            logger.error("id not valid "+jobKey,e);
            throw new InvalidIdentifierException(jobKey);
        }
        logger.trace("job id extracted is {} ",jobId);
        if (input.getValuesList().isEmpty()){
            logger.info("closing input stream");
            DynamicMap.remove(jobId);
        }
        else {
            DynamicList list = DynamicMap.get(jobId);
            for (String id : input.getValuesList()){
                logger.trace("elaborating input id {}",id);
                if (!list.add(id)) return false;
            }
        }
        return true;
    }

    @DELETE
    @Path("{jobKey}")
    public void removeJob(@PathParam("jobKey") String jobId) throws InvalidIdentifierException {
        if (!jobMap.containsKey(jobId)) throw new InvalidIdentifierException(jobId);
        jobMap.remove(jobId);
    }

    @POST
    @Path("execute")
    @Consumes(MediaType.APPLICATION_XML)
    public String submitJob(SubmitJob request) throws InvalidJobException {
        PluginManager pluginManager = initializer.getPluginManager();
        SpeciesJob job = null;
        switch (request.getJob()) {
        case DWCAByChildren:
            job = new DWCAJobByChildren(request.getInput(), pluginManager.plugins());
            break;
        case DWCAById:
            job = new DWCAJobByIds(pluginManager.plugins());
            DynamicMap.put(job.getId());
            break;
        case CSV:
            job = new CSVCreator(pluginManager.plugins());
            DynamicMap.put(job.getId());
            break;
        case CSVForOM:
            job = new CSVCreatorForOMJob(pluginManager.plugins());
            DynamicMap.put(job.getId());
            break;
        case DarwinCore:
            job = new DarwinCoreJob(pluginManager.plugins());
            DynamicMap.put(job.getId());
            break;
        case LayerCreator:
            job = new LayerCreatorJob(request.getInput(), pluginManager.plugins());
            DynamicMap.put(job.getId());
            break;
        default:
            throw new InvalidJobException();
        }

        if (job == null || !job.validateInput(request.getInput()))
            throw new InvalidJobException();
        return executeJob(job);
    }

    private String executeJob(SpeciesJob job){
        jobMap.put(job.getId(), job);
        ExecutorsContainer.execJob(job);
        return createKey(job.getId());
    }

    private static String extractNode(String key) throws IdNotValidException{
        String[] splitted = key.split("\\|\\|");
        if (splitted.length==2)
            return splitted[0];
        else throw new IdNotValidException();
    }

    private static String extractId(String key) throws IdNotValidException{
        String[] splitted = key.split("\\|\\|");
        if (splitted.length==2)
            return splitted[1];
        else throw new IdNotValidException();
    }

    private String createKey(String id){
        String node = cxt.container().profile(HostingNode.class).id();
        return node+"||"+id;
    }

    /*
    private Executor remoteJobCall(String riId) throws InvalidIdentifierException{
        SimpleQuery query = queryFor(GCoreEndpoint.class);
        query.addCondition("$resource/ID/text() eq '"+riId+"'");

        DiscoveryClient<GCoreEndpoint> client = clientFor(GCoreEndpoint.class);
        List<GCoreEndpoint> addresses = client.submit(query);
        if (addresses.size()>0){
            GCoreEndpoint endpoint = addresses.get(0);
            URI address = endpoint.profile().endpointMap().get("gcube/data/speciesproductsdiscovery/executor").uri();
            try {
                Executor executorPT = executor().at(address).build();
                return executorPT;
            } catch (Exception e) {
                logger.trace("remote service error");
                throw new InvalidIdentifierException();
            }

        }else {
            logger.trace("remote job not found");
            throw new InvalidIdentifierException();
        }
    }*/
}
@ -0,0 +1,232 @@
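// Manager: main REST entry point; parses SpQL queries, fans the search out to
// the plugins supporting the requested return type (occurrence, product or
// taxon) and redirects the caller to the resultset locator.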
package org.gcube.data.spd.resources;

import java.net.URI;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import org.gcube.data.spd.caching.QueryCacheFactory;
import org.gcube.data.spd.manager.AppInitializer;
import org.gcube.data.spd.manager.OccurrenceWriterManager;
import org.gcube.data.spd.manager.ResultItemWriterManager;
import org.gcube.data.spd.manager.TaxonomyItemWriterManager;
import org.gcube.data.spd.manager.search.Search;
import org.gcube.data.spd.model.Condition;
import org.gcube.data.spd.model.Condition.Operator;
import org.gcube.data.spd.model.Conditions;
import org.gcube.data.spd.model.Constants;
import org.gcube.data.spd.model.Coordinate;
import org.gcube.data.spd.model.PluginDescription;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.model.products.ResultItem;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.spd.model.service.exceptions.QueryNotValidException;
import org.gcube.data.spd.model.service.exceptions.UnsupportedCapabilityException;
import org.gcube.data.spd.model.service.exceptions.UnsupportedPluginException;
import org.gcube.data.spd.model.service.types.PluginDescriptions;
import org.gcube.data.spd.model.util.Capabilities;
import org.gcube.data.spd.plugin.PluginManager;
import org.gcube.data.spd.plugin.PluginUtils;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.Searchable;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.ResultWrapper;
import org.gcube.data.spd.utils.ResultWrapperMantainer;
import org.gcube.data.spd.utils.Utils;
import org.gcube.dataaccess.spql.ParserException;
import org.gcube.dataaccess.spql.SPQLQueryParser;
import org.gcube.dataaccess.spql.model.Query;
import org.gcube.dataaccess.spql.model.ret.ReturnType;
import org.gcube.dataaccess.spql.model.where.ParserCoordinate;
import org.gcube.dataaccess.spql.model.where.ParserDate;
import org.gcube.smartgears.ApplicationManagerProvider;
import org.gcube.smartgears.ContextProvider;
import org.gcube.smartgears.annotations.ManagedBy;
import org.gcube.smartgears.context.application.ApplicationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@ManagedBy(AppInitializer.class)
@Path(Constants.MANAGER_PATH)
public class Manager {

    Logger logger = LoggerFactory.getLogger(Manager.class);

    AppInitializer initializer = (AppInitializer)ApplicationManagerProvider.get();

    private ApplicationContext ctx = ContextProvider.get();

    /**
     * Runs a SpQL query and redirects to the locator of the produced resultset.
     *
     * @param query a SpQL query
     * @return a temporary redirect to the resultset produced by the search
     * @throws QueryNotValidException if the query cannot be parsed
     */
    @GET
    @Path("search")
    public Response search(@QueryParam("query") String query) throws QueryNotValidException, UnsupportedPluginException, UnsupportedCapabilityException {

        Query result;
        logger.trace("submitted query is "+query);
        try{
            result = SPQLQueryParser.parse(query);
        }catch (ParserException e) {
            StringBuilder builder = new StringBuilder();
            builder.append("syntax error on query ("+query+") : ");
            for (String error : e.getErrors())
                builder.append(error).append(" ; ");
            logger.error(builder.toString());
            throw new QueryNotValidException(builder.toString());
        }

        String locator;

        try{

            boolean selectedAllSupportedPlugin = result.getDatasources().size()==0;

            Collection<AbstractPlugin> plugins = !selectedAllSupportedPlugin ? PluginUtils.getPluginsSubList(result.getDatasources(), initializer.getPluginManager().plugins()) :
                initializer.getPluginManager().plugins().values();

            Condition[] conditions = new Condition[0];

            if (result.getWhereExpression() != null)
                conditions = evaluateConditions(result.getWhereExpression().getConditions());

            ReturnType returnType = result.getReturnType();
            if (returnType == null) returnType = ReturnType.PRODUCT;

            logger.trace("RETURN TYPE IS {} ",returnType);

            switch (returnType) {

            case OCCURRENCE:{

                Set<AbstractPlugin> pluginsPerCapability = initializer.getPluginManager().getPluginsPerCapability(Capabilities.Occurrence, plugins);
                logger.trace("searching in plugins {} ",pluginsPerCapability);
                if (pluginsPerCapability.size()==0) throw new UnsupportedCapabilityException();

                Map<String, Searchable<OccurrencePoint>> searchableMapping = new HashMap<String, Searchable<OccurrencePoint>>();
                for (AbstractPlugin plugin: pluginsPerCapability)
                    searchableMapping.put(plugin.getRepositoryName(), plugin.getOccurrencesInterface());

                ResultWrapper<OccurrencePoint> wrapper = ResultWrapperMantainer.getWrapper(OccurrencePoint.class);
                locator = wrapper.getLocator();

                Search<OccurrencePoint> search = new Search<OccurrencePoint>(wrapper, initializer.getPluginManager().plugins(), OccurrenceWriterManager.class, new QueryCacheFactory<OccurrencePoint>(ctx.configuration().persistence().location()));
                search.search(searchableMapping, result, conditions);
                break;
            }
            case PRODUCT:{
                logger.trace("searching in plugins {} ",plugins);
                Map<String, Searchable<ResultItem>> searchableMapping = new HashMap<String, Searchable<ResultItem>>();
                for (AbstractPlugin plugin: plugins)
                    searchableMapping.put(plugin.getRepositoryName(), plugin);

                ResultWrapper<ResultItem> wrapper = ResultWrapperMantainer.getWrapper(ResultItem.class);
                locator = wrapper.getLocator();
                Search<ResultItem> search = new Search<ResultItem>(wrapper, initializer.getPluginManager().plugins(), ResultItemWriterManager.class, new QueryCacheFactory<ResultItem>(ctx.configuration().persistence().location()));
                search.search(searchableMapping, result, conditions);
                break;
            }
            case TAXON:{
                Set<AbstractPlugin> pluginsPerCapability = initializer.getPluginManager().getPluginsPerCapability(Capabilities.Classification, plugins);
                logger.trace("searching in plugins {} ",pluginsPerCapability);
                if (pluginsPerCapability.size()==0) throw new UnsupportedCapabilityException();

                Map<String, Searchable<TaxonomyItem>> searchableMapping = new HashMap<String, Searchable<TaxonomyItem>>();
                for (AbstractPlugin plugin: pluginsPerCapability)
                    searchableMapping.put(plugin.getRepositoryName(), plugin.getClassificationInterface());

                ResultWrapper<TaxonomyItem> wrapper = ResultWrapperMantainer.getWrapper(TaxonomyItem.class);
                locator = wrapper.getLocator();

                Search<TaxonomyItem> search = new Search<TaxonomyItem>(wrapper, initializer.getPluginManager().plugins(), TaxonomyItemWriterManager.class, new QueryCacheFactory<TaxonomyItem>(ctx.configuration().persistence().location()));
                search.search(searchableMapping, result, conditions);
                break;
            }
            default:
                throw new Exception("unexpected behaviour");
            }
        }catch (UnsupportedCapabilityException e) {
            logger.error("unsupported capability error",e);
            throw e;
        }catch (UnsupportedPluginException e) {
            logger.error("unsupported plugin error",e);
            throw e;
        }catch (Exception e) {
            logger.error("error submitting search",e);
            throw new RuntimeException("error submitting search", e);
        }

        // the output will probably be returned even before
        // a first chunk is written by the new thread
        StringBuilder redirectUri = new StringBuilder();
        redirectUri.append("http://").append(ctx.container().configuration().hostname()).append(":").append(ctx.container().configuration().port());
        redirectUri.append(ctx.application().getContextPath()).append(Constants.APPLICATION_ROOT_PATH).append("/").append(Constants.RESULTSET_PATH).append("/").append(locator);
        logger.trace("redirect uri is {} ",redirectUri.toString());
        try{
            return Response.temporaryRedirect(new URI(redirectUri.toString())).build();
        }catch(Exception e){
            logger.error("invalid redirect uri created",e);
            return Response.serverError().build();
        }
    }

    private Condition[] evaluateConditions(List<org.gcube.dataaccess.spql.model.where.Condition> conditions){
        List<Condition> props = new ArrayList<Condition>();
        for (org.gcube.dataaccess.spql.model.where.Condition condition : conditions){
            switch (condition.getParameter()) {
            case EVENT_DATE:
                ParserDate parserDate = (ParserDate)condition.getValue();
                Calendar value = parserDate.getValue();
                props.add(new Condition(Conditions.DATE, value, Operator.valueOf(condition.getOperator().name())));
                break;
            case COORDINATE:
                ParserCoordinate parserCoordinate = (ParserCoordinate)condition.getValue();
                Coordinate coordinate = new Coordinate(parserCoordinate.getValue().getLatitude(), parserCoordinate.getValue().getLongitude());
                props.add(new Condition(Conditions.COORDINATE, coordinate, Operator.valueOf(condition.getOperator().name())));
                break;
            default:
                break;
            }
        }
        return props.toArray(new Condition[props.size()]);
    }

    @GET
    @Path("providers")
    @Produces(MediaType.APPLICATION_XML)
    public PluginDescriptions getSupportedPlugins(){
        logger.trace("calling providers method");
        PluginManager pluginManager = initializer.getPluginManager();
        List<PluginDescription> descriptions = new ArrayList<PluginDescription>();
        try{
            for (AbstractPlugin plugin : pluginManager.plugins().values())
                descriptions.add(Utils.getPluginDescription(plugin));

            logger.trace("returning "+descriptions.size()+" descriptions");
        }catch(Exception e){
            logger.error("error producing descriptions", e);
        }
        return new PluginDescriptions(descriptions);
    }

}
@ -0,0 +1,258 @@
package org.gcube.data.spd.resources;

import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;

import org.gcube.common.authorization.library.AuthorizedTasks;
import org.gcube.data.spd.exception.MaxRetriesReachedException;
import org.gcube.data.spd.manager.AppInitializer;
import org.gcube.data.spd.manager.OccurrenceWriterManager;
import org.gcube.data.spd.model.Constants;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.model.exceptions.IdNotValidException;
import org.gcube.data.spd.model.exceptions.StreamBlockingException;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.plugin.PluginManager;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.capabilities.OccurrencesCapability;
import org.gcube.data.spd.plugin.fwk.readers.LocalReader;
import org.gcube.data.spd.plugin.fwk.util.Util;
import org.gcube.data.spd.plugin.fwk.writers.Writer;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.AbstractWrapper;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.LocalWrapper;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.ResultWrapper;
import org.gcube.data.spd.utils.ExecutorsContainer;
import org.gcube.data.spd.utils.QueryRetryCall;
import org.gcube.data.spd.utils.ResultWrapperMantainer;
import org.gcube.data.spd.utils.VOID;
import org.gcube.data.streams.Stream;
import org.gcube.data.streams.dsl.Streams;
import org.gcube.smartgears.ApplicationManagerProvider;
import org.gcube.smartgears.ContextProvider;
import org.gcube.smartgears.context.application.ApplicationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Path("occurrence")
public class Occurrences{

    Logger logger = LoggerFactory.getLogger(Occurrences.class);

    ApplicationContext ctx = ContextProvider.get();

    AppInitializer initializer = (AppInitializer) ApplicationManagerProvider.get(AppInitializer.class);

    public enum ExecType {
        IDS,
        KEYS
    }

    @GET
    @Path("keys")
    public Response getByKeys(@QueryParam("keys") List<String> keys) {
        try{
            logger.trace("keys arrived are {} ", keys);

            Stream<String> reader = Streams.convert(keys.iterator());

            ResultWrapper<OccurrencePoint> wrapper = ResultWrapperMantainer.getWrapper(OccurrencePoint.class);

            logger.trace("entering in the getOccurrence by productKeys with keys {}", keys);
            ExecutorsContainer.execJob(AuthorizedTasks.bind(new RunnableOccurrenceSearch(reader, wrapper, ExecType.KEYS)));

            // the output will probably be returned even before
            // a first chunk is written by the new thread
            StringBuilder redirectUri = new StringBuilder();
            redirectUri.append("http://").append(ctx.container().configuration().hostname()).append(":").append(ctx.container().configuration().port());
            redirectUri.append(ctx.application().getContextPath()).append(Constants.APPLICATION_ROOT_PATH).append("/").append(Constants.RESULTSET_PATH).append("/").append(wrapper.getLocator());
            logger.trace("redirect uri is {} ", redirectUri.toString());
            try{
                return Response.temporaryRedirect(new URI(redirectUri.toString())).build();
            }catch(Exception e){
                logger.error("invalid redirect uri created", e);
                return Response.serverError().build();
            }
        }catch (Exception e) {
            logger.error("error getting occurrences by keys", e);
            throw new RuntimeException(e);
        }
    }

    @GET
    @Path("ids")
    public Response getByIds(@QueryParam("ids") List<String> ids){
        try{
            Stream<String> reader = Streams.convert(ids.iterator());

            ResultWrapper<OccurrencePoint> wrapper = ResultWrapperMantainer.getWrapper(OccurrencePoint.class);
            ExecutorsContainer.execJob(AuthorizedTasks.bind(new RunnableOccurrenceSearch(reader, wrapper, ExecType.IDS)));
            // the output will probably be returned even before
            // a first chunk is written by the new thread
            StringBuilder redirectUri = new StringBuilder();
            redirectUri.append("http://").append(ctx.container().configuration().hostname()).append(":").append(ctx.container().configuration().port());
            redirectUri.append(ctx.application().getContextPath()).append(Constants.APPLICATION_ROOT_PATH).append("/").append(Constants.RESULTSET_PATH).append("/").append(wrapper.getLocator());
            logger.trace("redirect uri is {} ", redirectUri.toString());
            try{
                return Response.temporaryRedirect(new URI(redirectUri.toString())).build();
            }catch(Exception e){
                logger.error("invalid redirect uri created", e);
                return Response.serverError().build();
            }
        }catch (Exception e) {
            logger.error("error getting occurrences by ids", e);
            throw new RuntimeException(e);
        }
    }

    public class RunnableOccurrenceSearch implements Runnable{

        private Stream<String> reader;
        private ResultWrapper<OccurrencePoint> wrapper;
        private ExecType execType;

        public RunnableOccurrenceSearch(Stream<String> reader,
                ResultWrapper<OccurrencePoint> wrapper, ExecType execType) {
            super();
            this.reader = reader;
            this.wrapper = wrapper;
            this.execType = execType;
        }

        @Override
        public void run(){
            Map<String, Writer<String>> pluginMap = new HashMap<String, Writer<String>>();
            while (reader.hasNext()){
                String key = reader.next();
                try{
                    final String provider = Util.getProviderFromKey(key);
                    String id = Util.getIdFromKey(key);
                    logger.trace("key arrived "+id+" for provider "+provider);
                    if (!pluginMap.containsKey(provider)){
                        final LocalWrapper<String> localWrapper = new LocalWrapper<String>();
                        Writer<String> localWriter = new Writer<String>(localWrapper);
                        //localWriter.register();
                        pluginMap.put(provider, localWriter);
                        if (execType == ExecType.KEYS)
                            ExecutorsContainer.execSearch(AuthorizedTasks.bind(new RunnableOccurrenceByKeys(provider, wrapper, localWrapper)));
                        else ExecutorsContainer.execSearch(AuthorizedTasks.bind(new RunnableOccurrenceByIds(provider, wrapper, localWrapper)));
                    }
                    logger.trace("key put "+id+"? "+(pluginMap.get(provider).write(id)));
                }catch (IdNotValidException e) {
                    logger.warn("the key "+key+" is not valid");
                }
            }
            logger.trace("is wrapper closed? "+wrapper.isClosed());
            if (pluginMap.values().isEmpty())
                wrapper.close();
            else
                for (Writer<String> entry : pluginMap.values())
                    entry.close();
            reader.close();
        }

    }

    public class RunnableOccurrenceByKeys implements Runnable{

        private String provider;
        private AbstractWrapper<OccurrencePoint> wrapper;
        private LocalWrapper<String> localWrapper;

        public RunnableOccurrenceByKeys(String provider,
                AbstractWrapper<OccurrencePoint> wrapper,
                LocalWrapper<String> localWrapper) {
            super();
            this.provider = provider;
            this.wrapper = wrapper;
            this.localWrapper = localWrapper;
        }

        @Override
        public void run(){
            logger.trace("call to provider "+provider);
            final Writer<OccurrencePoint> writer = new Writer<OccurrencePoint>(wrapper, new OccurrenceWriterManager(provider));
            writer.register();
            try {
                new QueryRetryCall(){

                    @Override
                    protected VOID execute() throws ExternalRepositoryException {
                        PluginManager pm = initializer.getPluginManager();
                        AbstractPlugin plugin = pm.plugins().get(provider);
                        OccurrencesCapability oc = plugin.getOccurrencesInterface();
                        oc.getOccurrencesByProductKeys(writer, new LocalReader<String>(localWrapper));
                        return VOID.instance();
                    }

                }.call();
            } catch (MaxRetriesReachedException e) {
                writer.write(new StreamBlockingException(provider));
            }
            writer.close();
            logger.trace("writer is closed? "+(!writer.isAlive()));
        }

    }

    public class RunnableOccurrenceByIds implements Runnable{

        private String provider;
        private AbstractWrapper<OccurrencePoint> wrapper;
        private LocalWrapper<String> localWrapper;

        public RunnableOccurrenceByIds(String provider,
                AbstractWrapper<OccurrencePoint> wrapper,
                LocalWrapper<String> localWrapper) {
            super();
            this.provider = provider;
            this.wrapper = wrapper;
            this.localWrapper = localWrapper;
        }

        @Override
        public void run(){
            logger.trace("call to provider "+provider);
            final Writer<OccurrencePoint> writer = new Writer<OccurrencePoint>(wrapper, new OccurrenceWriterManager(provider));
            writer.register();
            try {
                new QueryRetryCall(){

                    @Override
                    protected VOID execute() throws ExternalRepositoryException {
                        PluginManager pm = initializer.getPluginManager();
                        AbstractPlugin plugin = pm.plugins().get(provider);
                        OccurrencesCapability oc = plugin.getOccurrencesInterface();
                        oc.getOccurrencesByIds(writer, new LocalReader<String>(localWrapper));
                        return VOID.instance();
                    }

                }.call();
            } catch (MaxRetriesReachedException e) {
                writer.write(new StreamBlockingException(provider));
            }

            writer.close();
        }

    }

}
@ -0,0 +1,38 @@
package org.gcube.data.spd.resources;

import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

import org.gcube.data.spd.model.Constants;
import org.gcube.data.spd.utils.ResultWrapperMantainer;
import org.glassfish.jersey.server.ChunkedOutput;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Path(value = Constants.RESULTSET_PATH)
public class ResultSetEndpoint {

    Logger logger = LoggerFactory.getLogger(ResultSetEndpoint.class);

    @GET
    @Produces(MediaType.TEXT_XML)
    @Path("{locator}")
    public ChunkedOutput<String> get(@PathParam("locator") String locator){
        logger.info("requesting locator {} ", locator);
        return ResultWrapperMantainer.getWriterById(locator).getOutput();
    }

    @DELETE
    @Produces(MediaType.TEXT_XML)
    @Path("{locator}")
    public void close(@PathParam("locator") String locator){
        logger.info("removing locator {} ", locator);
        ResultWrapperMantainer.remove(locator);
    }

}
@ -0,0 +1,68 @@
package org.gcube.data.spd.utils;

import java.util.Iterator;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DynamicList implements Iterator<String>{

    private static Logger logger = LoggerFactory.getLogger(DynamicList.class);

    private final static long TIMEOUT_IN_MILLIS = 1000;
    private final static int RETRY = 10;

    private LinkedBlockingQueue<String> internalQueue = new LinkedBlockingQueue<String>(50);

    private boolean closed = false;

    private String nextElement;

    public boolean add(String element){
        if (this.closed) return false;
        return internalQueue.offer(element);
    }

    @Override
    public boolean hasNext(){
        if (this.closed && internalQueue.isEmpty()){
            this.remove();
            return false;
        }
        int _retry = 0;
        String retrievedElement = null;
        while (_retry<RETRY && retrievedElement == null && (!this.closed || !internalQueue.isEmpty()))
            try{
                retrievedElement = internalQueue.poll(TIMEOUT_IN_MILLIS, TimeUnit.MILLISECONDS);
                _retry++;
            } catch (InterruptedException e) {
                logger.warn("interrupted exception arrived", e);
                return false;
            }
        if(retrievedElement==null){
            this.close();
            this.remove();
            logger.trace("no more elements");
            return false;
        } else {
            nextElement = retrievedElement;
            return true;
        }
    }

    @Override
    public String next() {
        return nextElement;
    }

    public void close(){
        this.closed = true;
    }

    @Override
    public void remove(){
        internalQueue = null;
    }

}
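For reference, a minimal sketch of how a DynamicList is meant to be consumed; the producer thread and the element values below are illustrative assumptions, not code from this commit:

// Illustrative sketch only: one producer feeds the list while a consumer drains it.
final DynamicList ids = new DynamicList();

new Thread(new Runnable() {
    @Override
    public void run() {
        ids.add("some-occurrence-key");   // illustrative keys; add() returns false once closed
        ids.add("another-occurrence-key");
        ids.close();                      // tells consumers no more elements will arrive
    }
}).start();

// hasNext() blocks, polling up to RETRY times for TIMEOUT_IN_MILLIS each,
// and returns false once the list is closed and drained.
while (ids.hasNext())
    System.out.println(ids.next());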
@ -0,0 +1,37 @@
package org.gcube.data.spd.utils;

import java.util.HashMap;

public class DynamicMap {

    private HashMap<String, DynamicList> map;

    private static DynamicMap singleton = new DynamicMap();

    public static DynamicList get(String jobId){
        return singleton.map.get(jobId);
    }

    public static DynamicList put(String jobId){
        DynamicList dynamicList = new DynamicList();
        singleton.map.put(jobId, dynamicList);
        return dynamicList;
    }

    public static void remove(String jobId){
        DynamicList dynamicList = singleton.map.get(jobId);
        if (dynamicList != null){
            dynamicList.close();
            singleton.map.remove(jobId);
        }
    }

    private DynamicMap() {
        map = new HashMap<String, DynamicList>();
    }

}
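A short usage sketch, assuming callers coordinate on a shared job identifier; the id "job-42" is hypothetical:

// Illustrative sketch only: "job-42" is a hypothetical job identifier.
DynamicList list = DynamicMap.put("job-42");   // register a list for the job
list.add("first-key");                         // producers can feed it directly...
DynamicMap.get("job-42").add("second-key");    // ...or look it up again by job id
DynamicMap.remove("job-42");                   // closes the list and forgets the job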
@ -0,0 +1,33 @@
package org.gcube.data.spd.utils;

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class ExecutorsContainer {

    private static final int MAX_SEARCH_THREAD_POOL = 100;

    private static final int MAX_JOB_POOL = 10;

    private static ExecutorService searchThreadPool = Executors.newFixedThreadPool(MAX_SEARCH_THREAD_POOL);

    private static ExecutorService jobThreadPool = Executors.newFixedThreadPool(MAX_JOB_POOL);

    public static void execSearch(Runnable runnable){
        searchThreadPool.execute(runnable);
    }

    public static void execJob(Runnable runnable){
        jobThreadPool.execute(runnable);
    }

    public static void stopAll(){
        if (searchThreadPool!=null && jobThreadPool!=null){
            searchThreadPool.shutdownNow();
            jobThreadPool.shutdownNow();
            searchThreadPool = null;
            jobThreadPool = null;
        }
    }
}
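The two pools mirror the fan-out used by the resource classes above: a coordinating task runs on the small job pool and spawns per-provider searches on the large one. A minimal sketch, with hypothetical provider names:

// Illustrative sketch only: provider names are hypothetical.
ExecutorsContainer.execJob(new Runnable() {
    @Override
    public void run() {
        // one coordinating job on the 10-thread pool...
        for (final String provider : new String[]{"providerA", "providerB"}) {
            // ...fans out per-provider work on the 100-thread search pool
            ExecutorsContainer.execSearch(new Runnable() {
                @Override
                public void run() {
                    // query a single external repository here
                }
            });
        }
    }
});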
@ -0,0 +1,67 @@
package org.gcube.data.spd.utils;

import java.io.IOException;

import org.gcube.data.spd.plugin.fwk.writers.RecordWriter;
import org.glassfish.jersey.server.ChunkedOutput;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public abstract class JerseyWriter<T,K> implements RecordWriter<T> {

    Logger logger = LoggerFactory.getLogger(JerseyWriter.class);

    private ChunkedOutput<K> output;

    private boolean isFirst = true;

    public JerseyWriter(ChunkedOutput<K> out) {
        this.output = out;
    }

    @Override
    public boolean put(T element) {
        try {
            K convertedElement = convert(element);
            if (isFirst){
                output.write(header());
                isFirst = false;
            }
            output.write(convertedElement);
            return true;
        } catch (IOException e) {
            logger.warn("error writing element", e);
            return false;
        }
    }

    @Override
    public boolean put(Exception error) {
        return true;
    }

    @Override
    public void close() {
        if (!this.isClosed()){
            logger.info("closing the writer");
            try {
                if (isFirst) output.write(header());
                this.output.write(footer());
                this.output.close();
                this.output = null;
            } catch (IOException e) {
                logger.warn("error closing output", e);
            }
        }
    }

    @Override
    public boolean isClosed() {
        return this.output==null || output.isClosed();
    }

    public abstract K convert(T input);

    public K header(){ return null; }

    public K footer(){ return null; }

}
@ -0,0 +1,16 @@
package org.gcube.data.spd.utils;

import org.gcube.data.spd.Constants;

public abstract class JobRetryCall<T, E extends Throwable> extends RetryCall<T, E> {

    public JobRetryCall(){
        super(Constants.JOB_CALL_RETRIES, Constants.RETRY_JOBS_MILLIS);
    }

    @Override
    protected long getWaitTime(int retry, long waitTimeInMillis) {
        return retry*waitTimeInMillis;
    }

}
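A JobRetryCall waits retry * RETRY_JOBS_MILLIS between attempts, i.e. linear backoff, where QueryRetryCall further below uses a constant wait. A usage sketch, in which someRemoteLookup() is a hypothetical stand-in for any plugin call:

// Illustrative sketch only: someRemoteLookup() is hypothetical.
List<TaxonomyItem> items = new JobRetryCall<List<TaxonomyItem>, IdNotValidException>() {
    @Override
    protected List<TaxonomyItem> execute() throws ExternalRepositoryException, IdNotValidException {
        return someRemoteLookup();   // hypothetical helper; an ExternalRepositoryException triggers a retry
    }
}.call();   // throws MaxRetriesReachedException after JOB_CALL_RETRIES failed attempts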
@ -0,0 +1,331 @@
package org.gcube.data.spd.utils;

import static org.gcube.resources.discovery.icclient.ICFactory.clientFor;
import static org.gcube.resources.discovery.icclient.ICFactory.queryFor;

import java.net.URISyntaxException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.UUID;

import org.gcube.common.encryption.StringEncrypter;
import org.gcube.common.resources.gcore.ServiceEndpoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.AccessPoint;
import org.gcube.common.resources.gcore.ServiceEndpoint.Property;
import org.gcube.data.spd.model.PointInfo;
import org.gcube.data.spd.model.service.types.MetadataDetails;
import org.gcube.resources.discovery.client.api.DiscoveryClient;
import org.gcube.resources.discovery.client.queries.api.SimpleQuery;
import org.gcube.spatial.data.geonetwork.LoginLevel;
import org.gcube.spatial.data.geonetwork.iso.GcubeISOMetadata;
import org.gcube.spatial.data.geonetwork.iso.MissingInformationException;
import org.gcube.spatial.data.geonetwork.iso.Thesaurus;
import org.gcube.spatial.data.geonetwork.model.faults.EncryptionException;
import org.gcube.spatial.data.gis.GISInterface;
import org.gcube.spatial.data.gis.model.report.PublishResponse;
import org.gcube.spatial.data.gis.model.report.Report.OperationState;
import org.geotoolkit.metadata.iso.extent.DefaultExtent;
import org.opengis.metadata.Metadata;
import org.opengis.metadata.citation.PresentationForm;
import org.opengis.metadata.identification.TopicCategory;
import org.opengis.metadata.spatial.GeometricObjectType;
import org.opengis.metadata.spatial.TopologyLevel;

import it.geosolutions.geoserver.rest.encoder.GSLayerEncoder;
import it.geosolutions.geoserver.rest.encoder.feature.GSFeatureTypeEncoder;
import lombok.Data;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;

@Slf4j
public class MapUtils {

    private static final String CRS = "GEOGCS[\"WGS 84\", DATUM[\"World Geodetic System 1984\", SPHEROID[\"WGS 84\", 6378137.0, 298.257223563, AUTHORITY[\"EPSG\",\"7030\"]],"+
            "AUTHORITY[\"EPSG\",\"6326\"]], PRIMEM[\"Greenwich\", 0.0, AUTHORITY[\"EPSG\",\"8901\"]], UNIT[\"degree\", 0.017453292519943295],"+
            "AXIS[\"Geodetic longitude\", EAST], AXIS[\"Geodetic latitude\", NORTH], AUTHORITY[\"EPSG\",\"4326\"]]";

    @Data
    public static class LayerCreationOptions{
        @NonNull
        private String workspace;
        @NonNull
        private String defaultStyle;
        @NonNull
        private String store;
        //* GN
        @NonNull
        private String layerCategory;
        @NonNull
        private Boolean publishAsParentContext;
        @NonNull
        private Boolean accessibleParentContexts;
    }

    @Data
    public static class DataBaseDescription{
        @NonNull
        private String databaseEndpoint;
        @NonNull
        private String user;
        @NonNull
        private String password;
    }

    @Data
    public static class Map{
        @NonNull
        private String layerUUID;
        @NonNull
        private String featureType;
        @NonNull
        private String databaseTable;
    }

    public static final Map publishLayerByCoords(MetadataDetails metadata, Collection<PointInfo> points, Boolean publishAsParentContext, Boolean accessibleParentContexts) throws Exception{
        DataBaseDescription db = loadDB();
        LayerCreationOptions layerOpts = loadOptions(publishAsParentContext, accessibleParentContexts);
        return publishLayerByCoords(db, layerOpts, metadata, points);
    }

    public static final Map publishLayerByCoords(DataBaseDescription db, LayerCreationOptions layerOptions, MetadataDetails metadata, Collection<PointInfo> points) throws Exception{
        if(points==null||points.isEmpty()) throw new Exception("Empty or null collection cannot be a layer");
        String tableName=null;
        try{
            log.trace("Generating layer by points");
            tableName=createPointTable(db, points);
            log.debug("Created table {} in {} ",tableName,db);
            PublishResponse resp=createLayer(layerOptions, metadata, tableName);
            log.debug("Publish response output {} ",resp);

            if(!resp.getDataOperationResult().equals(OperationState.COMPLETE)){
                throw new Exception("Errors while publishing layer. Messages are : "+resp.getDataOperationMessages());
            }else if(!resp.getMetaOperationResult().equals(OperationState.COMPLETE)){
                throw new Exception("Errors while publishing layer metadata. Messages are : "+resp.getMetaOperationMessages());
            }else {
                String uuid=resp.getPublishedMetadata().getFileIdentifier();
                log.trace("Generated layer {} ",uuid);
                return new Map(uuid, tableName, tableName);
            }

        }catch(Exception e){
            log.trace("Unexpected errors while publishing layer. Throwing exception {} ",e.getMessage());
            if(tableName!=null){
                log.debug("Dropping created postgis table {} ",tableName);
                dropTable(tableName, db);
            }
            throw e;
        }
    }

    private static final boolean dropTable(String tableName, DataBaseDescription db){
        Connection conn=null;
        try{
            conn=connect(db);
            conn.createStatement().execute("DROP TABLE "+tableName);
            return true;
        }catch(Exception e){
            log.warn("Unable to drop table {}.",tableName,e);
            return false;
        }finally{
            closeQuietly(conn);
        }
    }

    private static final Connection connect(DataBaseDescription db) throws SQLException{
        // String dbUrl="jdbc:postgresql://"+db.getHost()+":"+db.getPort()+"/"+db.getDatabaseName();
        log.debug("Connecting to {}, user : {} ",db.getDatabaseEndpoint(),db.getUser());
        try{
            Class.forName("org.postgresql.Driver");
        }catch(Exception e){
            throw new RuntimeException(e);
        }
        return DriverManager.getConnection(db.getDatabaseEndpoint(),db.getUser(),db.getPassword());
    }

    private static final String createPointTable(DataBaseDescription db, Collection<PointInfo> points) throws SQLException{
        Connection conn=null;
        PreparedStatement psInsert=null;
        try{
            conn=connect(db);
            conn.setAutoCommit(false);
            String tableName="spd"+UUID.randomUUID().toString().replace("-", "");
            String createStatement="CREATE TABLE "+tableName+" (the_geom geometry)";
            log.debug("Executing {} ",createStatement);
            conn.createStatement().execute(createStatement);
            psInsert=conn.prepareStatement("INSERT INTO "+tableName+" (the_geom) VALUES( ST_GeomFromText(?, 4326))");
            log.debug("Gonna execute insert..");
            long count=0L;
            for(PointInfo point : points){
                psInsert.setString(1, "POINT("+point.getX()+" "+point.getY()+")"); // e.g. POINT(-71.060316 48.432044)
                count+=psInsert.executeUpdate();
            }
            conn.commit();
            log.debug("inserted {} / {} entries in table {}. Closing connection to db..", count,points.size(),tableName);

            return tableName;
        }catch(Throwable t){
            log.error("Unable to create table.",t);
            throw new SQLException("Rethrown exception, unable to create table.",t);
        }finally{
            closeQuietly(psInsert);
            closeQuietly(conn);
        }
    }

    private static void closeQuietly(AutoCloseable toClose){
        if(toClose!=null){
            try {
                toClose.close();
            } catch (Exception e) {
                log.debug("Exception while closing... ",e);
            }
        }
    }

    private static final PublishResponse createLayer(LayerCreationOptions layerOpt, MetadataDetails details, String tableName) throws URISyntaxException, MissingInformationException, Exception{

        GSFeatureTypeEncoder fte=new GSFeatureTypeEncoder();
        fte.setEnabled(true);
        fte.setLatLonBoundingBox(-180.0, -90.0, 180.0, 90.0, CRS);
        fte.setName(tableName);
        fte.setNativeCRS(CRS);

        GSLayerEncoder le=new GSLayerEncoder();
        le.setDefaultStyle(layerOpt.getDefaultStyle());
        le.setEnabled(true);

        log.debug("Generating meta for layer table {}. Meta parameters are {}",tableName,details);
        Metadata meta=fillMeta(details).getMetadata();

        GISInterface gis=GISInterface.get();

        log.trace("Publishing layer from table {} with options {} ",tableName,layerOpt);

        LoginLevel login= layerOpt.getAccessibleParentContexts()?LoginLevel.SCOPE:LoginLevel.PRIVATE;

        return gis.publishDBTable(layerOpt.getWorkspace(),layerOpt.getStore(), fte, le,
                meta, layerOpt.getLayerCategory(), "_none_", login, layerOpt.getPublishAsParentContext());
    }

    private static GcubeISOMetadata fillMeta(MetadataDetails metaDetails) throws Exception{
        GcubeISOMetadata meta=new GcubeISOMetadata();
        meta.setAbstractField(metaDetails.getAbstractField());
        meta.setCreationDate(new Date(System.currentTimeMillis()));
        meta.setExtent((DefaultExtent) DefaultExtent.WORLD);
        meta.setGeometricObjectType(GeometricObjectType.SURFACE);
        meta.setPresentationForm(PresentationForm.MAP_DIGITAL);
        meta.setPurpose(metaDetails.getPurpose());
        meta.setResolution(0.5d);
        meta.setTitle(metaDetails.getTitle());
        meta.setTopologyLevel(TopologyLevel.GEOMETRY_ONLY);
        meta.setUser(metaDetails.getAuthor());

        meta.addCredits(metaDetails.getCredits());
        List<String> keywords=metaDetails.getKeywords();
        if(keywords!=null&&!keywords.isEmpty()){
            Thesaurus generalThesaurus=meta.getConfig().getThesauri().get("General");
            for(String key : keywords)
                meta.addKeyword(key, generalThesaurus);
        }
        meta.addTopicCategory(TopicCategory.BIOTA);
        return meta;
    }

    //******************* IS QUERIES

    public static final DataBaseDescription loadDB() throws Exception{
        SimpleQuery query = queryFor(ServiceEndpoint.class);

        query.addCondition("$resource/Profile/Category/text() eq 'Gis'")
            .addCondition("$resource/Profile/Name/text() eq 'TimeSeriesDataStore'")
            .setResult("$resource/Profile/AccessPoint");

        DiscoveryClient<AccessPoint> client = clientFor(AccessPoint.class);

        List<AccessPoint> accesspoints = client.submit(query);
        DataBaseDescription toReturn=null;
        for (AccessPoint point : accesspoints) {
            if (point.name().equals("jdbc")){
                toReturn=new DataBaseDescription(point.address(), point.username(), decrypt(point.password()));
                break;
            }
        }

        if(toReturn==null) throw new Exception("Database info not found in current scope");
        return toReturn;
    }

    public static final LayerCreationOptions loadOptions(Boolean publishAsParentContext, Boolean accessibleParentContexts) throws Exception{
        SimpleQuery query = queryFor(ServiceEndpoint.class);

        query.addCondition("$resource/Profile/Category/text() eq 'Gis'")
            .addCondition("$resource/Profile/Name/text() eq 'GeoServer'")
            .setResult("$resource/Profile/AccessPoint");

        DiscoveryClient<AccessPoint> client = clientFor(AccessPoint.class);

        List<AccessPoint> accesspoints = client.submit(query);
        LayerCreationOptions toReturn=null;

        for (AccessPoint point : accesspoints) {
            if (point.name().equals("geoserver")){
                java.util.Map<String, Property> properties=point.propertyMap();
                toReturn=new LayerCreationOptions(properties.get("timeseriesWorkspace").value(), "point", properties.get("timeseriesDataStore").value(), "datasets",
                        publishAsParentContext, accessibleParentContexts);
                break;
            }
        }

        if(toReturn==null) throw new Exception("Layer Creation Options not found in current scope");
        return toReturn;

    }

    public static final String decrypt(String toDecrypt) throws EncryptionException{
        try{
            return StringEncrypter.getEncrypter().decrypt(toDecrypt);
        }catch(Exception e){
            throw new EncryptionException(e);
        }
    }

}
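A usage sketch for the publication entry point, assuming the caller already holds the metadata and the point collection; the wrapper method itself is illustrative, not part of this commit:

// Illustrative sketch only: the caller is assumed to supply metadata and points.
static MapUtils.Map publishOccurrenceLayer(MetadataDetails details, Collection<PointInfo> points) throws Exception {
    // resolves the postgis endpoint and GeoServer options from the IS in the current scope,
    // creates a one-column geometry table, publishes it, and drops the table on failure
    MapUtils.Map layer = MapUtils.publishLayerByCoords(details, points, false, true);
    // the returned Map carries the GeoNetwork UUID and the backing table name
    System.out.println("layer " + layer.getLayerUUID() + " on table " + layer.getDatabaseTable());
    return layer;
}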
@ -0,0 +1,36 @@
package org.gcube.data.spd.utils;

import org.gcube.data.spd.Constants;
import org.gcube.data.spd.exception.MaxRetriesReachedException;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;

public abstract class QueryRetryCall extends RetryCall<VOID, Exception>{

    public QueryRetryCall(){
        super(Constants.QUERY_CALL_RETRIES, Constants.RETRY_QUERY_MILLIS);
    }

    @Override
    public VOID call() throws MaxRetriesReachedException {
        try{
            return super.call();
        }catch (MaxRetriesReachedException e) {
            throw e;
        } catch (Exception e) {
            logger.error("unexpected error", e);
        }
        return VOID.instance();
    }

    @Override
    protected abstract VOID execute() throws ExternalRepositoryException;

    @Override
    protected long getWaitTime(int retry, long waitTimeInMillis) {
        return waitTimeInMillis;
    }

}
@ -0,0 +1,572 @@
package org.gcube.data.spd.utils;

import static org.gcube.data.streams.dsl.Streams.convert;

import java.net.URI;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutorService;

import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.spd.Constants;
import org.gcube.data.spd.exception.MaxRetriesReachedException;
import org.gcube.data.spd.manager.AppInitializer;
import org.gcube.data.spd.model.Condition;
import org.gcube.data.spd.model.PluginDescription;
import org.gcube.data.spd.model.binding.Bindings;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.gcube.data.spd.model.exceptions.IdNotValidException;
import org.gcube.data.spd.model.exceptions.StreamBlockingException;
import org.gcube.data.spd.model.exceptions.StreamNonBlockingException;
import org.gcube.data.spd.model.products.OccurrencePoint;
import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.model.products.ResultItem;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.spd.model.service.exceptions.InvalidIdentifierException;
import org.gcube.data.spd.model.service.exceptions.UnsupportedCapabilityException;
import org.gcube.data.spd.model.service.types.PluginDescriptions;
import org.gcube.data.spd.model.service.types.SearchCondition;
import org.gcube.data.spd.model.service.types.SearchRequest;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.gcube.data.spd.plugin.fwk.Searchable;
import org.gcube.data.spd.plugin.fwk.writers.ClosableWriter;
import org.gcube.data.spd.plugin.fwk.writers.Writer;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.ResultWrapper;
import org.gcube.data.streams.Stream;
import org.gcube.smartgears.ApplicationManagerProvider;
import org.gcube.smartgears.ContextProvider;
import org.gcube.smartgears.context.application.ApplicationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.thoughtworks.xstream.XStream;

@Path("remote")
public class RemoteDispatcher {

    private static Logger logger = LoggerFactory.getLogger(RemoteDispatcher.class);

    ApplicationContext ctx = ContextProvider.get();

    AppInitializer initializer = (AppInitializer) ApplicationManagerProvider.get(AppInitializer.class);

    public RemoteDispatcher(){
        super();
    }

    //only for test
    public RemoteDispatcher(AbstractPlugin plugin, ExecutorService executor) {
        this.plugin = plugin;
    }

    //not null only in tests
    AbstractPlugin plugin = null;

    private AbstractPlugin getPlugin(String pluginName){
        if (plugin==null)
            return initializer.getPluginManager().plugins().get(pluginName);
        else return plugin;
    }

    /* (non-Javadoc)
     * @see org.gcube.data.spd.remotedispatcher.RemoteDispatcher#search(org.gcube.data.spd.remotedispatcher.types.SearchRequest)
     */
    /* @GET
    @Path("retrieve/search")
    @Consumes(MediaType.APPLICATION_XML)
    public String search(SearchRequest request)
            throws RemoteException {
        logger.trace("searchByScientificName called in scope "+ScopeProvider.instance.get());

        AbstractPlugin localPlugin = getPlugin(request.getPluginName());
        logger.trace("plugin "+request.getPluginName()+" has been retrieved, is it null? "+(localPlugin==null));
        List<Condition> properties = Collections.emptyList();
        if (request.getProperties()!=null){
            properties = new ArrayList<Condition>(request.getProperties().size());
            for (SearchCondition prop : request.getProperties()){
                Object value = new XStream().fromXML(prop.getValue());
                properties.add(new Condition(prop.getType(), value, prop.getOperator()));
            }
        }
        try{
            if (request.getResultType().equals(Constants.TAXON_RETURN_TYPE)){
                ResultWrapper<TaxonomyItem> wrapper = new ResultWrapper<TaxonomyItem>();
                Writer<TaxonomyItem> writer = new Writer<TaxonomyItem>(wrapper);
                ExecutorsContainer.execSearch(new RemoteSearch<TaxonomyItem>(localPlugin.getClassificationInterface(), writer, request.getWord(), properties));
                return wrapper.getLocator();
            }else if (request.getResultType().equals(Constants.OCCURRENCE_RETURN_TYPE)){
                ResultWrapper<OccurrencePoint> wrapper = new ResultWrapper<OccurrencePoint>();
                Writer<OccurrencePoint> writer = new Writer<OccurrencePoint>(wrapper);
                ExecutorsContainer.execSearch(new RemoteSearch<OccurrencePoint>(localPlugin.getOccurrencesInterface(), writer, request.getWord(), properties));
                return wrapper.getLocator();
            }else {
                ResultWrapper<ResultItem> wrapper = new ResultWrapper<ResultItem>();
                Writer<ResultItem> writer = new Writer<ResultItem>(wrapper);
                ExecutorsContainer.execSearch(new RemoteSearch<ResultItem>(localPlugin, writer, request.getWord(), properties));
                return wrapper.getLocator();
            }
        }catch (Exception e) {
            logger.error("search error for remote plugin", e);
            throw new RemoteException(e.getMessage());
        }

    }

    //TAXON functions

    (non-Javadoc)
    * @see org.gcube.data.spd.remotedispatcher.RemoteDispatcher#getSynonymsById(java.lang.String, java.lang.String)

    @GET
    @Path("taxon/synonyms")
    public String getSynonymsById(@QueryParam("id") final String id, @QueryParam("plugin") String pluginName)
            throws RemoteException, InvalidIdentifierException {
        final AbstractPlugin localPlugin = getPlugin(pluginName);
        try{
            final ResultWrapper<TaxonomyItem> wrapper = new ResultWrapper<TaxonomyItem>();

            ExecutorsContainer.execSearch(new Runnable() {

                @Override
                public void run() {
                    final Writer<TaxonomyItem> writer = new Writer<TaxonomyItem>(wrapper);
                    try {
                        new JobRetryCall<VOID, Exception>() {

                            @Override
                            protected VOID execute() throws ExternalRepositoryException, Exception {
                                localPlugin.getClassificationInterface().getSynonymnsById(writer, id);
                                return VOID.instance();
                            }
                        }.call();
                    } catch (Exception e) {
                        logger.error("getSynonymsById for remote plugin", e);
                        writer.write(new StreamBlockingException(localPlugin.getRepositoryName(), id));
                    } finally{
                        writer.close();
                    }
                }
            });

            return wrapper.getLocator();
        } catch (Exception e) {
            logger.error("error getting locator ", e);
            throw new RemoteException(e.getMessage());
        }
    }

    (non-Javadoc)
    * @see org.gcube.data.spd.remotedispatcher.RemoteDispatcher#retrieveTaxonChildrenByTaxonId(java.lang.String, java.lang.String)

    @GET
    @Path("taxon/children/{key}")
    public String retrieveTaxonChildrenByTaxonId(
            @PathParam("id") final String id, @QueryParam("plugin") final String pluginName)
            throws RemoteException, InvalidIdentifierException {
        final AbstractPlugin localPlugin = getPlugin(pluginName);
        try{
            final ResultWrapper<TaxonomyItem> wrapper = new ResultWrapper<TaxonomyItem>();

            ExecutorsContainer.execSearch(new Runnable() {

                @Override
                public void run() {
                    Writer<TaxonomyItem> writer = new Writer<TaxonomyItem>(wrapper);
                    try {

                        List<TaxonomyItem> items = new JobRetryCall<List<TaxonomyItem>, IdNotValidException>() {

                            @Override
                            protected List<TaxonomyItem> execute() throws ExternalRepositoryException, IdNotValidException {
                                return localPlugin.getClassificationInterface().retrieveTaxonChildrenByTaxonId(id);
                            }
                        }.call();

                        for (TaxonomyItem item : items)
                            writer.write(item);
                    } catch (Exception e) {
                        logger.error("error retrieving children by id", e);
                        writer.write(new StreamBlockingException(localPlugin.getRepositoryName(), id));
                    }finally{
                        writer.close();
                    }
                }
            });

            return wrapper.getLocator();
        } catch (Exception e) {
            logger.error("error getting locator ", e);
            throw new RemoteException(e.getMessage());
        }
    }

    (non-Javadoc)
    * @see org.gcube.data.spd.remotedispatcher.RemoteDispatcher#retrieveTaxaByIds(java.lang.String, java.lang.String)

    @GET
    @Path("taxon/tree/{plugin}/{key}")
    public String retrieveTaxaByIds(@PathParam("key") final String idsLocator, @PathParam("plugin") String pluginName)
            throws RemoteException {
        final AbstractPlugin localPlugin = getPlugin(pluginName);
        try{
            final ResultWrapper<TaxonomyItem> wrapper = new ResultWrapper<TaxonomyItem>();

            ExecutorsContainer.execSearch(new Runnable() {

                @Override
                public void run() {
                    final Stream<String> idsStream = convert(URI.create(idsLocator)).ofStrings().withDefaults();
                    final Writer<TaxonomyItem> writer = new Writer<TaxonomyItem>(wrapper);
                    try {
                        new JobRetryCall<VOID, Exception>() {

                            @Override
                            protected VOID execute()
                                    throws ExternalRepositoryException, Exception {
                                localPlugin.getClassificationInterface().retrieveTaxonByIds(idsStream, writer);
                                return VOID.instance();
                            }

                        }.call();
                    } catch (Exception e) {
                        writer.write(new StreamBlockingException(localPlugin.getRepositoryName()));
                    }
                    writer.close();
                }
            });

            return wrapper.getLocator();
        } catch (Exception e) {
            logger.error("error getting locator ", e);
            throw new RemoteException(e.getMessage());
        }
    }

    (non-Javadoc)
    * @see org.gcube.data.spd.remotedispatcher.RemoteDispatcher#getTaxonById(java.lang.String, java.lang.String)

    @GET
    @Path("taxon/ids/{idsLocator}")
    public String getTaxonById(@PathParam("idsLocator") final String id, @QueryParam("plugin") String pluginName)
            throws RemoteException, InvalidIdentifierException {
        AbstractPlugin plugin = getPlugin(pluginName);
        try {
            return Bindings.toXml(plugin.getClassificationInterface().retrieveTaxonById(id));
        } catch (IdNotValidException e) {
            logger.error("error in getTaxonById", e);
            throw new InvalidIdentifierException();
        } catch (Exception e) {
            logger.error("error in getTaxonById", e);
            throw new RemoteException(e.getMessage());
        }
    }

    //END: TAXON functions


    //occurrence functions

    (non-Javadoc)
    * @see org.gcube.data.spd.remotedispatcher.RemoteDispatcher#getOccurrencesByProductKeys(java.lang.String, java.lang.String)

    @GET
    @Path("occurrence/keys/{productKeysLocator}")
    public String getOccurrencesByProductKeys(
            @PathParam("productKeysLocator") final String productKeysLocator, @QueryParam("plugin") String pluginName) throws RemoteException {
        final AbstractPlugin localPlugin = getPlugin(pluginName);
        try{
            final ResultWrapper<OccurrencePoint> wrapper = new ResultWrapper<OccurrencePoint>();
            ExecutorsContainer.execSearch(new Runnable() {
                @Override
                public void run() {
                    logger.debug("searching remote occurrence for plugin "+localPlugin.getRepositoryName());
                    final Stream<String> keysStream = convert(URI.create(productKeysLocator)).ofStrings().withDefaults();
                    final Writer<OccurrencePoint> writer = new Writer<OccurrencePoint>(wrapper);
                    try {
                        new JobRetryCall<VOID, Exception>() {

                            @Override
                            protected VOID execute()
                                    throws ExternalRepositoryException, Exception {
                                localPlugin.getOccurrencesInterface().getOccurrencesByProductKeys(writer, keysStream);
                                return VOID.instance();
                            }

                        }.call();
                    } catch (Exception e) {
                        writer.write(new StreamBlockingException(localPlugin.getRepositoryName()));
                    }
                    writer.close();
                }
            });
            return wrapper.getLocator();
        } catch (Exception e) {
            logger.error("error getting locator ", e);
            throw new RemoteException(e.getMessage());
        }
    }

    (non-Javadoc)
    * @see org.gcube.data.spd.remotedispatcher.RemoteDispatcher#getOccurrencesByIds(java.lang.String, java.lang.String)

    @GET
    @Path("occurrence/ids/{idsLocator}")
    public String getOccurrencesByIds(@PathParam("idsLocator") final String idsLocator, @QueryParam("plugin") String pluginName)
            throws RemoteException {
        final AbstractPlugin localPlugin = getPlugin(pluginName);
        try{
            final ResultWrapper<OccurrencePoint> wrapper = new ResultWrapper<OccurrencePoint>();
            ExecutorsContainer.execSearch(new Runnable() {
                @Override
                public void run() {
                    final Stream<String> idsStream = convert(URI.create(idsLocator)).ofStrings().withDefaults();
                    final Writer<OccurrencePoint> writer = new Writer<OccurrencePoint>(wrapper);
                    try {
                        new JobRetryCall<VOID, Exception>() {

                            @Override
                            protected VOID execute()
                                    throws ExternalRepositoryException, Exception {
                                localPlugin.getOccurrencesInterface().getOccurrencesByIds(writer, idsStream);
                                return VOID.instance();
                            }

                        }.call();
                    } catch (Exception e) {
                        writer.write(new StreamBlockingException(localPlugin.getRepositoryName()));
                    }

                    writer.close();
                }
            });
            return wrapper.getLocator();
        } catch (Exception e) {
            logger.error("error getting locator ", e);
            throw new RemoteException(e.getMessage());
        }
    }

    //END : occurrence functions


    //RESOLVE CAPABILITIES

    (non-Javadoc)
    * @see org.gcube.data.spd.remotedispatcher.RemoteDispatcher#namesMapping(java.lang.String, java.lang.String)

    @GET
    @Path("extensions/mapping/{commonName}")
    public String namesMapping(@PathParam("commonName") final String commonName, @QueryParam("name") String pluginName)
            throws RemoteException {
        logger.trace("requesting plugin "+pluginName);
        final AbstractPlugin localPlugin = getPlugin(pluginName);
        if (localPlugin==null) throw new RemoteException("error executing namesMapping on "+pluginName);
        try{
            final ResultWrapper<String> wrapper = new ResultWrapper<String>();

            ExecutorsContainer.execSearch(new Runnable() {
                @Override
                public void run() {
                    final Writer<String> writer = new Writer<String>(wrapper);
                    logger.trace("calling names mapping on "+localPlugin.getRepositoryName());

                    try{
                        new QueryRetryCall(){

                            @Override
                            protected VOID execute()
                                    throws ExternalRepositoryException {
                                localPlugin.getMappingInterface().getRelatedScientificNames(writer, commonName);
                                return VOID.instance();
                            }

                        }.call();

                    } catch (MaxRetriesReachedException e) {
                        logger.error("error retrieving namesMapping on remote plugin", e);
                        writer.write(new StreamBlockingException(localPlugin.getRepositoryName()));
                    }finally{
                        writer.close();
                    }
                }
            });
            return wrapper.getLocator();
        } catch (Exception e) {
            logger.error("error getting locator ", e);
            throw new RemoteException(e.getMessage());
        }
    }

    //END : RESOLVE CAPABILITIES

    //EXPAND CAPABILITIES

    (non-Javadoc)
    * @see org.gcube.data.spd.remotedispatcher.RemoteDispatcher#expandWithSynonyms(java.lang.String, java.lang.String)

    @GET
    @Path("extensions/expand/{scientificName}")
    public String expandWithSynonyms(@PathParam("scientificName") final String scientificName, @QueryParam("plugin") String pluginName)
            throws RemoteException {
        final AbstractPlugin localPlugin = getPlugin(pluginName);
        try{
            final ResultWrapper<String> wrapper = new ResultWrapper<String>();

            ExecutorsContainer.execSearch(new Runnable() {
                @Override
                public void run() {
                    final Writer<String> writer = new Writer<String>(wrapper);
                    try {
                        //"synonyms expansion is not supported in "+plugin.getRepositoryName()
                        if (localPlugin.getExpansionInterface()==null) throw new UnsupportedCapabilityException();
                        else{
                            new QueryRetryCall(){

                                @Override
                                protected VOID execute()
                                        throws ExternalRepositoryException {
                                    localPlugin.getExpansionInterface().getSynonyms(writer, scientificName);
                                    return VOID.instance();
                                }
                            }.call();
                        }
                    } catch (Exception e) {
                        logger.error("error getting synonyms for remote plugin", e);
                    }finally{
                        writer.close();
                    }
                }
            });
            return wrapper.getLocator();
        } catch (Exception e) {
            logger.error("error getting locator ", e);
            throw new RemoteException(e.getMessage());
        }
    }

    //END: EXPAND CAPABILITIES

    //UNFOLD CAPABILITIES

    (non-Javadoc)
    * @see org.gcube.data.spd.remotedispatcher.RemoteDispatcher#unfold(java.lang.String, java.lang.String)

    @GET
    @Path("extensions/unfold/{scientificName}")
    public String unfold(@PathParam("scientificName") final String scientificName, @QueryParam("plugin") String pluginName)
            throws RemoteException {
        final AbstractPlugin localPlugin = getPlugin(pluginName);
        try{
            final ResultWrapper<String> wrapper = new ResultWrapper<String>();

            ExecutorsContainer.execSearch(new Runnable() {
                @Override
                public void run() {
                    final Writer<String> writer = new Writer<String>(wrapper);
                    try {
                        //"unfold is not supported in "+plugin.getRepositoryName()
                        if (localPlugin.getUnfoldInterface()==null) throw new UnsupportedCapabilityException();
                        else{
                            new QueryRetryCall(){

                                @Override
                                protected VOID execute()
                                        throws ExternalRepositoryException {
                                    localPlugin.getUnfoldInterface().unfold(writer, scientificName);
                                    return VOID.instance();
                                }
                            }.call();
                        }
                    } catch (Exception e) {
                        logger.error("error getting synonyms for remote plugin", e);
                    }finally{
                        writer.close();
                    }
                }
            });
            return wrapper.getLocator();
        } catch (Exception e) {
            logger.error("error getting locator ", e);
            throw new RemoteException(e.getMessage());
        }
    }

    //END: UNFOLD CAPABILITIES

    class RemoteSearch<T extends ResultElement> implements Runnable {

        private final Searchable<T> searchable;
        private final ClosableWriter<T> writer;
        private final String word;
        private final Condition[] conditions;

        public RemoteSearch(Searchable<T> searchable,
                ClosableWriter<T> writer, String word,
                List<Condition> conditions) {
            super();
            this.searchable = searchable;
            this.writer = writer;
            this.word = word;
            this.conditions = new Condition[conditions.size()];
            conditions.toArray(this.conditions);
        }

        public void run() {
            try{
                new QueryRetryCall() {

                    @Override
                    protected VOID execute() throws ExternalRepositoryException {
                        searchable.searchByScientificName(word, writer, conditions);
                        return VOID.instance();
                    }
                }.call();
            } catch (MaxRetriesReachedException e) {
                writer.write(new StreamNonBlockingException(word));
            }finally{
                writer.close();
            }
        }

    }

    @PUT
    @Path("exchange")
    @Consumes(MediaType.APPLICATION_XML)
    @Produces(MediaType.APPLICATION_XML)
    public PluginDescriptions exchangePlugins(PluginDescriptions remotePlugins, @QueryParam("gCoreEndpointId") String gCoreEndpointId)
            throws RemoteException {
        initializer.getPluginManager().addRemotePlugins(remotePlugins.getDescriptions(), gCoreEndpointId);
        List<PluginDescription> descriptions = new ArrayList<PluginDescription>();
        for (AbstractPlugin plugin : initializer.getPluginManager().plugins().values())
            if(!plugin.isRemote())
                descriptions.add(Utils.getPluginDescription(plugin));
        return new PluginDescriptions(descriptions);

    }

    @DELETE
    @Path("remove/{gCoreEndpointId}")
    public void removeAll(@PathParam("gCoreEndpointId") String gCoreEndpointId)
            throws RemoteException {
        initializer.getPluginManager().removeRemotePlugin(gCoreEndpointId);
    }

    */
}
@ -0,0 +1,71 @@
package org.gcube.data.spd.utils;

import javax.xml.bind.JAXBException;

import org.gcube.data.spd.model.binding.Bindings;
import org.gcube.data.spd.model.products.ResultElement;
import org.glassfish.jersey.server.ChunkedOutput;

public class ResultStreamingThread<T extends ResultElement> extends Thread{

    private JerseyWriter<T, String> writer;
    private ChunkedOutput<String> output;
    private Long startTime;

    public ResultStreamingThread(Class<T> clazz) {
        output = new ChunkedOutput<String>(String.class);
        writer = new JerseyWriter<T, String>(output){

            @Override
            public String convert(T input) {
                try {
                    return "<Result>"+Bindings.toXml(input)+"</Result>";
                } catch (JAXBException e) {
                    throw new RuntimeException(e);
                }
            }

            @Override
            public String header() {
                return "<Results>";
            }

            @Override
            public String footer() {
                return "</Results>";
            }

        };
    }

    @Override
    public void run() {
        this.startTime = System.currentTimeMillis();
        while (!writer.isClosed()){
            try {
                Thread.sleep(10*1000);
            } catch (InterruptedException e) {
                // restore the interrupt flag and stop polling
                Thread.currentThread().interrupt();
                return;
            }
        }
    }

    public JerseyWriter<T, String> getWriter() {
        return writer;
    }

    public ChunkedOutput<String> getOutput() {
        return output;
    }

    public long getStartTime() {
        return startTime;
    }

}
@ -0,0 +1,36 @@
package org.gcube.data.spd.utils;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.gcube.data.spd.model.products.ResultElement;
import org.gcube.data.spd.plugin.fwk.writers.rswrapper.ResultWrapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ResultWrapperMantainer {

	private static Logger log = LoggerFactory.getLogger(ResultWrapperMantainer.class);

	// ConcurrentHashMap: this static registry is read and modified by
	// concurrent request-handling threads.
	private static Map<String, ResultStreamingThread<?>> writerMap = new ConcurrentHashMap<String, ResultStreamingThread<?>>();

	public static <T extends ResultElement> ResultWrapper<T> getWrapper(Class<T> clazz) {
		ResultStreamingThread<T> retrieverThread = new ResultStreamingThread<T>(clazz);
		ResultWrapper<T> wrapper = new ResultWrapper<T>(retrieverThread.getWriter());
		retrieverThread.start();
		writerMap.put(wrapper.getLocator(), retrieverThread);
		return wrapper;
	}

	public static ResultStreamingThread<?> getWriterById(String locator) {
		return writerMap.get(locator);
	}

	public static void remove(String locator) {
		// Single atomic remove instead of containsKey/get/remove, so two
		// concurrent callers cannot both see the entry.
		ResultStreamingThread<?> thread = writerMap.remove(locator);
		if (thread != null)
			thread.getWriter().close();
		else
			log.warn("wrapper already closed");
	}
}
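The intended lifecycle, as a sketch using only the methods visible in this commit; the step that feeds elements through the wrapper is indicated as a comment because ResultWrapper's mutator API is defined elsewhere in the plugin framework:

// Producer side: create a stream keyed by a locator, let a plugin feed it,
// then dispose of the registered streaming thread.
ResultWrapper<ResultElement> wrapper = ResultWrapperMantainer.getWrapper(ResultElement.class);
String locator = wrapper.getLocator();     // handed to the consumer (see the resource sketch above)
// ... the plugin writes ResultElement instances through the wrapper here ...
ResultWrapperMantainer.remove(locator);    // closes the JerseyWriter and unregisters the thread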
@ -0,0 +1,48 @@
package org.gcube.data.spd.utils;

import org.gcube.data.spd.exception.MaxRetriesReachedException;
import org.gcube.data.spd.model.exceptions.ExternalRepositoryException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public abstract class RetryCall<T, E extends Throwable> {

	Logger logger = LoggerFactory.getLogger(RetryCall.class);

	private int retries;
	private long waitTimeInMillis;

	protected RetryCall(int retries, long waitTimeInMillis) {
		super();
		this.retries = retries;
		this.waitTimeInMillis = waitTimeInMillis;
	}

	public RetryCall() {
		super();
	}

	// Invokes execute() until it succeeds or the retry budget is exhausted,
	// waiting getWaitTime(retry, waitTimeInMillis) between attempts.
	public T call() throws MaxRetriesReachedException, E {
		int retry = 0;
		do {
			try {
				return execute();
			} catch (ExternalRepositoryException e) {
				// Increment first so the log message agrees with the loop condition
				// (the original logged "retrying" on the final, non-retried failure).
				retry++;
				logger.warn("error on external repository, " + (retry < retries ? "" : "not ") + "retrying", e);
				try {
					Thread.sleep(getWaitTime(retry, waitTimeInMillis));
				} catch (InterruptedException e1) {
					Thread.currentThread().interrupt();
					break;
				}
			}
		} while (retry < retries);
		throw new MaxRetriesReachedException();
	}

	protected abstract T execute() throws ExternalRepositoryException, E;

	protected abstract long getWaitTime(int retry, long waitTimeInMillis);
}
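A minimal concrete use of RetryCall, subclassed inline as intended; fetchFromRepository() stands in for the real remote invocation and is hypothetical:

// Three attempts, starting from a 1-second wait, doubling each time.
RetryCall<String, ExternalRepositoryException> call =
		new RetryCall<String, ExternalRepositoryException>(3, 1000) {

			@Override
			protected String execute() throws ExternalRepositoryException {
				return fetchFromRepository();   // hypothetical remote call
			}

			// Exponential backoff: retry 1 waits 1s, retry 2 waits 2s, ...
			@Override
			protected long getWaitTime(int retry, long waitTimeInMillis) {
				return waitTimeInMillis << (retry - 1);
			}
		};

String result = call.call();   // throws MaxRetriesReachedException after 3 failed attempts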
@ -0,0 +1,92 @@
package org.gcube.data.spd.utils;

import java.io.File;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.gcube.data.spd.model.Condition;
import org.gcube.data.spd.model.Conditions;
import org.gcube.data.spd.model.Coordinate;
import org.gcube.data.spd.model.PluginDescription;
import org.gcube.data.spd.model.PropertySupport;
import org.gcube.data.spd.model.util.Capabilities;
import org.gcube.data.spd.plugin.fwk.AbstractPlugin;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Utils {

	private static final Logger logger = LoggerFactory.getLogger(Utils.class);

	public static PluginDescription getPluginDescription(AbstractPlugin plugin) {
		PluginDescription description = new PluginDescription(plugin.getRepositoryName(), plugin.getDescription(), plugin.getRepositoryInfo());
		description.setRemote(plugin.isRemote());

		Map<Capabilities, List<Conditions>> capabilityMap = new HashMap<Capabilities, List<Conditions>>();

		for (Capabilities capability : plugin.getSupportedCapabilities()) {
			if (capability.isPropertySupport())
				try {
					// Property-supporting capabilities expose their conditions via a
					// reflective call to the capability's accessor method.
					Set<Conditions> props = ((PropertySupport) plugin.getClass().getDeclaredMethod(capability.getMethod()).invoke(plugin)).getSupportedProperties();
					capabilityMap.put(capability, new ArrayList<Conditions>(props));
				} catch (Exception e) {
					logger.warn("cannot retrieve properties for capability " + capability, e);
				}
			else {
				List<Conditions> emptyConditions = Collections.emptyList();
				capabilityMap.put(capability, emptyConditions);
			}
		}
		description.setSupportedCapabilities(capabilityMap);
		return description;
	}

	// Flattens the sorted conditions into a key string; note that entries are
	// appended without separators.
	public static String getPropsAsString(Condition[] conditions) {
		StringBuilder props = new StringBuilder();
		Arrays.sort(conditions);
		for (Condition cond : conditions) {
			switch (cond.getType()) {
			case COORDINATE:
				Coordinate coord = (Coordinate) cond.getValue();
				props.append("lat=" + coord.getLatitude());
				props.append("long=" + coord.getLongitude());
				props.append("op=" + cond.getOp().name());
				break;
			case DATE:
				Calendar cal = (Calendar) cond.getValue();
				props.append("date=" + cal.getTimeInMillis());
				props.append("op=" + cond.getOp().name());
				break;
			default:
				break;
			}
		}
		return props.toString();
	}

	// Dumps the error stream to a temporary file; returns null when there are no errors.
	public static File createErrorFile(Iterator<String> errors) throws Exception {
		int entries = 0;
		// ".txt" with the dot: createTempFile does not add one itself.
		File file = File.createTempFile("errors", ".txt");
		FileWriter writer = new FileWriter(file);
		try {
			while (errors.hasNext()) {
				writer.write(errors.next() + "\n");
				entries++;
			}
		} finally {
			writer.close();
		}
		if (entries == 0) {
			file.delete();
			return null;
		} else
			return file;
	}
}
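For reference, a tiny usage sketch of createErrorFile; the error messages are made up:

import java.io.File;
import java.util.Arrays;

File report = Utils.createErrorFile(Arrays.asList(
		"id 42: taxon not found",
		"id 43: repository timeout").iterator());
if (report == null)
	System.out.println("no errors");   // empty iterator: temp file deleted, null returned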
@ -0,0 +1,13 @@
package org.gcube.data.spd.utils;

// Typed stand-in for "no value", for generic signatures that require a
// non-null result.
public class VOID {

	private static VOID singleton = new VOID();

	public static VOID instance() {
		return singleton;
	}

	private VOID() {}
}
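VOID reads as a typed replacement for null or java.lang.Void in value-returning generics; a hedged sketch combining it with RetryCall above, where remoteRepository.ping() is a hypothetical side-effect-only call:

RetryCall<VOID, ExternalRepositoryException> ping =
		new RetryCall<VOID, ExternalRepositoryException>(3, 500) {

			@Override
			protected VOID execute() throws ExternalRepositoryException {
				remoteRepository.ping();    // hypothetical, no meaningful return value
				return VOID.instance();
			}

			@Override
			protected long getWaitTime(int retry, long waitTimeInMillis) {
				return waitTimeInMillis;    // constant backoff
			}
		};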
@ -0,0 +1,60 @@
<?xml version='1.0' encoding='utf-8'?>
<eml:eml xmlns:eml="eml://ecoinformatics.org/eml-2.1.1"
         xmlns:md="eml://ecoinformatics.org/methods-2.1.1"
         xmlns:proj="eml://ecoinformatics.org/project-2.1.1"
         xmlns:d="eml://ecoinformatics.org/dataset-2.1.1"
         xmlns:res="eml://ecoinformatics.org/resource-2.1.1"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xmlns:dc="http://purl.org/dc/terms/"
         packageId="e71fda1c-dcb9-4eae-81a9-183114978e44/eml-1.xml" system="GBIF-IPT" scope="system">
  <dataset>
    <title>The gCube System - Species Products Discovery Service</title>
    <creator>
      <individualName>
        <givenName>Valentina</givenName>
        <surName>Marioli</surName>
      </individualName>
      <organizationName>CNR Pisa, Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo"</organizationName>
      <positionName></positionName>
      <address>
        <city>Pisa</city>
        <administrativeArea></administrativeArea>
        <postalCode></postalCode>
        <country>Italy</country>
      </address>
      <phone></phone>
      <electronicMailAddress>valentina.marioli@isti.cnr.it</electronicMailAddress>
      <onlineUrl>https://gcube.wiki.gcube-system.org/gcube/index.php/Biodiversity_Access</onlineUrl>
    </creator>
    <pubDate></pubDate>
    <language>en</language>
    <abstract>
      <para>This work has been partially supported by the following European projects: DILIGENT (FP6-2003-IST-2),
      D4Science (FP7-INFRA-2007-1.2.2), D4Science-II (FP7-INFRA-2008-1.2.2),
      iMarine (FP7-INFRASTRUCTURES-2011-2), and EUBrazilOpenBio (FP7-ICT-2011-EU-Brazil).</para>
    </abstract>
    <keywordSet>
      <keyword>gCube</keyword>
      <keyword>Species Discovery</keyword>
    </keywordSet>
    <intellectualRights>
      <para>The gCube/gCore software is licensed as Free Open Source software conveying to the EUPL (http://ec.europa.eu/idabc/eupl).
      The software and documentation is provided by its authors/distributors "as is" and no expressed or
      implied warranty is given for its use, quality or fitness for a particular case.</para>
    </intellectualRights>
    <contact>
      <individualName>
        <givenName>Valentina</givenName>
        <surName>Marioli</surName>
      </individualName>
      <electronicMailAddress>valentina.marioli@isti.cnr.it</electronicMailAddress>
    </contact>
    <methods>
    </methods>
    <project>
      <title>Species Products Discovery Service</title>
    </project>
  </dataset>
  <additionalMetadata>
  </additionalMetadata>
</eml:eml>
@ -0,0 +1,42 @@
<?xml version='1.0' encoding='utf-8'?>
<archive xmlns="http://rs.tdwg.org/dwc/text/" metadata="eml.xml">
  <core encoding="UTF-8" linesTerminatedBy="\n" fieldsTerminatedBy="\t" fieldsEnclosedBy=""
        ignoreHeaderLines="1" rowType="http://rs.tdwg.org/dwc/terms/Taxon">
    <files>
      <location>taxa.txt</location>
    </files>
    <id index="0"/>
    <field index="0" term="http://rs.tdwg.org/dwc/terms/taxonID"/>
    <field index="1" term="http://rs.tdwg.org/dwc/terms/acceptedNameUsageID"/>
    <field index="2" term="http://rs.tdwg.org/dwc/terms/parentNameUsageID"/>
    <field index="3" term="http://rs.tdwg.org/dwc/terms/scientificName"/>
    <field index="4" term="http://rs.tdwg.org/dwc/terms/scientificNameAuthorship"/>
    <field index="5" term="http://rs.tdwg.org/dwc/terms/nameAccordingTo"/>
    <field index="6" term="http://rs.tdwg.org/dwc/terms/kingdom"/>
    <field index="7" term="http://rs.tdwg.org/dwc/terms/phylum"/>
    <field index="8" term="http://rs.tdwg.org/dwc/terms/class"/>
    <field index="9" term="http://rs.tdwg.org/dwc/terms/order"/>
    <field index="10" term="http://rs.tdwg.org/dwc/terms/family"/>
    <field index="11" term="http://rs.tdwg.org/dwc/terms/genus"/>
    <field index="12" term="http://rs.tdwg.org/dwc/terms/subgenus"/>
    <field index="13" term="http://rs.tdwg.org/dwc/terms/specificEpithet"/>
    <field index="14" term="http://rs.tdwg.org/dwc/terms/infraspecificEpithet"/>
    <field index="15" term="http://rs.tdwg.org/dwc/terms/verbatimTaxonRank"/>
    <field index="16" term="http://rs.tdwg.org/dwc/terms/taxonRank"/>
    <field index="17" term="http://rs.tdwg.org/dwc/terms/taxonomicStatus"/>
    <field index="18" term="http://purl.org/dc/terms/modified"/>
    <field index="19" term="http://purl.org/dc/terms/bibliographicCitation"/>
    <field index="20" term="http://rs.tdwg.org/dwc/terms/taxonRemarks"/>
    <field index="21" term="http://rs.tdwg.org/dwc/terms/scientificNameID"/>
  </core>
  <extension encoding="UTF-8" linesTerminatedBy="\n" fieldsTerminatedBy="\t" fieldsEnclosedBy=""
             ignoreHeaderLines="1" rowType="http://rs.gbif.org/terms/1.0/VernacularName">
    <files>
      <location>VernacularName.txt</location>
    </files>
    <coreid index="0"/>
    <field index="1" term="http://rs.tdwg.org/dwc/terms/vernacularName"/>
    <field index="2" term="http://purl.org/dc/terms/language"/>
    <field index="3" term="http://rs.tdwg.org/dwc/terms/locality"/>
  </extension>
</archive>
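This descriptor maps the tab-separated columns of taxa.txt to Darwin Core terms. A minimal reader sketch consistent with it (the file name comes from the <location> element; the enclosing method and error handling, which must allow IOException, are omitted):

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

try (BufferedReader in = new BufferedReader(
		new InputStreamReader(new FileInputStream("taxa.txt"), StandardCharsets.UTF_8))) {
	in.readLine();                             // skip header: ignoreHeaderLines="1"
	String line;
	while ((line = in.readLine()) != null) {
		String[] f = line.split("\t", -1);     // fieldsTerminatedBy="\t", keep empty trailing fields
		String taxonID = f[0];                 // index 0: dwc:taxonID (also the core <id>)
		String scientificName = f[3];          // index 3: dwc:scientificName
		System.out.println(taxonID + " -> " + scientificName);
	}
}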
@ -0,0 +1,39 @@
package org.gcube.data.spd;

import java.util.ArrayList;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.data.spd.model.PointInfo;
import org.gcube.data.spd.model.service.types.MetadataDetails;
import org.gcube.data.spd.utils.MapUtils;

public class MapTest {

	public static void main(String[] args) throws Exception {

		ScopeProvider.instance.set("/gcube/devsec");

		// DataBaseDescription db = new DataBaseDescription(
		//		"jdbc:postgresql://geoserver-test.d4science-ii.research-infrastructures.eu:5432/timeseriesgisdb", "postgres", "d4science2");
		//
		// LayerCreationOptions layerOptions = new LayerCreationOptions(
		//		"timeseriesws", "point", "timeseriesws", "Datasets", false, true);

		MetadataDetails details = new MetadataDetails(
				"This layer means nothing to me", "Mind your business", "Just a layer", "Qualcuno", "insert credits");

		// Builds a 1-degree grid of points covering the whole globe.
		ArrayList<PointInfo> points = new ArrayList<>();
		System.out.println("Creating points...");
		for (int x = -180; x < 180; x++)
			for (int y = -90; y < 90; y++)
				points.add(new PointInfo(x, y));

		System.out.println("Launching...");
		// System.out.println("Result : " + MapUtils.publishLayerByCoords(db, layerOptions, details, points));
		System.out.println("Result : " + MapUtils.publishLayerByCoords(details, points, false, true));
	}
}