git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/branches/data-access/spd-client-library/4.0@142359 82a268e6-3cf1-43bd-a215-b396298e98cf
Parent: d23b6b7080
Commit: 3a090bb124
@@ -1 +0,0 @@
-Used as a Library in the gCube Framework

@@ -1,6 +1 @@
-gCube System - License
-------------------------------------------------------------
-
-The gCube/gCore software is licensed as Free Open Source software conveying to the EUPL (http://ec.europa.eu/idabc/eupl).
-The software and documentation is provided by its authors/distributors "as is" and no expressed or
-implied warranty is given for its use, quality or fitness for a particular case.
+${gcube.license}

@@ -1,2 +0,0 @@
-Lucio lelii (lucio.lelii@isti.cnr.it), CNR Pisa,
-Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo".

@@ -1,47 +1,61 @@
-The gCube System - spd client library
-------------------------------------------------------------
+The gCube System - ${name}
+--------------------------------------------------

-This work has been partially supported by the following European projects: DILIGENT (FP6-2003-IST-2),
-D4Science (FP7-INFRA-2007-1.2.2), D4Science-II (FP7-INFRA-2008-1.2.2), iMarine (FP7-INFRASTRUCTURES-2011-2),
-and EUBrazilOpenBio (FP7-ICT-2011-EU-Brazil).
+${description}
+
+${gcube.description}
+
+${gcube.funding}
+
+
+Version
+--------------------------------------------------
+
+${version} (${buildDate})
+
+Please see the file named "changelog.xml" in this directory for the release notes.


 Authors
--------
+--------------------------------------------------

-* Lucio Lelii (lucio.lelii@isti.cnr.it), CNR Pisa,
-Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo".
+* Lucio Lelii (lucio.lelii-AT-isti.cnr.it), CNR, Italy
+
+MAINTAINERS
+--------------------------------------------------
+
+* Lucio Lelii (lucio.lelii-AT-isti.cnr.it), CNR, Italy
+
+Download information
+--------------------------------------------------
+
+Source code is available from SVN:
+${scm.url}
+
+Binaries can be downloaded from the gCube website:
+${gcube.website}


-Version and Release Date
-------------------------
+Installation
+--------------------------------------------------

-v. 1.0.0 (04-05-2012)
-* First release
-
-
-Description
------------
-
-
-
-Download information
---------------------
-
-
+Installation documentation is available on-line in the gCube Wiki:
+${gcube.wikiRoot}

 Documentation
--------------
+--------------------------------------------------

-Documentation is available on-line from the Projects Documentation Wiki:
-https://gcube.wiki.gcube-system.org/gcube/index.php/Biodiversity_Access
+Documentation is available on-line in the gCube Wiki:
+${gcube.wikiRoot}
+
+Support
+--------------------------------------------------
+
+Bugs and support requests can be reported in the gCube issue tracking tool:
+${gcube.issueTracking}


 Licensing
----------
+--------------------------------------------------

 This software is licensed under the terms you may find in the file named "LICENSE" in this directory.

@@ -15,8 +15,7 @@
 			<includes>
 				<include>README</include>
 				<include>LICENSE</include>
-				<include>INSTALL</include>
-				<include>MAINTAINERS</include>
+				<include>profile.xml</include>
 				<include>changelog.xml</include>
 			</includes>
 			<fileMode>755</fileMode>
@@ -24,19 +23,9 @@
 		</fileSet>
 	</fileSets>
 	<files>
-		<file>
-			<source>${distroDirectory}/profile.xml</source>
-			<outputDirectory>/</outputDirectory>
-			<filtered>true</filtered>
-		</file>
 		<file>
 			<source>target/${build.finalName}.jar</source>
 			<outputDirectory>/${artifactId}</outputDirectory>
 		</file>
-		<file>
-			<source>${distroDirectory}/svnpath.txt</source>
-			<outputDirectory>/${artifactId}</outputDirectory>
-			<filtered>true</filtered>
-		</file>
 	</files>
 </assembly>

@@ -12,10 +12,13 @@ import org.gcube.common.clients.stubs.jaxws.JAXWSUtils.Empty;
 import org.gcube.data.spd.model.service.exceptions.InvalidIdentifierException;
 import org.gcube.data.spd.model.service.types.CompleteJobStatus;
 import org.gcube.data.spd.model.service.types.JobType;
+import org.gcube.data.spd.model.service.types.MetadataDetails;
 import org.gcube.data.spd.model.service.types.SubmitJob;
 import org.gcube.data.spd.model.util.SerializableList;
 import org.gcube.data.streams.Stream;

+import com.thoughtworks.xstream.XStream;
+
 public class DefaultExecutor implements ExecutorClient{

 	private final ProxyDelegate<WebTarget> delegate;
@@ -161,6 +164,17 @@ public class DefaultExecutor implements ExecutorClient{
 		return jobId;
 	}

+	@Override
+	public String createLayer(Stream<String> keys, MetadataDetails metadata) throws Exception {
+		String jobId = delegate.make(getCallForJobs(new XStream().toXML(metadata), JobType.LayerCreator));
+		try{
+			sendInput(jobId, keys);
+		}catch(Exception e){
+			e.printStackTrace();
+		}
+		return jobId;
+	}
+
 	@Override
 	public String createCSVforOM(Stream<String> ids) throws Exception {
 		String jobId = delegate.make(getCallForJobs(null, JobType.CSVForOM));

@@ -2,6 +2,7 @@ package org.gcube.data.spd.client.proxies;

 import org.gcube.data.spd.model.service.exceptions.InvalidIdentifierException;
 import org.gcube.data.spd.model.service.types.CompleteJobStatus;
+import org.gcube.data.spd.model.service.types.MetadataDetails;
 import org.gcube.data.streams.Stream;

 public interface ExecutorClient {
@@ -24,4 +25,7 @@ public interface ExecutorClient {

 	public String createDarwincoreFromOccurrenceKeys(final Stream<String> ids) throws Exception;

+	String createLayer(Stream<String> keys, MetadataDetails metadata)
+			throws Exception;
+
 }

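For reference, a minimal sketch of how a caller might use the new createLayer operation added above. It relies only on calls visible in this diff (Streams.convert, ExecutorClient.createLayer, ExecutorClient.getStatus, and a five-argument MetadataDetails constructor mirroring the test below). How the ExecutorClient proxy itself is obtained is outside this change, so the sketch takes it as a parameter; the key values and metadata strings are placeholders.

import java.util.Arrays;

import org.gcube.data.spd.client.proxies.ExecutorClient;
import org.gcube.data.spd.model.service.types.CompleteJobStatus;
import org.gcube.data.spd.model.service.types.MetadataDetails;
import org.gcube.data.streams.Stream;
import org.gcube.data.streams.dsl.Streams;

public class CreateLayerExample {

	// Submits an asynchronous layer-creation job and returns its id.
	// The ExecutorClient proxy is passed in because this diff does not show
	// how proxies are built.
	public static String submitLayerJob(ExecutorClient executor) throws Exception {
		// Placeholder occurrence keys; real keys come from a product search or a CSV.
		Stream<String> keys = Streams.convert(Arrays.asList("key-1", "key-2"));

		// Five descriptive strings, in the same order the test class passes them;
		// the actual parameter names are not visible in this diff.
		MetadataDetails details = new MetadataDetails(
				"layer title", "layer description", "purpose", "author", "credits");

		String jobId = executor.createLayer(keys, details);

		// The job runs remotely; callers poll its status, e.g.:
		CompleteJobStatus status = executor.getStatus(jobId);
		System.out.println("submitted job " + jobId + ", current status: " + status.getStatus());

		return jobId;
	}
}
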
@@ -1,6 +1,7 @@
 package org.gcube.data.spd.client.manager;

 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;

 import org.gcube.common.authorization.library.provider.SecurityTokenProvider;
@@ -13,6 +14,7 @@ import org.gcube.data.spd.model.products.Product.ProductType;
 import org.gcube.data.spd.model.products.ResultItem;
 import org.gcube.data.spd.model.service.types.CompleteJobStatus;
 import org.gcube.data.spd.model.service.types.JobStatus;
+import org.gcube.data.spd.model.service.types.MetadataDetails;
 import org.gcube.data.spd.model.service.types.NodeStatus;
 import org.gcube.data.streams.Stream;
 import org.gcube.data.streams.dsl.Streams;
@@ -21,6 +23,8 @@ import org.junit.Test;

 public class DWCATest {

+	private static List<String> occurrenceKeys = Arrays.asList("GBIF:84028840-f762-11e1-a439-00145eb45e9a^^Marine and Coastal Management - Demersal Surveys (years 1991-1995) (AfrOBIS)^^Marine and Coastal Management - Demersal Surveys^^0e0fc0f0-828e-11d8-b7ed-b8a03c50a862^^Ocean Biogeographic Information System||5208593" , "GBIF:ed820bdb-4345-4143-a280-4fbffaacd31d^^The Pisces Collection at the Staatssammlung für Anthropologie und Paläoanatomie München^^Staatliche Naturwissenschaftliche Sammlungen Bayerns: The Pisces Collection at the Staatssammlung für Anthropologie und Paläoanatomie München^^0674aea0-a7e1-11d8-9534-b8a03c50a862^^Staatliche Naturwissenschaftliche Sammlungen Bayerns||5712279", "GBIF:8609f1a0-f762-11e1-a439-00145eb45e9a^^Marine and Coastal Management - Linefish Dataset (Second Semester of 1992) (AfrOBIS)^^Marine and Coastal Management - Linefish Dataset^^0e0fc0f0-828e-11d8-b7ed-b8a03c50a862^^Ocean Biogeographic Information System||5208602" );
+

 	@Test
 	public void OccurrenceJobFromSardaSarda() throws Exception{
@@ -31,14 +35,14 @@ public class DWCATest {

 		//"CatalogueOfLife:13445516" chordata
 		//"CatalogueOfLife:13446218" cervidae
-		String jobId = createOccurrence(creator);
+		String jobId = createLayerFromSardasarda(creator);
 		//String jobId = createOccurrence(creator);
 		CompleteJobStatus response= null;
 		do{
 			Thread.sleep(10000);
 			response= creator.getStatus(jobId);
 			System.out.println("thes status is "+response.getStatus());
-			System.out.println("the number of element read are "+response.getCompletedEntries());
+			System.out.println("the number of elements read are "+response.getCompletedEntries());
 			if(response.getSubNodes()!=null)
 				for (NodeStatus status : response.getSubNodes())
 					System.out.println(status.getScientificName()+"--"+status.getStatus());
@@ -57,7 +61,7 @@ public class DWCATest {
 	}

 	private static String createOccurrence(ExecutorClient creator) throws Exception {
-		Stream<String> keyStream =Streams.convert(new String[]{"Obis:522634-1695----", "Obis:465686-1695----", "Obis:429081-721----", "Obis:822676-1691----", "Obis:742361-119----", "Obis:447539-1695----", "GBIF:sarda||130||11956||57744173||","GBIF:sarda||82||400||50917042||","GBIF:sarda||427||14113||60499431||"});
+		Stream<String> keyStream =Streams.convert(occurrenceKeys);
 		return creator.createDarwincoreFromOccurrenceKeys(keyStream);
 	}

@@ -86,5 +90,33 @@ public class DWCATest {
 		return creator.createCSV(keyStream);
 	}

+	private static String createLayerFromSardasarda(ExecutorClient creator) throws Exception {
+		/*
+		ManagerClient manager = AbstractPlugin.manager().build();
+
+		Stream<ResultItem> rsStream = manager.search("SEARCH BY SN 'sarda sarda'");
+
+		List<String> keylist = new ArrayList<String>();
+		int i =0;
+		while (rsStream.hasNext()){
+			ResultItem rs = rsStream.next();
+			for (Product product: rs.getProducts())
+				if (product.getCount()>0 && product.getType()==ProductType.Occurrence) keylist.add(product.getKey());
+			if (i++>=5)
+				break;
+		}
+		rsStream.close();
+
+		System.out.println("keyList is "+keylist.size());
+
+		System.in.read();
+		*/
+		//Stream<String> keyStream =Streams.convert(keylist);
+
+		Stream<String> keyStream =Streams.convert(occurrenceKeys);
+		MetadataDetails details=new MetadataDetails(
+				"This layers means nothing to me", "Mind your business", "Just a layer", "Qualcuno", "insert credits");
+		return creator.createLayer(keyStream, details);
+	}
+
 }

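The test above polls the job with a fixed 10-second sleep inside a do/while whose exit condition falls outside the hunk. A small polling helper in the same spirit, shown only as a sketch: it uses getStatus, getCompletedEntries and getSubNodes exactly as they appear in the diff, and stops after a caller-supplied number of attempts because the terminal JobStatus constants are not part of this change.

import org.gcube.data.spd.client.proxies.ExecutorClient;
import org.gcube.data.spd.model.service.types.CompleteJobStatus;
import org.gcube.data.spd.model.service.types.NodeStatus;

public class JobPollingSketch {

	// Polls a job a fixed number of times, printing the same progress
	// information the test prints. Deciding when the status is terminal is
	// left to the caller, since the JobStatus values are not shown in this diff.
	public static CompleteJobStatus poll(ExecutorClient executor, String jobId,
			int maxAttempts, long intervalMillis) throws Exception {
		CompleteJobStatus response = null;
		for (int attempt = 0; attempt < maxAttempts; attempt++) {
			Thread.sleep(intervalMillis);
			response = executor.getStatus(jobId);
			System.out.println("status: " + response.getStatus()
					+ ", entries read: " + response.getCompletedEntries());
			if (response.getSubNodes() != null)
				for (NodeStatus status : response.getSubNodes())
					System.out.println(status.getScientificName() + " -- " + status.getStatus());
		}
		return response;
	}
}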