update spd algorithms
git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngineExternalAlgorithms@83149 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent 171d6e53d4
commit 7feded234c

Changed files: pom.xml (15 changed lines)
pom.xml
@@ -99,14 +99,19 @@
<artifactId>common-clients</artifactId>
<version>[2.0.0-SNAPSHOT,3.0.0-SNAPSHOT)</version>
</dependency>

<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-fw-clients</artifactId>
<version>1.0.0-SNAPSHOT</version>
<scope>runtime</scope>
<artifactId>common-gcore-clients</artifactId>
<version>[1.0.0-SNAPSHOT,3.0.0-SNAPSHOT)</version>
</dependency>

<!-- <dependency> -->
<!-- <groupId>org.gcube.core</groupId> -->
<!-- <artifactId>common-fw-clients</artifactId> -->
<!-- <version>[1.0.0-SNAPSHOT,3.0.0-SNAPSHOT)</version> -->
<!-- <scope>runtime</scope> -->
<!-- </dependency> -->
<dependency>
<groupId>org.gcube.resources.discovery</groupId>
<artifactId>ic-client</artifactId>
MapDwCA.java (new file)
@@ -0,0 +1,469 @@
package org.gcube.dataanalysis.JobSMspd;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import org.gcube.data.spd.model.CommonName;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MapDwCA {

static Logger logger = LoggerFactory.getLogger(MapDwCA.class);

private BufferedWriter vernacularFile;
private File tempFolder;
private List<File> fileList = new ArrayList<File>();
private String archiveZip = "archive-tax.zip";
private String directory;

public MapDwCA(String directory) {
super();
this.directory=directory;
}

public synchronized File createDwCA(Iterator<TaxonomyItem> taxa) throws Exception{
createMetaXml();
createMetadata();
createHeaders();
createTaxaTxt(taxa);
getAllFiles(tempFolder);
return writeZipFile(tempFolder);
}

/**
* Create file meta.xml
*/
private void createMetaXml(){

try {
BufferedWriter bw = null;
BufferedReader br = null;
//tempFolder = File.createTempFile("DwCA-folder", "" );
tempFolder = new File(directory+"DwCA-folder");
//tempFolder.delete();
tempFolder.mkdir();
File output = new File(tempFolder + "/meta.xml") ;

bw = new BufferedWriter(new FileWriter(output));
br = new BufferedReader(new InputStreamReader(MapDwCA.class.getResourceAsStream("/org/gcube/data/spd/dwca/meta.xml")));
String line;

while ((line = br.readLine()) != null) {
bw.write(line);
bw.write('\n');

}
bw.close();
br.close();
} catch (IOException e) {
logger.error("IO Error", e);
}
}

/**
* Create headers in taxa.txt and vernacular.txt
*/
private void createHeaders(){

try {

BufferedWriter file = new BufferedWriter(new FileWriter(tempFolder + "/" + "taxa.txt", true));
vernacularFile = new BufferedWriter(new FileWriter(tempFolder + "/" + "VernacularName.txt", true));

//header
file.write("taxonID\t");
file.write("acceptedNameUsageID\t");
file.write("parentNameUsageID\t");
file.write("scientificName\t");
file.write("scientificNameAuthorship\t");
file.write("nameAccordingTo\t");
file.write("kingdom\t");
file.write("phylum\t");
file.write("class\t");
file.write("order\t");
file.write("family\t");
file.write("genus\t");
file.write("subgenus\t");
file.write("specificEpithet\t");
file.write("infraspecificEpithet\t");
file.write("verbatimTaxonRank\t");
file.write("taxonRank\t");
file.write("taxonomicStatus\t");
file.write("modified\t");
file.write("bibliographicCitation\t");
file.write("taxonRemarks\t");
file.write("scientificNameID\n");
file.close();

//header VernacularName.txt
vernacularFile.write("taxonID\t");
vernacularFile.write("vernacularName\t");
vernacularFile.write("language\t");
vernacularFile.write("locality\n");
vernacularFile.close();

} catch (IOException e) {
logger.error("IO Error", e);
}

}

/**
* Write taxa.txt
*/
public void createTaxaTxt(Iterator<TaxonomyItem> taxaReader){

while (taxaReader.hasNext()) {
TaxonomyItem item = taxaReader.next();
//logger.trace(item.toString());
writeLine(item);
}

}

private void internalWriter(TaxonomyItem taxonomyItem, BufferedWriter file ) throws IOException{
String[] name = taxonomyItem.getScientificName().split(" ");

// Get elemen
TaxonomyItem tax = taxonomyItem.getParent();

Hashtable<String, String> hashTaxa = new Hashtable<String,String>();
//create hashtable with taxonomy keys
if (tax !=null)
getTax(tax, hashTaxa);

//taxonID
file.write(taxonomyItem.getId());
file.write("\t");

//acceptedNameUsageID
if (taxonomyItem.getStatus()==null){
logger.trace("the status is null for "+taxonomyItem.getId());
}if (taxonomyItem.getStatus().getRefId() != null){
String id = taxonomyItem.getStatus().getRefId();
file.write(id);
}

file.write("\t");

//parentNameUsageID
if (tax !=null)
file.write(tax.getId());
file.write("\t");

//scientificName
/*if (taxonomyItem.getCitation() != null)
file.write(taxonomyItem.getScientificName() + " " + taxonomyItem.getCitation());
else*/
file.write(taxonomyItem.getScientificName());

file.write("\t");

//scientificNameAuthorship
if (taxonomyItem.getAuthor()!= null)
file.write(taxonomyItem.getAuthor());
file.write("\t");

if (taxonomyItem.getCitation()!= null)
file.write(taxonomyItem.getCitation());
file.write("\t");

//kingdom
String kingdom = (String)hashTaxa.get("kingdom");
if (kingdom != null)
file.write(kingdom);
file.write("\t");

//phylum
String phylum = (String) hashTaxa.get("phylum");
if (phylum != null)
file.write(phylum);
file.write("\t");

//class
String claz = (String)hashTaxa.get("class");
if (claz != null)
file.write(claz);
file.write("\t");

//order
String order = (String)hashTaxa.get("order");
if (order != null)
file.write(order);
file.write("\t");

//family
String family = (String)hashTaxa.get("family");
if (family != null)
file.write(family);
file.write("\t");

//genus
String genus = (String)hashTaxa.get("genus");
if (genus != null)
file.write(genus);
file.write("\t");

//subgenus
String subgenus = (String)hashTaxa.get("subgenus");
if (subgenus != null)
file.write(subgenus);
file.write("\t");

//specificEpithet
if (name.length>1)
file.write(name[1]);
file.write("\t");

//infraspecificEpithet
if (name.length>2){
file.write(name[name.length-1]);
}
file.write("\t");

//verbatimTaxonRank
if (name.length>2){
file.write(name[name.length-2]);
}
file.write("\t");

//taxonRank
if (taxonomyItem.getRank()!= null)
file.write(taxonomyItem.getRank().toLowerCase());
file.write("\t");

//taxonomicStatus (accepted, synonym, unkonwn)
file.write(taxonomyItem.getStatus().getStatus().toString().toLowerCase());
file.write("\t");

//modified
if (taxonomyItem.getModified() !=null){
DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
Date date = taxonomyItem.getModified().getTime();
String s = sdf.format(date);
file.write(s);
}
file.write("\t");

//source
if (taxonomyItem.getCredits() != null)
file.write(taxonomyItem.getCredits());
file.write("\t");

//taxonRemarks
if (taxonomyItem.getStatus().getStatusAsString() != null)
file.write(taxonomyItem.getStatus().getStatusAsString());

file.write("\t");

if (taxonomyItem.getLsid() != null)
file.write(taxonomyItem.getLsid());
file.write("\n");

//write varnacular names
if (taxonomyItem.getCommonNames()!= null){
createVernacularTxt(taxonomyItem.getId(), taxonomyItem.getCommonNames());
}
}

/**
* Insert line in taxa.txt
*/
private void writeLine(TaxonomyItem taxonomyItem){

BufferedWriter bufferedWriter =null;
try {
bufferedWriter = new BufferedWriter(new FileWriter(tempFolder + "/" + "taxa.txt", true));
internalWriter(taxonomyItem, bufferedWriter);

} catch (IOException e) {
logger.error("IO Error", e);
}finally{
try {
if (bufferedWriter!=null)
bufferedWriter.close();
} catch (IOException e) {
logger.error("error closing bufferedWriter",e);
}
}

}

/**
* Write VernacularName.txt
*/
private void createVernacularTxt(String id, List<CommonName> list){

try {
vernacularFile = new BufferedWriter(new FileWriter(tempFolder + "/" + "VernacularName.txt", true));
for (CommonName vernacular : list) {
// logger.trace("Vernacular name: " + vernacular.getName());

//taxonID
vernacularFile.write(id);
vernacularFile.write("\t");

//vernacularName
vernacularFile.write(vernacular.getName());
vernacularFile.write("\t");

//language
if (vernacular.getLanguage()!= null)
vernacularFile.write(vernacular.getLanguage());
vernacularFile.write("\t");

//locality
if (vernacular.getLocality()!= null)
vernacularFile.write(vernacular.getLocality());

vernacularFile.write("\n");

}
vernacularFile.close();
} catch (IOException e) {
logger.error("IO Error", e);
}

}

/**
* Create hashtable with taxonomy keys
*/
private void getTax(TaxonomyItem tax, Hashtable<String, String> taxa){
taxa.put((tax.getRank()).toLowerCase(), tax.getScientificName());
//writeLine(tax);
// logger.trace("insert parent " + tax.getId() + " " + tax.getScientificName());
if (tax.getParent()!=null)
getTax(tax.getParent(), taxa);

}

/**
* List files in directory
*/
private void getAllFiles(File dir) {
try {
File[] files = dir.listFiles();
for (File file : files) {
fileList.add(file);
if (file.isDirectory()) {
logger.trace("directory:" + file.getCanonicalPath());
getAllFiles(file);
} else {
logger.trace(" file:" + file.getCanonicalPath());
}
}
} catch (IOException e) {
logger.error("error creating files",e);
}
}

/**
* Create zip file
*/
private File writeZipFile(File directoryToZip) throws Exception {

File zipFile = new File(directoryToZip + "/" + archiveZip);
FileOutputStream fos = new FileOutputStream(zipFile);
ZipOutputStream zos = new ZipOutputStream(fos);

for (File file : fileList) {
if (!file.isDirectory()) { // we only zip files, not directories
addToZip(directoryToZip, file, zos);
}
}
zos.close();
fos.close();
return zipFile;

}

/**
* Add files to zip
*/
private void addToZip(File directoryToZip, File file, ZipOutputStream zos) throws FileNotFoundException,
IOException {

FileInputStream fis = new FileInputStream(file);

// we want the zipEntry's path to be a relative path that is relative
// to the directory being zipped, so chop off the rest of the path
String zipFilePath = file.getCanonicalPath().substring(directoryToZip.getCanonicalPath().length() + 1,
file.getCanonicalPath().length());
logger.trace("Writing '" + zipFilePath + "' to zip file");
ZipEntry zipEntry = new ZipEntry(zipFilePath);
zos.putNextEntry(zipEntry);

byte[] bytes = new byte[1024];
int length;
while ((length = fis.read(bytes)) >= 0) {
zos.write(bytes, 0, length);
}

zos.closeEntry();
fis.close();

}

/**
* Create file eml.xml
*/
public void createMetadata() throws IOException {

Calendar now = Calendar.getInstance();
SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");

File output = new File(tempFolder + "/eml.xml") ;
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(output));
} catch (IOException e) {
logger.error("IO Error", e);
}

BufferedReader br = new BufferedReader(new InputStreamReader(MapDwCA.class.getResourceAsStream("/org/gcube/data/spd/dwca/eml.xml")));
String line;
while ((line = br.readLine()) != null) {
bw.write(line.replace("<pubDate></pubDate>", "<pubDate>" + format.format(now.getTime()) + "</pubDate>"));
bw.write('\n');

}
bw.close();
br.close();

}

}
OccurencesProcedure.java
@@ -6,6 +6,8 @@ import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.ResultSet;
@@ -24,6 +26,9 @@ import org.gcube.data.spd.client.proxies.Manager;
import org.gcube.data.spd.model.Conditions;
import org.gcube.data.spd.model.PluginDescription;
import org.gcube.data.spd.model.util.Capabilities;
import org.gcube.dataanalysis.JobSMspd.TaxaProcedure.DataPenum;
import org.gcube.dataanalysis.JobSMspd.TaxaProcedure.ExtentionDPEnum;
import org.gcube.dataanalysis.JobSMspd.TaxaProcedure.UnfoldDPEnum;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
@@ -49,34 +54,33 @@ public class OccurencesProcedure extends StandardLocalExternalAlgorithm {
static String passwordParameterName = "password";
static String urlParameterName = "FishBase";
SessionFactory dbconnection = null;
public static boolean call=false;
// public static boolean call=false;
String tablename;
String columnnames;
List<Object> speciesList;
List<Object> speciesList=null;
protected String fileName;
BufferedWriter out;
String outputtablename;
String outputErrortablename;
String outputtable;
HashMap<String, String>dpHash= new HashMap<String, String>();
HashMap<String, String>dpUHash= new HashMap<String, String>();
HashMap<String, String>dpEHash= new HashMap<String, String>();
HashMap<String, String> dpHash = new HashMap<String, String>();
HashMap<String, String> dpUHash = new HashMap<String, String>();
HashMap<String, String> dpEHash = new HashMap<String, String>();
String tableError;
private DataPenum dp = new DataPenum();
private ExtentionDPEnum dpE = new ExtentionDPEnum();
private UnfoldDPEnum dpU = new UnfoldDPEnum();
private static DataPenum dp = null;
private static ExtentionDPEnum dpE = null;
private static UnfoldDPEnum dpU = null;
private String dataProvider = "Data Provider :";
private String chosendataProvider = new String();
private String dataProviderExtention = "Data Provider (Expand Option):";
private String chosendataProviderExtention=new String();
//private String chosendataProviderUnfold="Data Provider Unfold:";
private String chosendataProviderExtention = new String();
// private String chosendataProviderUnfold="Data Provider Unfold:";
private String dataProviderUnfold = "Data Provider (Unfold Option):";
private String chosendataProviderUnfold=new String();

private String chosendataProviderUnfold = new String();

@Override
public String getDescription() {
return "A transducer algorithm that produces a dataset of species occurrences for a set of target species by retrieving these from major data providers including GBIF and OBIS. ";
return "An Algorithm that retrieves the occurrences from a data provided based on the given search options";
}

@Override
@@ -96,7 +100,7 @@ public class OccurencesProcedure extends StandardLocalExternalAlgorithm {
tableError = getInputParameter("ErrorTable");
chosendataProviderUnfold = getInputParameter(dataProviderUnfold);
chosendataProviderExtention = getInputParameter(dataProviderExtention);

chosendataProvider = getInputParameter(dataProvider);
outputErrortablename = getInputParameter("ErrorTableName");
@@ -109,15 +113,13 @@ public class OccurencesProcedure extends StandardLocalExternalAlgorithm {

}

@Override
protected void process() throws Exception {

try {
String scope = ScopeProvider.instance.get();
fulfilParameters();
createTables();

int lenght = (int) (speciesList.size() / 3);
ArrayList<String> chunk1 = new ArrayList<String>();
ArrayList<String> chunk2 = new ArrayList<String>();
@@ -132,13 +134,15 @@ public class OccurencesProcedure extends StandardLocalExternalAlgorithm {

chunk3.add((String) speciesList.get(i));
}

ThreadExtractionOccFromSPD t1 = new ThreadExtractionOccFromSPD(
chunk1, chosendataProvider,chosendataProviderExtention,chosendataProviderUnfold);
chunk1, chosendataProvider, chosendataProviderExtention,
chosendataProviderUnfold, scope);
ThreadExtractionOccFromSPD t2 = new ThreadExtractionOccFromSPD(
chunk2, chosendataProvider,chosendataProviderExtention,chosendataProviderUnfold);
chunk2, chosendataProvider, chosendataProviderExtention,
chosendataProviderUnfold, scope);
ThreadExtractionOccFromSPD t3 = new ThreadExtractionOccFromSPD(
chunk3, chosendataProvider,chosendataProviderExtention,chosendataProviderUnfold);
chunk3, chosendataProvider, chosendataProviderExtention,
chosendataProviderUnfold, scope);
Thread th1 = new Thread(t1);
th1.start();
Thread th2 = new Thread(t2);
@@ -151,18 +155,22 @@ public class OccurencesProcedure extends StandardLocalExternalAlgorithm {
write("scientific_name; author; catalogue_number; citation; collection_gode; cordinate_uncertaninty_in_meters; country;"
+ "credits; family; id; institution_code; kingdom; locality; providere; latitude; longitude; max_depth; min_depth");
out.newLine();

insertInTheTable(t1.getInfo());
insertInTheTable(t2.getInfo());
insertInTheTable(t3.getInfo());

insertInTheTableErrors(t1.getErrors());
insertInTheTableErrors(t2.getErrors());
insertInTheTableErrors(t3.getErrors());

} catch (Exception e) {
e.printStackTrace();
AnalysisLogger.getLogger().debug(e.toString());

throw e;
} finally {

DatabaseUtils.closeDBConnection(dbconnection);
out.close();
}
@@ -170,15 +178,6 @@ public class OccurencesProcedure extends StandardLocalExternalAlgorithm {
}

private void createTables() throws Exception {
// String s =
// " (scientific_name character varying, author character varying,catalogue_number character varying, citation character varying,"
// +
// "collection_gode character varying,cordinate_uncertaninty_in_meters character varying,country character varying,credits character varying ,family character varying, id character varying, institution_code character varying,kingdom character varying,"
// +
// "locality character varying,providere character varying ,latitude character varying, longitude character varying ,max_depth character varying ,min_depth character varying)";
//
// DatabaseFactory.executeSQLUpdate("create table " + outputtable + s,
// dbconnection);

DatabaseFactory.executeSQLUpdate("create table " + tableError
+ " (error character varying)", dbconnection);
@@ -186,12 +185,6 @@ public class OccurencesProcedure extends StandardLocalExternalAlgorithm {

private void insertInTheTable(ArrayList<ArrayList<String>> arrays)
throws Exception {
// AnalysisLogger.getLogger().debug("inside insertInTheTable ");
// String st =
// " (scientific_name, author,catalogue_number, citation,collection_gode,cordinate_uncertaninty_in_meters,country,"
// + "credits,family,id,"
// +
// " institution_code,kingdom,locality,providere,latitude, longitude,max_depth,min_depth)";

for (ArrayList<String> array : arrays) {
// String query = "insert into " + outputtable + st + " values (";
@@ -202,22 +195,18 @@ public class OccurencesProcedure extends StandardLocalExternalAlgorithm {
if (i != 0) {
writeString = writeString + "; ";
}
// query = query + ", ";}
writeString = writeString + " '";
// query = query + " '";
if (s != null)
s = s.replace("'", "");
writeString = writeString + s;
// query = query + s;
// query = query + "'";

writeString = writeString + "'";
i++;

}
write(writeString);
out.newLine();
// query = query + ")";
// /* DatabaseFactory.executeSQLUpdate(query, dbconnection);*/

}

}
@@ -236,186 +225,153 @@ public class OccurencesProcedure extends StandardLocalExternalAlgorithm {

}

public void print(ArrayList<ArrayList<String>> arrays) {
for (ArrayList<String> array : arrays) {

for (String s : array) {
AnalysisLogger.getLogger().debug(s);
}

private void insertEnumValuesr() {
if (dp == null || dpU == null || dpE == null) {
dp = new DataPenum();
dpE = new ExtentionDPEnum();
dpU = new UnfoldDPEnum();
setDynamicParameter();

}
}

}
@Override
public void shutdown() {

AnalysisLogger.getLogger().debug("Shut down ");
}
public void setDynamicParameter() {

private void setDynamicParameter() {
AnalysisLogger.getLogger().debug("Inside setDynamicParameter");
ScopeProvider.instance.set("/gcube/devsec");
// ScopeProvider.instance.set("/gcube/devsec");
AnalysisLogger.getLogger().debug(
"Procedure called in the scope"
+ ScopeProvider.instance.get().toString());
Manager manager = null;

manager = manager().build();
AnalysisLogger.getLogger().debug("costruito manager");
AnalysisLogger.getLogger().debug("prima dei plugin");
List<PluginDescription> plugin = manager.getPluginsDescription();
AnalysisLogger.getLogger().debug("preso i plugin");
// if(!dpHash.containsKey("ALL"))
// {
// dpHash.put("ALL", "ALL");
// dp.addEnum(DataProvidersType.class, "ALL");
// }
// if(!dpUHash.containsKey("ALL"))
// {
// dpUHash.put("ALL", "ALL");
// dpU.addEnum(UnfoldDPEnumType.class, "ALL");
// }
// if(!dpEHash.containsKey("ALL"))
// {
// dpEHash.put("ALL", "ALL");
// dpE.addEnum(ExtentionDPType.class, "ALL");
// }

dp.addEnum(DataProvidersType.class, "ALL");
//dpU.addEnum(UnfoldDPEnumType.class, "ALL");
dpE.addEnum(ExtentionDPType.class, "ALL");
dpU.addEnum(UnfoldDPEnumType.class, "NO OPTION");
dpE.addEnum(ExtentionDPType.class, "NO OPTION");

if (plugin != null) {
AnalysisLogger.getLogger().debug("build Manager");
AnalysisLogger.getLogger().debug("before dei plugin");
List<PluginDescription> plugin=null;
try{
plugin = manager.getPluginsDescription();
}
catch(Exception e)
{
String eTracMes= e.getMessage();
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
sw.toString();
AnalysisLogger.getLogger().debug(eTracMes);
AnalysisLogger.getLogger().debug(sw.toString());
}
finally
{
dp.addEnum(DataProvidersType.class, "ALL");
dpE.addEnum(ExtentionDPType.class, "ALL");
dpU.addEnum(UnfoldDPEnumType.class, "NO OPTION");
dpE.addEnum(ExtentionDPType.class, "NO OPTION");
}
AnalysisLogger.getLogger().debug("get plugin");

if (plugin != null) {

AnalysisLogger.getLogger().debug(
"*****PluginDescription is NOT null - length: "
+ plugin.size());

for (int i = 0; i < plugin.size(); i++) {

PluginDescription pluginDescription = plugin.get(i);
AnalysisLogger.getLogger().debug(
"*****PluginDescription is NOT null - length: "
+ plugin.size());

for (int i = 0; i < plugin.size(); i++) {

PluginDescription pluginDescription = plugin.get(i);
AnalysisLogger.getLogger().debug(
"For plugin ***"+pluginDescription.getName() );
Map<Capabilities, List<Conditions>> pluginCapabilities = pluginDescription
.getSupportedCapabilities();
AnalysisLogger.getLogger().debug(
"created maps");
AnalysisLogger.getLogger().debug(
" map size"+pluginCapabilities.size());

for (Entry<Capabilities, List<Conditions>> pluginCapability : pluginCapabilities
.entrySet()) {

Capabilities capability = pluginCapability.getKey();
AnalysisLogger.getLogger().debug(
capability.name().toString());
if(capability.name().toString().equals("Unfold"))
{
// if(!dpUHash.containsKey(pluginDescription.getName()))
// {
// AnalysisLogger.getLogger().debug("insert "+
// pluginDescription.getName().toString() + "fopr upfold");
// dpUHash.put(pluginDescription.getName(), pluginDescription.getName());
// dpU.addEnum(UnfoldDPEnumType.class,pluginDescription.getName().toString() );
// }
dpU.addEnum(UnfoldDPEnumType.class,pluginDescription.getName().toString() );
}

if(capability.name().toString().equals("Expansion"))
{
// if(!dpEHash.containsKey(pluginDescription.getName()))
// {
// AnalysisLogger.getLogger().debug("insert "+
// pluginDescription.getName().toString() + "fopr Expansion");
// dpEHash.put(pluginDescription.getName(), pluginDescription.getName());
// dpE.addEnum(ExtentionDPType.class,pluginDescription.getName().toString() );
// }
dpE.addEnum(ExtentionDPType.class,pluginDescription.getName().toString() );

}
if(capability.name().toString().equals("Occurrence"))
{
// if(!dpHash.containsKey(pluginDescription.getName()))
// {
// AnalysisLogger.getLogger().debug("insert "+
// pluginDescription.getName().toString() + "fopr Occurrence");
// dpHash.put(pluginDescription.getName() , pluginDescription.getName() );
// dp.addEnum(DataProvidersType.class,pluginDescription.getName().toString());
// }
dp.addEnum(DataProvidersType.class,pluginDescription.getName().toString());

}
}

"For plugin ***" + pluginDescription.getName());
Map<Capabilities, List<Conditions>> pluginCapabilities = pluginDescription
.getSupportedCapabilities();
AnalysisLogger.getLogger().debug("created maps");
AnalysisLogger.getLogger().debug(
" map size" + pluginCapabilities.size());

for (Entry<Capabilities, List<Conditions>> pluginCapability : pluginCapabilities
.entrySet()) {

Capabilities capability = pluginCapability.getKey();
String capabilityName=capability.name().toString();
AnalysisLogger.getLogger().debug(capabilityName);
if (capabilityName.equals("Unfold"))
dpU.addEnum(UnfoldDPEnumType.class, pluginDescription
.getName().toString());

if (capabilityName.equals("Expansion"))

dpE.addEnum(ExtentionDPType.class, pluginDescription
.getName().toString());

if (capabilityName.equals("Occurrence"))

dp.addEnum(DataProvidersType.class, pluginDescription
.getName().toString());

}

} else

}

} else
AnalysisLogger.getLogger().debug("*****PluginDescription is null");

}

@Override
protected void setInputParameters() {
try{
AnalysisLogger.getLogger().debug("inside setInputParameters ");
try {
AnalysisLogger.getLogger().debug("inside setInputParameters ");

addRemoteDatabaseInput(databaseParameterName, urlParameterName,
userParameterName, passwordParameterName, "driver", "dialect");
addRemoteDatabaseInput(databaseParameterName, urlParameterName,
userParameterName, passwordParameterName, "driver",
"dialect");
List<TableTemplates> templates = new ArrayList<TableTemplates>();
templates.add(TableTemplates.GENERIC);
InputTable tinput = new InputTable(templates, "SpeciesTable",
"The table containing the species information");
ColumnTypesList columns = new ColumnTypesList("SpeciesTable",
"SpeciesColumns", "Select the columns for species name",
false);
addStringInput("OutputTableName", "The name of the output table",
"occ_");
addStringInput("ErrorTableName", "The name of the output table",
"err_");
ServiceType randomstring = new ServiceType(
ServiceParameters.RANDOMSTRING, "OutputTable", "", "occ");
ServiceType randomstringErr = new ServiceType(
ServiceParameters.RANDOMSTRING, "ErrorTable", "", "err");
insertEnumValuesr();

addEnumerateInput(DataProvidersType.values(), dataProvider,
"Choose Data Providere", "ALL");
AnalysisLogger.getLogger().debug("After DataProvidersType");
addEnumerateInput(ExtentionDPType.values(), dataProviderExtention,
"Choose Expand Option Data Providere", "ALL");
AnalysisLogger.getLogger().debug("After ExtentionDPType");
addEnumerateInput(UnfoldDPEnumType.values(), dataProviderUnfold,
"Choose UnfoldRR Option Data Providere", "ALL");
AnalysisLogger.getLogger().debug("After UnfoldDPEnumType");

List<TableTemplates> templates = new ArrayList<TableTemplates>();
templates.add(TableTemplates.GENERIC);
InputTable tinput = new InputTable(templates, "SpeciesTable",
"The table containing the species information");
ColumnTypesList columns = new ColumnTypesList("SpeciesTable",
"SpeciesColumns", "Select the columns for species name", false);
addStringInput("OutputTableName", "The name of the output table",
"occ_");
addStringInput("ErrorTableName", "The name of the output table", "err_");
ServiceType randomstring = new ServiceType(
ServiceParameters.RANDOMSTRING, "OutputTable", "", "occ");
ServiceType randomstringErr = new ServiceType(
ServiceParameters.RANDOMSTRING, "ErrorTable", "", "err");

AnalysisLogger.getLogger().debug("before setDynamicParameter() ");
if(!call)
setDynamicParameter();

// try {
//// if (justcall == 0) {
//// justcall = 1;
//
//// }
// } catch (Throwable e) {
// e.printStackTrace();
// AnalysisLogger.getLogger().debug(e.toString());
// }

addEnumerateInput(DataProvidersType.values(), dataProvider,
"Choose Data Providere", "ALL");
AnalysisLogger.getLogger().debug("Dopo DataProvidersType");
addEnumerateInput(ExtentionDPType.values(),dataProviderExtention ,
"Choose Expand Option Data Providere","ALL");
AnalysisLogger.getLogger().debug("Dopo ExtentionDPType");
addEnumerateInput(UnfoldDPEnumType.values(), dataProviderUnfold,
"Choose UnfoldRR Option Data Providere","ALL");
AnalysisLogger.getLogger().debug("Dopo UnfoldDPEnumType");

inputs.add(tinput);
inputs.add(columns);
inputs.add(randomstring);
inputs.add(randomstringErr);
DatabaseType.addDefaultDBPars(inputs);
}catch (Throwable e) {
inputs.add(tinput);
inputs.add(columns);
inputs.add(randomstring);
inputs.add(randomstringErr);
DatabaseType.addDefaultDBPars(inputs);
} catch (Throwable e) {
e.printStackTrace();
AnalysisLogger.getLogger().debug(e.toString());
}
call=true;

// call=true;

}

@@ -427,20 +383,16 @@ public class OccurencesProcedure extends StandardLocalExternalAlgorithm {
List<TableTemplates> outtemplateErr = new ArrayList<TableTemplates>();
outtemplateErr.add(TableTemplates.GENERIC);

// OutputTable out = new OutputTable(outtemplate, outputtablename,
// outputtable, "The output table containing all the matches");
OutputTable outErr = new OutputTable(outtemplate, outputErrortablename,
tableError, "The output table containing all the matches");
PrimitiveType f = new PrimitiveType(File.class.getName(), new File(
fileName), PrimitiveTypes.FILE, "OccFile", "OccFile");
map.put("Output", f);
// map.put("Output", out);
map.put("Errors", outErr);
PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map,
PrimitiveTypes.MAP, "ResultsMap", "Results Map");

return output;
// return out;

}

@@ -464,6 +416,7 @@ public class OccurencesProcedure extends StandardLocalExternalAlgorithm {
return fields;
}
}

enum ExtentionDPType {
}

@@ -473,13 +426,16 @@ public class OccurencesProcedure extends StandardLocalExternalAlgorithm {
return fields;
}
}

enum UnfoldDPEnumType {
}

class UnfoldDPEnum extends DynamicEnum {

public Field[] getFields() {
Field[] fields = UnfoldDPEnumType.class.getDeclaredFields();
return fields;
}

}
}
TaxaProcedure.java
@@ -6,6 +6,8 @@ import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.ResultSet;
@@ -13,9 +15,11 @@ import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.Map.Entry;

import org.gcube.common.scope.api.ScopeProvider;
@@ -23,7 +27,14 @@ import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.data.spd.client.proxies.Manager;
import org.gcube.data.spd.model.Conditions;
import org.gcube.data.spd.model.PluginDescription;
import org.gcube.data.spd.model.products.TaxonomyItem;
import org.gcube.data.spd.model.util.Capabilities;
import org.gcube.dataanalysis.JobSMspd.TaxaProcedure.DataPenum;
import org.gcube.dataanalysis.JobSMspd.TaxaProcedure.DataProvidersType;
import org.gcube.dataanalysis.JobSMspd.TaxaProcedure.ExtentionDPEnum;
import org.gcube.dataanalysis.JobSMspd.TaxaProcedure.ExtentionDPType;
import org.gcube.dataanalysis.JobSMspd.TaxaProcedure.UnfoldDPEnum;
import org.gcube.dataanalysis.JobSMspd.TaxaProcedure.UnfoldDPEnumType;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnTypesList;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
@@ -49,34 +60,34 @@ public class TaxaProcedure extends StandardLocalExternalAlgorithm {
static String passwordParameterName = "password";
static String urlParameterName = "FishBase";
SessionFactory dbconnection = null;
public static boolean call=false;
// public static boolean call=false;
String tablename;
String columnnames;
List<Object> speciesList;
protected String fileName;
BufferedWriter out;
List<Object> speciesList=null;
// protected String fileName;
// BufferedWriter out;
String outputtablename;
String outputErrortablename;
String outputtable;
HashMap<String, String>dpHash= new HashMap<String, String>();
HashMap<String, String>dpUHash= new HashMap<String, String>();
HashMap<String, String>dpEHash= new HashMap<String, String>();
HashMap<String, String> dpHash = new HashMap<String, String>();
HashMap<String, String> dpUHash = new HashMap<String, String>();
HashMap<String, String> dpEHash = new HashMap<String, String>();
String tableError;
private DataPenum dp = new DataPenum();
private ExtentionDPEnum dpE = new ExtentionDPEnum();
private UnfoldDPEnum dpU = new UnfoldDPEnum();
private static DataPenum dp = null;
private static ExtentionDPEnum dpE = null;
private static UnfoldDPEnum dpU = null;
private String dataProvider = "Data Provider :";
private String chosendataProvider = new String();
private String dataProviderExtention = "Data Provider (Expand Option):";
private String chosendataProviderExtention=new String();
//private String chosendataProviderUnfold="Data Provider Unfold:";
private String chosendataProviderExtention = new String();
// private String chosendataProviderUnfold="Data Provider Unfold:";
private String dataProviderUnfold = "Data Provider (Unfold Option):";
private String chosendataProviderUnfold=new String();

private String chosendataProviderUnfold = new String();
File file;

@Override
public String getDescription() {
return "A transducer algorithm that produces a dataset of species taxonomic information for a set of target species by retrieving these from major data providers including Catalogue of Life, OBIS, WoRMS. ";
return " An Algorithm that retrieves the taxon from a data provided based on the given search options";
}

@Override
@@ -96,7 +107,7 @@ public class TaxaProcedure extends StandardLocalExternalAlgorithm {
tableError = getInputParameter("ErrorTable");
chosendataProviderUnfold = getInputParameter(dataProviderUnfold);
chosendataProviderExtention = getInputParameter(dataProviderExtention);

chosendataProvider = getInputParameter(dataProvider);
outputErrortablename = getInputParameter("ErrorTableName");
@@ -104,13 +115,9 @@ public class TaxaProcedure extends StandardLocalExternalAlgorithm {
.getListSeparator());
speciesList = DatabaseFactory.executeSQLQuery("select " + columnlist[0]
+ " from " + tablename, dbconnection);
fileName = super.config.getPersistencePath() + "results.csv";
out = new BufferedWriter(new FileWriter(fileName));

}

@Override
protected void process() throws Exception {

@@ -132,13 +139,16 @@ public class TaxaProcedure extends StandardLocalExternalAlgorithm {

chunk3.add((String) speciesList.get(i));
}

String scope = ScopeProvider.instance.get();
ThreadExtractionTaxaFromSPD t1 = new ThreadExtractionTaxaFromSPD(
chunk1, chosendataProvider,chosendataProviderExtention,chosendataProviderUnfold);
chunk1, chosendataProvider, chosendataProviderExtention,
chosendataProviderUnfold, scope);
ThreadExtractionTaxaFromSPD t2 = new ThreadExtractionTaxaFromSPD(
chunk2, chosendataProvider,chosendataProviderExtention,chosendataProviderUnfold);
chunk2, chosendataProvider, chosendataProviderExtention,
chosendataProviderUnfold, scope);
ThreadExtractionTaxaFromSPD t3 = new ThreadExtractionTaxaFromSPD(
chunk3, chosendataProvider,chosendataProviderExtention,chosendataProviderUnfold);
chunk3, chosendataProvider, chosendataProviderExtention,
chosendataProviderUnfold, scope);
Thread th1 = new Thread(t1);
th1.start();
Thread th2 = new Thread(t2);
@@ -148,12 +158,13 @@ public class TaxaProcedure extends StandardLocalExternalAlgorithm {
th1.join();
th2.join();
th3.join();
write("scientific_name; author; citation; credits; id; lsid; providere; rank; ");
out.newLine();
insertInTheTable(t1.getInfo());
insertInTheTable(t2.getInfo());
insertInTheTable(t3.getInfo());

Vector<TaxonomyItem> taxaList = t1.getTaxaList();
taxaList.addAll(t2.getTaxaList());
taxaList.addAll(t3.getTaxaList());
MapDwCA fileMaker = new MapDwCA(super.config.getPersistencePath());
Iterator<TaxonomyItem> it = taxaList.iterator();
file = fileMaker.createDwCA(it);

insertInTheTableErrors(t1.getErrors());
insertInTheTableErrors(t2.getErrors());
insertInTheTableErrors(t3.getErrors());
@@ -163,25 +174,24 @@ public class TaxaProcedure extends StandardLocalExternalAlgorithm {
throw e;
} finally {
DatabaseUtils.closeDBConnection(dbconnection);
out.close();
}

}

private void createTables() throws Exception {

DatabaseFactory.executeSQLUpdate("create table " + tableError
+ " (error character varying)", dbconnection);
}

private void insertInTheTable(ArrayList<ArrayList<String>> arrays) throws IOException{

private void insertInTheTable(ArrayList<ArrayList<String>> arrays)
throws IOException {

for (ArrayList<String> array : arrays) {
// String query = "insert into " + outputtable + st + " values (";
String writeString = new String();
int i = 0;

for (String s : array) {
if (i != 0) {
writeString = writeString + "; ";
@@ -198,10 +208,7 @@ public class TaxaProcedure extends StandardLocalExternalAlgorithm {
i++;

}
write(writeString);
out.newLine();
// query = query + ")";
// /* DatabaseFactory.executeSQLUpdate(query, dbconnection);*/

}

}
@@ -220,146 +227,151 @@ public class TaxaProcedure extends StandardLocalExternalAlgorithm {

}

public void print(ArrayList<ArrayList<String>> arrays) {
for (ArrayList<String> array : arrays) {

for (String s : array) {
AnalysisLogger.getLogger().debug(s);
}

}
}

@Override
public void shutdown() {
AnalysisLogger.getLogger().debug("Shut down ");
}

private void insertEnumValuesr() {
if (dp == null || dpU == null || dpE == null) {
dp = new DataPenum();
dpE = new ExtentionDPEnum();
dpU = new UnfoldDPEnum();
setDynamicParameter();
}

}
public void setDynamicParameter() {

private void setDynamicParameter() {
AnalysisLogger.getLogger().debug("Inside setDynamicParameter");
ScopeProvider.instance.set("/gcube/devsec");
AnalysisLogger.getLogger().debug(
"Procedure called in the scope"
+ ScopeProvider.instance.get().toString());
Manager manager = null;

manager = manager().build();
AnalysisLogger.getLogger().debug("costruito manager");
AnalysisLogger.getLogger().debug("prima dei plugin");
List<PluginDescription> plugin = manager.getPluginsDescription();
AnalysisLogger.getLogger().debug("preso i plugin");
AnalysisLogger.getLogger().debug("build manager");
AnalysisLogger.getLogger().debug("before plugin");
List<PluginDescription> plugin = null;
try {
plugin = manager.getPluginsDescription();
} catch (Exception e) {
String eTracMes= e.getMessage();
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
sw.toString();
AnalysisLogger.getLogger().debug(eTracMes);
AnalysisLogger.getLogger().debug(sw.toString());
} finally {
dp.addEnum(DataProvidersType.class, "ALL");
dpE.addEnum(ExtentionDPType.class, "ALL");
dpU.addEnum(UnfoldDPEnumType.class, "NO OPTION");
dpE.addEnum(ExtentionDPType.class, "NO OPTION");
}
AnalysisLogger.getLogger().debug("get plugin");

dp.addEnum(DataProvidersType.class, "ALL");
dpE.addEnum(ExtentionDPType.class, "ALL");
dpU.addEnum(UnfoldDPEnumType.class, "NO OPTION");
dpE.addEnum(ExtentionDPType.class, "NO OPTION");

if (plugin != null) {

AnalysisLogger.getLogger().debug(
"*****PluginDescription is NOT null - length: "
+ plugin.size());
if (plugin != null) {

AnalysisLogger.getLogger().debug(
"*****PluginDescription is NOT null - length: "
+ plugin.size());

for (int i = 0; i < plugin.size(); i++) {

PluginDescription pluginDescription = plugin.get(i);

for (int i = 0; i < plugin.size(); i++) {

PluginDescription pluginDescription = plugin.get(i);
AnalysisLogger.getLogger().debug(
"For plugin ***"+pluginDescription.getName() );
Map<Capabilities, List<Conditions>> pluginCapabilities = pluginDescription
.getSupportedCapabilities();
AnalysisLogger.getLogger().debug(
"created maps");
AnalysisLogger.getLogger().debug(
" map size"+pluginCapabilities.size());

for (Entry<Capabilities, List<Conditions>> pluginCapability : pluginCapabilities
.entrySet()) {

Capabilities capability = pluginCapability.getKey();
AnalysisLogger.getLogger().debug(
capability.name().toString());
if(capability.name().toString().equals("Unfold"))
{

dpU.addEnum(UnfoldDPEnumType.class,pluginDescription.getName().toString() );
}

if(capability.name().toString().equals("Expansion"))
{

dpE.addEnum(ExtentionDPType.class,pluginDescription.getName().toString() );

}
if(capability.name().toString().equals("Classification"))
{
dp.addEnum(DataProvidersType.class,pluginDescription.getName().toString());
}
}
}

} else
AnalysisLogger.getLogger().debug(
"For plugin ***" + pluginDescription.getName());
Map<Capabilities, List<Conditions>> pluginCapabilities = pluginDescription
.getSupportedCapabilities();
AnalysisLogger.getLogger().debug("created maps");
AnalysisLogger.getLogger().debug(
" map size" + pluginCapabilities.size());

for (Entry<Capabilities, List<Conditions>> pluginCapability : pluginCapabilities
.entrySet()) {

Capabilities capability = pluginCapability.getKey();
String capabilityName=capability.name().toString();
AnalysisLogger.getLogger().debug(
capabilityName);
if (capabilityName.equals("Unfold")) {

dpU.addEnum(UnfoldDPEnumType.class, pluginDescription
.getName().toString());
}

if (capabilityName.equals("Expansion")) {

dpE.addEnum(ExtentionDPType.class, pluginDescription
.getName().toString());

}
if (capabilityName.equals("Classification")) {
dp.addEnum(DataProvidersType.class, pluginDescription
.getName().toString());
}
}
}

} else
AnalysisLogger.getLogger().debug("*****PluginDescription is null");

}

@Override
protected void setInputParameters() {
try{
AnalysisLogger.getLogger().debug("inside setInputParameters ");
try {
AnalysisLogger.getLogger().debug("inside setInputParameters ");

addRemoteDatabaseInput(databaseParameterName, urlParameterName,
userParameterName, passwordParameterName, "driver", "dialect");
addRemoteDatabaseInput(databaseParameterName, urlParameterName,
userParameterName, passwordParameterName, "driver",
"dialect");

List<TableTemplates> templates = new ArrayList<TableTemplates>();
templates.add(TableTemplates.GENERIC);
InputTable tinput = new InputTable(templates, "SpeciesTable",
"The table containing the species information");
ColumnTypesList columns = new ColumnTypesList("SpeciesTable",
"SpeciesColumns", "Select the columns for species name", false);
addStringInput("OutputTableName", "The name of the output table",
"occ_");
addStringInput("ErrorTableName", "The name of the output table", "err_");
ServiceType randomstring = new ServiceType(
ServiceParameters.RANDOMSTRING, "OutputTable", "", "occ");
ServiceType randomstringErr = new ServiceType(
ServiceParameters.RANDOMSTRING, "ErrorTable", "", "err");

AnalysisLogger.getLogger().debug("before setDynamicParameter() ");
if(!call)
setDynamicParameter();

// try {
//// if (justcall == 0) {
//// justcall = 1;
//
//// }
// } catch (Throwable e) {
// e.printStackTrace();
// AnalysisLogger.getLogger().debug(e.toString());
// }
List<TableTemplates> templates = new ArrayList<TableTemplates>();
templates.add(TableTemplates.GENERIC);
InputTable tinput = new InputTable(templates, "SpeciesTable",
"The table containing the species information");
ColumnTypesList columns = new ColumnTypesList("SpeciesTable",
"SpeciesColumns", "Select the columns for species name",
false);
addStringInput("OutputTableName", "The name of the output table",
"occ_");
addStringInput("ErrorTableName", "The name of the output table",
"err_");
ServiceType randomstring = new ServiceType(
ServiceParameters.RANDOMSTRING, "OutputTable", "", "tax_");
ServiceType randomstringErr = new ServiceType(
ServiceParameters.RANDOMSTRING, "ErrorTable", "", "err");

addEnumerateInput(DataProvidersType.values(), dataProvider,
"Choose Data Providere", "ALL");
AnalysisLogger.getLogger().debug("Dopo DataProvidersType");
addEnumerateInput(ExtentionDPType.values(),dataProviderExtention ,
"Choose Expand Option Data Providere","ALL");
AnalysisLogger.getLogger().debug("Dopo ExtentionDPType");
addEnumerateInput(UnfoldDPEnumType.values(), dataProviderUnfold,
"Choose Unfold Option Data Providere","ALL");
AnalysisLogger.getLogger().debug("Dopo UnfoldDPEnumType");

AnalysisLogger.getLogger().debug("before setDynamicParameter() ");
// if(!call)
insertEnumValuesr();

inputs.add(tinput);
inputs.add(columns);
inputs.add(randomstring);
inputs.add(randomstringErr);
DatabaseType.addDefaultDBPars(inputs);
}catch (Throwable e) {
addEnumerateInput(DataProvidersType.values(), dataProvider,
"Choose Data Providere", "ALL");
AnalysisLogger.getLogger().debug("Dopo DataProvidersType");
addEnumerateInput(ExtentionDPType.values(), dataProviderExtention,
"Choose Expand Option Data Providere", "ALL");
AnalysisLogger.getLogger().debug("Dopo ExtentionDPType");
addEnumerateInput(UnfoldDPEnumType.values(), dataProviderUnfold,
"Choose Unfold Option Data Providere", "ALL");
AnalysisLogger.getLogger().debug("Dopo UnfoldDPEnumType");

inputs.add(tinput);
inputs.add(columns);
inputs.add(randomstring);
inputs.add(randomstringErr);
DatabaseType.addDefaultDBPars(inputs);
} catch (Throwable e) {
e.printStackTrace();
AnalysisLogger.getLogger().debug(e.toString());
}
call=true;

// call=true;

}

@@ -375,8 +387,8 @@ public class TaxaProcedure extends StandardLocalExternalAlgorithm {
// outputtable, "The output table containing all the matches");
OutputTable outErr = new OutputTable(outtemplate, outputErrortablename,
tableError, "The output table containing all the matches");
PrimitiveType f = new PrimitiveType(File.class.getName(), new File(
fileName), PrimitiveTypes.FILE, "OccFile", "OccFile");
PrimitiveType f = new PrimitiveType(File.class.getName(), file,
PrimitiveTypes.FILE, "OccFile", "OccFile");
map.put("Output", f);
// map.put("Output", out);
map.put("Errors", outErr);
@@ -388,16 +400,16 @@ public class TaxaProcedure extends StandardLocalExternalAlgorithm {

}

public void write(String writeSt) {
try {
out.write(writeSt);

} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}

}
// public void write(String writeSt) {
// try {
// out.write(writeSt);
//
// } catch (IOException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
//
// }

enum DataProvidersType {
}

@@ -408,6 +420,7 @@ public class TaxaProcedure extends StandardLocalExternalAlgorithm {
return fields;
}
}

enum ExtentionDPType {
}

@@ -417,6 +430,7 @@ public class TaxaProcedure extends StandardLocalExternalAlgorithm {
return fields;
}
}

enum UnfoldDPEnumType {
}
ThreadExtractionOccFromSPD.java
@@ -20,12 +20,13 @@ public class ThreadExtractionOccFromSPD implements Runnable {
private String dataProvider;
private String dataProviderUnfold;
private String dataProviderExpand;
String scope;

public ThreadExtractionOccFromSPD(ArrayList<String> chunk, String dataProvider,String dataProviderExpand,String dataProviderUnfold ) {
public ThreadExtractionOccFromSPD(ArrayList<String> chunk, String dataProvider,String dataProviderExpand,String dataProviderUnfold,String scope ) {
this.chunk = chunk;
for (String species : chunk) {
System.out.println(species);

// AnalysisLogger.getLogger().debug(species);
}
this.dataProvider=dataProvider;
@@ -33,14 +34,15 @@ public class ThreadExtractionOccFromSPD implements Runnable {
this.dataProviderUnfold=dataProviderUnfold;
informations = new ArrayList<ArrayList<String>>();
errors= new ArrayList<String>();
this.scope=scope;

}

public void run() {

ScopeProvider.instance.set("/gcube/devsec");
AnalysisLogger.getLogger().debug("SCOPE *******: "+scope);
ScopeProvider.instance.set(scope);
Manager manager=null;
try{
manager = manager().build();
@@ -48,11 +50,7 @@ public class ThreadExtractionOccFromSPD implements Runnable {
for (String species : chunk) {
if (species != null) {
String query = new String();
// if(dataProviderExpand.equals("NO OPTION"))
// query= "SEARCH BY SN '"+species + "' RETURN occurrence";
// else
// query= "SEARCH BY SN '"+species + "' EXPAND WITH CatalogueOfLife RETURN occurrence";
// System.out.println(query);

query=createQueryParameter(species);
AnalysisLogger.getLogger().debug("QUERY *******: "+query);
Stream<ResultElement> stream;
@@ -104,20 +102,17 @@ public class ThreadExtractionOccFromSPD implements Runnable {
unfold=" UNFOLD WITH "+dataProviderUnfold;

query= query +unfold;

AnalysisLogger.getLogger().debug("expand is : "+dataProviderExpand);
if(dataProviderExpand.equals("ALL"))
expand="EXPAND";
{if(dataProviderExpand.equals("NO OPTION"))
{expand=" EXPAND";}
else{
AnalysisLogger.getLogger().debug("inside else ");
if(dataProviderExpand.equals("NO OPTION"))
expand="";
else
expand=" EXPAND WITH "+dataProviderExpand;
}
query= query+ expand;
if(!expand.equals("")& !dataProviderExpand.equals("NO OPTION") )

query= query+ expand;
query=query+ where;
query= query +" RETURN occurrence";
return query;
ThreadExtractionTaxaFromSPD.java
@@ -3,6 +3,7 @@ package org.gcube.dataanalysis.JobSMspd;
import static org.gcube.data.spd.client.plugins.AbstractPlugin.manager;

import java.util.ArrayList;
import java.util.Vector;

import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
@@ -21,9 +22,11 @@ public class ThreadExtractionTaxaFromSPD implements Runnable {
private String dataProvider;
private String dataProviderUnfold;
private String dataProviderExpand;
Vector <TaxonomyItem> taxaList= new Vector <TaxonomyItem>();
String scope;

public ThreadExtractionTaxaFromSPD(ArrayList<String> chunk, String dataProvider,String dataProviderExpand,String dataProviderUnfold ) {
public ThreadExtractionTaxaFromSPD(ArrayList<String> chunk, String dataProvider,String dataProviderExpand,String dataProviderUnfold ,String scope) {
this.chunk = chunk;
for (String species : chunk) {
System.out.println(species);
@@ -34,6 +37,8 @@ public class ThreadExtractionTaxaFromSPD implements Runnable {
this.dataProviderUnfold=dataProviderUnfold;
informations = new ArrayList<ArrayList<String>>();
errors= new ArrayList<String>();
this.scope=scope;

}
@@ -41,7 +46,9 @@ public class ThreadExtractionTaxaFromSPD implements Runnable {

public void run() {

ScopeProvider.instance.set("/gcube/devsec");
AnalysisLogger.getLogger().debug("SCOPE *******: "+scope);
ScopeProvider.instance.set(scope);
//ScopeProvider.instance.set("/gcube/devsec");
Manager manager=null;
try{
manager = manager().build();
@@ -53,8 +60,9 @@ public class ThreadExtractionTaxaFromSPD implements Runnable {
// query= "SEARCH BY SN '"+species + "' RETURN occurrence";
// else
// query= "SEARCH BY SN '"+species + "' EXPAND WITH CatalogueOfLife RETURN occurrence";
// System.out.println(query);

query=createQueryParameter(species);
System.out.println("QUERY *******: "+query);
AnalysisLogger.getLogger().debug("QUERY *******: "+query);
Stream<ResultElement> stream;
try {
@@ -64,6 +72,8 @@ public class ThreadExtractionTaxaFromSPD implements Runnable {
while (stream.hasNext()) {
i++;
TaxonomyItem ti = (TaxonomyItem) stream.next();
System.out.println("Inside whiele: "+ti.toString());
taxaList.add(ti);
informations.add(crateRowTable(ti));

}
@@ -93,6 +103,7 @@ public class ThreadExtractionTaxaFromSPD implements Runnable {
String where=new String();
String expand=new String();
String unfold=new String();

if(dataProvider.equals("ALL"))
where="";
else
@@ -106,14 +117,15 @@ public class ThreadExtractionTaxaFromSPD implements Runnable {
query= query +unfold;

if(dataProviderExpand.equals("ALL"))
expand="EXPAND";
{if(dataProviderExpand.equals("NO OPTION"))
expand=" EXPAND";
else{
if(dataProviderExpand.equals("NO OPTION"))
expand="";
else
expand=" EXPAND WITH "+dataProviderExpand;
}
query= query+ expand;
if(!expand.equals("")& !dataProviderExpand.equals("NO OPTION") )
//if(!expand.equals("")& !dataProviderExpand.equals("NO OPTION") )

@@ -148,4 +160,8 @@ public class ThreadExtractionTaxaFromSPD implements Runnable {
{
return errors;
}
public Vector<TaxonomyItem >getTaxaList()
{
return taxaList;
}
}