git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-analysis/EcologicalEngine@55569 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent d171c45507
commit c65adbc7f3

@@ -11,6 +11,6 @@
 		</sealing>
 	</manifest>
 	<selectedElements exportClassFiles="true" exportJavaFiles="false" exportOutputFolder="false">
-		<javaElement handleIdentifier="=EcologicalEngine/src"/>
+		<javaElement handleIdentifier="=EcologicalEngine1.2/src"/>
 	</selectedElements>
 </jardesc>

@@ -19,6 +19,8 @@ import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.hibernate.SessionFactory;
import org.jfree.data.category.DefaultCategoryDataset;

import com.mchange.v1.util.ArrayUtils;

public class BioClimateAnalysis {

	private static String countHighProbabilityCells = "select count(*) from %1$s where probability>%2$s";
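
The newly imported DefaultCategoryDataset is the JFreeChart type that produceCharts (further down) fills with one row per feature. A minimal standalone sketch of its API; the row/column keys below are illustrative, not taken from the commit:

import org.jfree.data.category.DefaultCategoryDataset;

public class DatasetDemo {
	public static void main(String[] args) {
		// addValue(value, rowKey, columnKey) fills one cell of the category table
		DefaultCategoryDataset dataset = new DefaultCategoryDataset();
		dataset.addValue(42.0, "salinity", "hspec_table_1"); // illustrative keys
		dataset.addValue(40.5, "salinity", "hspec_table_2");
		System.out.println(dataset.getValue("salinity", "hspec_table_1")); // 42.0
	}
}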

@@ -182,9 +184,27 @@ public class BioClimateAnalysis {
		globalEvolutionAnalysis(null, hspecTables, null, hspecTableNames, probabilityColumn, csquareColumn, threshold);
	}

	// makes table names unique by suffixing duplicates with _1, _2, ...
	private String[] checkTableNames(String[] tablesNames) {
		ArrayList<String> newtables = new ArrayList<String>();
		for (String table : tablesNames) {
			int i = 1;
			String originalTable = table;
			while (newtables.contains(table)) {
				table = originalTable + "_" + i;
				i++;
			}
			newtables.add(table);
		}
		String[] tables = new String[tablesNames.length];
		for (int j = 0; j < tablesNames.length; j++) {
			tables[j] = newtables.get(j);
		}
		return tables;
	}

	public void produceCharts(HashMap<String, HashMap<String, double[]>> GeoMap, String[] tablesNames) {
		// produce a chart for each feature
		tablesNames = checkTableNames(tablesNames);
		producedImages = new HashMap<String, Image>();
		for (String featurename : GeoMap.keySet()) {
			DefaultCategoryDataset chart = new DefaultCategoryDataset();
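
The new helper exists because produceCharts uses table names as chart categories, and duplicates would collide. A quick standalone sketch of the renaming rule it applies (input values are illustrative):

import java.util.ArrayList;

public class CheckTableNamesDemo {
	public static void main(String[] args) {
		String[] input = { "test", "test", "test" };
		// same rule as checkTableNames: duplicates get _1, _2, ...
		ArrayList<String> unique = new ArrayList<String>();
		for (String table : input) {
			int i = 1;
			String original = table;
			while (unique.contains(table)) {
				table = original + "_" + i;
				i++;
			}
			unique.add(table);
		}
		System.out.println(unique); // [test, test_1, test_2]
	}
}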

@@ -10,14 +10,12 @@ public interface Generator extends ComputationalAgent{

	public INFRASTRUCTURE getInfrastructure();

-	public void init();
+	public void init() throws Exception;

	public void setConfiguration(AlgorithmConfiguration config);

	public void shutdown();

	public void stopProcess();

	public String getLoad();

	public void generate() throws Exception;
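
Since init() now declares throws Exception, callers must propagate or handle initialization failures instead of having them swallowed inside the generator. A minimal caller sketch under that assumption (runSafely is a hypothetical helper, not part of the commit):

static void runSafely(Generator gen) throws Exception {
	try {
		gen.init();      // may now fail fast with a checked exception
		gen.generate();
	} finally {
		gen.shutdown();  // release resources whether or not generation succeeded
	}
}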

@@ -1,7 +1,5 @@
package org.gcube.dataanalysis.ecoengine.interfaces;

import java.util.List;

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;

public interface SpatialProbabilityDistributionNode extends SpatialProbabilityDistribution {

@@ -22,4 +20,8 @@ public interface SpatialProbabilityDistributionNode extends SpatialProbabilityDistribution {
	public int getNumberOfSpecies();

	public int getNumberOfGeoInfo();

	public int getOverallProcessedInfo();

	public void stop();
}
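
The three counters added to the interface make it possible to derive a completion percentage from outside the node. A hypothetical sketch of such a computation (the actual formula lives in the implementations, not in this interface):

static float progress(SpatialProbabilityDistributionNode node) {
	// hypothetical: total work = species x geographical records
	long total = (long) node.getNumberOfSpecies() * node.getNumberOfGeoInfo();
	return (total == 0) ? 0f : (100f * node.getOverallProcessedInfo()) / total;
}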

@@ -140,11 +140,6 @@ public class LocalSimpleSplitGenerator implements Generator {
		stopInterrupt = true;
	}

	@Override
	public void stopProcess() {
		stopInterrupt = true;
	}

	// waits for thread to be free
	private void wait4Thread(int index) {
		// wait until thread is free

@@ -222,11 +222,6 @@ public class LocalSplitGenerator implements Generator {

	}

	@Override
	public void stopProcess() {
		stopInterrupt = true;
	}

	// waits for thread to be free
	private void wait4Thread(int index) {
		// wait until thread is free
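
Both split generators stop the same way: stopProcess() only raises a flag, and the worker loops poll it and wind down on their own. A generic sketch of that cooperative-stop pattern; the volatile modifier is an assumption added for cross-thread visibility, not copied from the commit:

public class StoppableWorker implements Runnable {
	private volatile boolean stopInterrupt = false;

	public void stopProcess() {
		stopInterrupt = true; // a request, not preemption
	}

	public void run() {
		while (!stopInterrupt) {
			// ... process the next chunk, then re-check the flag ...
		}
	}
}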

@@ -37,7 +37,7 @@ public class RainyCloudGenerator implements Generator {

	// if (oo.status.equals("DONE")||oo.status.equals("ERROR"))
	if (oo.status.equals("DONE")) {
		stopProcess();
		shutdown();
		return 100f;
	} else {
		float remoteStatus = (float) remoteGenerationManager.retrieveCompletion();

@@ -96,10 +96,6 @@ public class RainyCloudGenerator implements Generator {

	@Override
	public void shutdown() {
	}

	@Override
	public void stopProcess() {
		interruptProcessing = true;
	}


@@ -0,0 +1,37 @@
package org.gcube.dataanalysis.ecoengine.test;

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.user.GeneratorT;
import org.gcube.dataanalysis.ecoengine.user.ModelerT;

public class PerformanceTests {

	public static void main(String[] args) throws Exception {
		String configPath = "./cfg/";
		String csquareTable = "hcaf_d";
		String preprocessedTable = "maxminlat_hspen";
		String envelopeTable = "hspen";
		int numberOfResources = 4;
		String speciesCode = "Fis-22747";
		String userName = "gianpaolo.coro";
		String generatorName = "AQUAMAPS_SUITABLE";

		String finalDistributionTable = "hspec_suitable_local";

		// Generate
		long t0 = System.currentTimeMillis();
		AlgorithmConfiguration config = GeneratorT.getGenerationConfig(numberOfResources, generatorName, envelopeTable, preprocessedTable, "", userName, csquareTable, finalDistributionTable, configPath);
		config.setPersistencePath("./");
		config.setParam("ServiceUserName", "gianpaolo.coro");
		config.setParam("DatabaseUserName", "utente");
		config.setParam("DatabasePassword", "d4science");
		config.setParam("DatabaseURL", "jdbc:postgresql://dbtest.research-infrastructures.eu/aquamapsorgupdated");
		config.setParam("DatabaseDriver", "org.hibernate.dialect.PostgreSQLDialect");

		GeneratorT.generate(config);

		System.out.println("OVERALL ELAPSED: " + (System.currentTimeMillis() - t0));
	}

}

@@ -27,7 +27,7 @@ public class TestBioClimateAnalysis {
			"hspen2012_06_02_08_54_48_004",
			"hspen2012_06_02_08_55_53_415"
	};

	/*
	final String [] envelopeTablesNames = {
			"hspen2012_06_01_21_52_47_460",
			"hspen2012_06_01_21_52_47_485",

@@ -40,6 +40,20 @@ public class TestBioClimateAnalysis {
			"hspen2012_06_02_08_54_48_004",
			"hspen2012_06_02_08_55_53_415"
	};
	*/

	final String[] envelopeTablesNames = {
			"test",
			"test",
			"test",
			"test",
			"test",
			"test",
			"test",
			"test",
			"test",
			"test"
	};

	bioClimate.speciesEvolutionAnalysis(envelopeTables, envelopeTablesNames, BioClimateAnalysis.salinityMinFeature, BioClimateAnalysis.salinityDefaultRange);

@@ -134,10 +134,10 @@ public class DatabaseFactory{
		Configuration cfg = new Configuration();
		cfg = cfg.configure(DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(new ByteArrayInputStream(document.asXML().getBytes())));
		cfg.setProperty("hibernate.hbm2ddl.auto", "create");

		SessionFactory DBSessionFactory = null;
		DBSessionFactory = cfg.buildSessionFactory();

		// close stream
		stream.close();
		return DBSessionFactory;
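
Worth noting: hbm2ddl.auto = "create" drops and recreates the mapped schema every time the session factory is built, which is destructive on existing tables. A minimal sketch of the standard modes, assuming the Hibernate 3.x line this project appears to use:

import org.hibernate.cfg.Configuration;

public class SchemaModeDemo {
	public static void main(String[] args) {
		Configuration cfg = new Configuration();
		cfg.setProperty("hibernate.hbm2ddl.auto", "create");      // drop and recreate the schema
		// cfg.setProperty("hibernate.hbm2ddl.auto", "update");   // alter the schema in place
		// cfg.setProperty("hibernate.hbm2ddl.auto", "validate"); // check mappings, change nothing
		System.out.println(cfg.getProperty("hibernate.hbm2ddl.auto"));
	}
}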

@@ -204,6 +204,7 @@ public class DatabaseUtils {
			dbHibConnection = DatabaseFactory.initDBConnection(defaultDatabaseFile, config);
		}
	} catch (Exception e) {
		System.out.println("ERROR IN DB INITIALIZATION : " + e.getLocalizedMessage());
		e.printStackTrace();
		// AnalysisLogger.getLogger().trace(e);
	}

@@ -365,11 +365,16 @@ public class Operations {

	public static int chunkize(int numberOfElements, int partitionFactor) {
		int chunksize = numberOfElements / partitionFactor;
		int rest = numberOfElements % partitionFactor;
		if (chunksize == 0)
			chunksize = 1;
		else if (rest != 0)
			chunksize++;
		/*
		int numOfChunks = numberOfElements / chunksize;
		if ((numberOfElements % chunksize) != 0)
			numOfChunks += 1;
		*/

		return chunksize;
	}
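
chunkize rounds the chunk size up whenever the division leaves a remainder, and never returns less than 1, so the elements always fit within the requested number of partitions. A quick check of that behaviour with illustrative values:

public class ChunkizeDemo {
	static int chunkize(int numberOfElements, int partitionFactor) {
		int chunksize = numberOfElements / partitionFactor;
		int rest = numberOfElements % partitionFactor;
		if (chunksize == 0)
			chunksize = 1;   // at least one element per chunk
		else if (rest != 0)
			chunksize++;     // round up so no extra chunk is needed
		return chunksize;
	}

	public static void main(String[] args) {
		System.out.println(chunkize(10, 4)); // 3 -> chunks of 3,3,3,1 across 4 partitions
		System.out.println(chunkize(12, 4)); // 3 -> exact division, no rounding
		System.out.println(chunkize(3, 10)); // 1 -> minimum chunk size
	}
}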

@@ -1,8 +1,14 @@
package org.gcube.dataanalysis.ecoengine.utils;

import java.util.ArrayList;
import java.util.List;

import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.SingleResource;

import com.google.gson.Gson;

public class ResourceFactory {


@@ -42,4 +48,72 @@ public class ResourceFactory {
		return rs.toString();
	}

	public static String getOverallResourceLoad(List<String> resourceLoadJson) {
		long time = -1;
		double val = 0d;
		for (String json : resourceLoadJson) {
			String[] arrays = json.replace("[", "").replace("]", "").split(",");
			Long timestamp = Long.parseLong(arrays[0]);
			Double value = Double.parseDouble(arrays[1]);
			if (time < 0) time = timestamp;
			val += value;
		}

		ResourceLoad rs = new ResourceLoad(time, val);
		return rs.toString();
	}

	public static String getOverallResources(List<String> resourcesJson) {
		List<SingleResource> generalList = new ArrayList<SingleResource>();
		int size = resourcesJson.size();
		if (size == 0) return "[]";
		if (size == 1) return resourcesJson.get(0);

		int i = 1;
		for (String json : resourcesJson) {
			Gson gson = new Gson();
			Resources rr = gson.fromJson("{\"list\":" + json.replace("resID", "resId") + "}", Resources.class);
			List<SingleResource> l = rr.list;
			for (SingleResource sr : l) {
				if (sr.value > 0) {
					sr.resId = sr.resId + "." + i;
					generalList.add(sr);
				}
			}
			i++;
		}

		return HttpRequest.toJSon(generalList).replace("resId", "resID");
	}

	public static void main(String[] args) {
		ArrayList<String> arrayL = new ArrayList<String>();
		arrayL.add("[1339150993573, 1203.0]");
		arrayL.add("[1339150993573, 2503.0]");
		arrayL.add("[1339150993573, 503.0]");
		String s = getOverallResourceLoad(arrayL);
		System.out.println("S1:" + s);

		ArrayList<String> arrayR = new ArrayList<String>();
		arrayR.add("[" +
				"{\"resID\":\"Thread_1\",\"value\":100.0}," +
				"{\"resID\":\"Thread_2\",\"value\":100.0}," +
				"{\"resID\":\"Thread_3\",\"value\":100.0}," +
				"{\"resID\":\"Thread_4\",\"value\":100.0}" +
				"]");

		arrayR.add("[" +
				"{\"resID\":\"Thread_1\",\"value\":100.0}," +
				"{\"resID\":\"Thread_2\",\"value\":100.0}," +
				"{\"resID\":\"Thread_3\",\"value\":100.0}," +
				"{\"resID\":\"Thread_4\",\"value\":100.0}" +
				"]");

		String s2 = getOverallResources(arrayR);
		System.out.println("S2:" + s2);
	}

}
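
A standalone sketch of the load-aggregation rule exercised by main() above: the first timestamp wins and the values are summed. The real method wraps the result in a ResourceLoad, whose exact string format is not shown in this commit:

public class OverallLoadDemo {
	public static void main(String[] args) {
		String[] samples = { "[1339150993573, 1203.0]", "[1339150993573, 2503.0]", "[1339150993573, 503.0]" };
		long time = -1;
		double val = 0d;
		for (String json : samples) {
			// same parsing as getOverallResourceLoad: strip brackets, split on comma
			String[] parts = json.replace("[", "").replace("]", "").split(",");
			if (time < 0) time = Long.parseLong(parts[0].trim());
			val += Double.parseDouble(parts[1].trim());
		}
		System.out.println(time + " -> " + val); // 1339150993573 -> 4209.0
	}
}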