This commit is contained in:
Gianpaolo Coro 2012-06-08 13:23:23 +00:00
parent d171c45507
commit c65adbc7f3
13 changed files with 162 additions and 25 deletions

View File

@ -11,6 +11,6 @@
</sealing> </sealing>
</manifest> </manifest>
<selectedElements exportClassFiles="true" exportJavaFiles="false" exportOutputFolder="false"> <selectedElements exportClassFiles="true" exportJavaFiles="false" exportOutputFolder="false">
<javaElement handleIdentifier="=EcologicalEngine/src"/> <javaElement handleIdentifier="=EcologicalEngine1.2/src"/>
</selectedElements> </selectedElements>
</jardesc> </jardesc>

View File

@ -19,6 +19,8 @@ import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.hibernate.SessionFactory; import org.hibernate.SessionFactory;
import org.jfree.data.category.DefaultCategoryDataset; import org.jfree.data.category.DefaultCategoryDataset;
import com.mchange.v1.util.ArrayUtils;
public class BioClimateAnalysis { public class BioClimateAnalysis {
private static String countHighProbabilityCells = "select count(*) from %1$s where probability>%2$s"; private static String countHighProbabilityCells = "select count(*) from %1$s where probability>%2$s";
@ -182,9 +184,27 @@ public class BioClimateAnalysis {
globalEvolutionAnalysis(null, hspecTables, null, hspecTableNames, probabilityColumn, csquareColumn, threshold); globalEvolutionAnalysis(null, hspecTables, null, hspecTableNames, probabilityColumn, csquareColumn, threshold);
} }
/**
 * De-duplicates a list of table names so each one can be used as a unique
 * chart-series key: a repeated name gets a numeric suffix appended
 * ("name", "name_1", "name_2", ...).
 *
 * @param tablesNames input table names, possibly containing duplicates
 * @return an array of the same length with every entry unique
 */
private String[] checkTableNames(String[] tablesNames) {
	ArrayList<String> newtables = new ArrayList<String>();
	for (String table : tablesNames) {
		int i = 1;
		String originalTable = table;
		// bump the suffix until the candidate name is not yet taken
		while (newtables.contains(table)) {
			table = originalTable + "_" + i;
			i++;
		}
		newtables.add(table);
	}
	// idiomatic copy instead of the original manual element-by-element loop
	return newtables.toArray(new String[tablesNames.length]);
}
public void produceCharts(HashMap<String, HashMap<String, double[]>> GeoMap, String[] tablesNames) { public void produceCharts(HashMap<String, HashMap<String, double[]>> GeoMap, String[] tablesNames) {
// produce a char for each feature // produce a char for each feature
tablesNames = checkTableNames(tablesNames);
producedImages = new HashMap<String,Image> (); producedImages = new HashMap<String,Image> ();
for (String featurename : GeoMap.keySet()) { for (String featurename : GeoMap.keySet()) {
DefaultCategoryDataset chart = new DefaultCategoryDataset(); DefaultCategoryDataset chart = new DefaultCategoryDataset();

View File

@ -10,14 +10,12 @@ public interface Generator extends ComputationalAgent{
public INFRASTRUCTURE getInfrastructure(); public INFRASTRUCTURE getInfrastructure();
public void init(); public void init() throws Exception;
public void setConfiguration(AlgorithmConfiguration config); public void setConfiguration(AlgorithmConfiguration config);
public void shutdown(); public void shutdown();
public void stopProcess();
public String getLoad(); public String getLoad();
public void generate() throws Exception; public void generate() throws Exception;

View File

@ -1,7 +1,5 @@
package org.gcube.dataanalysis.ecoengine.interfaces; package org.gcube.dataanalysis.ecoengine.interfaces;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration; import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
public interface SpatialProbabilityDistributionNode extends SpatialProbabilityDistribution { public interface SpatialProbabilityDistributionNode extends SpatialProbabilityDistribution {
@ -22,4 +20,8 @@ public interface SpatialProbabilityDistributionNode extends SpatialProbabilityDi
public int getNumberOfSpecies(); public int getNumberOfSpecies();
public int getNumberOfGeoInfo(); public int getNumberOfGeoInfo();
public int getOverallProcessedInfo();
public void stop();
} }

View File

@ -140,11 +140,6 @@ public class LocalSimpleSplitGenerator implements Generator {
stopInterrupt = true; stopInterrupt = true;
} }
@Override
public void stopProcess() {
stopInterrupt = true;
}
// waits for thread to be free // waits for thread to be free
private void wait4Thread(int index) { private void wait4Thread(int index) {
// wait until thread is free // wait until thread is free

View File

@ -222,11 +222,6 @@ public class LocalSplitGenerator implements Generator {
} }
@Override
public void stopProcess() {
stopInterrupt = true;
}
// waits for thread to be free // waits for thread to be free
private void wait4Thread(int index) { private void wait4Thread(int index) {
// wait until thread is free // wait until thread is free

View File

@ -37,7 +37,7 @@ public class RainyCloudGenerator implements Generator {
// if (oo.status.equals("DONE")||oo.status.equals("ERROR")) // if (oo.status.equals("DONE")||oo.status.equals("ERROR"))
if (oo.status.equals("DONE")) { if (oo.status.equals("DONE")) {
stopProcess(); shutdown();
return 100f; return 100f;
} else { } else {
float remoteStatus = (float) remoteGenerationManager.retrieveCompletion(); float remoteStatus = (float) remoteGenerationManager.retrieveCompletion();
@ -96,10 +96,6 @@ public class RainyCloudGenerator implements Generator {
@Override @Override
public void shutdown() { public void shutdown() {
}
@Override
public void stopProcess() {
interruptProcessing = true; interruptProcessing = true;
} }

View File

@ -0,0 +1,37 @@
package org.gcube.dataanalysis.ecoengine.test;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.user.GeneratorT;
import org.gcube.dataanalysis.ecoengine.user.ModelerT;
/**
 * Manual performance test: runs a local AQUAMAPS_SUITABLE generation against
 * the test database and prints the overall elapsed time.
 */
public class PerformanceTests {

	public static void main(String[] args) throws Exception {

		// Run parameters for a local suitable-distribution generation.
		String configPath = "./cfg/";
		String csquareTable = "hcaf_d";
		String preprocessedTable = "maxminlat_hspen";
		String envelopeTable = "hspen";
		int numberOfResources = 4;
		String userName = "gianpaolo.coro";
		String generatorName = "AQUAMAPS_SUITABLE";
		String finalDistributionTable = "hspec_suitable_local";
		// removed unused local 'speciesCode' ("Fis-22747") — never referenced

		long t0 = System.currentTimeMillis();

		// Build the generation configuration and point it at the test database.
		AlgorithmConfiguration config = GeneratorT.getGenerationConfig(numberOfResources, generatorName, envelopeTable, preprocessedTable, "", userName, csquareTable, finalDistributionTable, configPath);
		config.setPersistencePath("./");
		config.setParam("ServiceUserName", "gianpaolo.coro");
		// NOTE(review): database credentials are hardcoded in source; move them
		// to an external configuration before sharing/publishing this code.
		config.setParam("DatabaseUserName", "utente");
		config.setParam("DatabasePassword", "d4science");
		config.setParam("DatabaseURL", "jdbc:postgresql://dbtest.research-infrastructures.eu/aquamapsorgupdated");
		config.setParam("DatabaseDriver", "org.hibernate.dialect.PostgreSQLDialect");

		GeneratorT.generate(config);
		System.out.println("OVERALL ELAPSED: " + (System.currentTimeMillis() - t0));
	}
}

View File

@ -27,7 +27,7 @@ public class TestBioClimateAnalysis {
"hspen2012_06_02_08_54_48_004", "hspen2012_06_02_08_54_48_004",
"hspen2012_06_02_08_55_53_415" "hspen2012_06_02_08_55_53_415"
}; };
/*
final String [] envelopeTablesNames = { final String [] envelopeTablesNames = {
"hspen2012_06_01_21_52_47_460", "hspen2012_06_01_21_52_47_460",
"hspen2012_06_01_21_52_47_485", "hspen2012_06_01_21_52_47_485",
@ -40,6 +40,20 @@ public class TestBioClimateAnalysis {
"hspen2012_06_02_08_54_48_004", "hspen2012_06_02_08_54_48_004",
"hspen2012_06_02_08_55_53_415" "hspen2012_06_02_08_55_53_415"
}; };
*/
final String [] envelopeTablesNames = {
"test",
"test",
"test",
"test",
"test",
"test",
"test",
"test",
"test",
"test"
};
bioClimate.speciesEvolutionAnalysis(envelopeTables,envelopeTablesNames, BioClimateAnalysis.salinityMinFeature, BioClimateAnalysis.salinityDefaultRange); bioClimate.speciesEvolutionAnalysis(envelopeTables,envelopeTablesNames, BioClimateAnalysis.salinityMinFeature, BioClimateAnalysis.salinityDefaultRange);

View File

@ -134,10 +134,10 @@ public class DatabaseFactory{
Configuration cfg = new Configuration(); Configuration cfg = new Configuration();
cfg = cfg.configure(DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(new ByteArrayInputStream(document.asXML().getBytes()))); cfg = cfg.configure(DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(new ByteArrayInputStream(document.asXML().getBytes())));
cfg.setProperty("hibernate.hbm2ddl.auto", "create"); cfg.setProperty("hibernate.hbm2ddl.auto", "create");
SessionFactory DBSessionFactory = null; SessionFactory DBSessionFactory = null;
DBSessionFactory = cfg.buildSessionFactory(); DBSessionFactory = cfg.buildSessionFactory();
// close stream // close stream
stream.close(); stream.close();
return DBSessionFactory; return DBSessionFactory;

View File

@ -204,6 +204,7 @@ public class DatabaseUtils {
dbHibConnection = DatabaseFactory.initDBConnection(defaultDatabaseFile, config); dbHibConnection = DatabaseFactory.initDBConnection(defaultDatabaseFile, config);
} }
} catch (Exception e) { } catch (Exception e) {
System.out.println("ERROR IN DB INITIALIZATION : "+e.getLocalizedMessage());
e.printStackTrace(); e.printStackTrace();
// AnalysisLogger.getLogger().trace(e); // AnalysisLogger.getLogger().trace(e);
} }

View File

@ -365,11 +365,16 @@ public class Operations {
public static int chunkize(int numberOfElements,int partitionFactor){ public static int chunkize(int numberOfElements,int partitionFactor){
int chunksize = numberOfElements/ partitionFactor; int chunksize = numberOfElements/ partitionFactor;
int rest = numberOfElements % partitionFactor;
if (chunksize == 0) if (chunksize == 0)
chunksize = 1; chunksize = 1;
else if (rest !=0)
chunksize++;
/*
int numOfChunks = numberOfElements / chunksize; int numOfChunks = numberOfElements / chunksize;
if ((numberOfElements % chunksize) != 0) if ((numberOfElements % chunksize) != 0)
numOfChunks += 1; numOfChunks += 1;
*/
return chunksize; return chunksize;
} }

View File

@ -1,8 +1,14 @@
package org.gcube.dataanalysis.ecoengine.utils; package org.gcube.dataanalysis.ecoengine.utils;
import java.util.ArrayList;
import java.util.List;
import org.gcube.contentmanagement.graphtools.utils.HttpRequest; import org.gcube.contentmanagement.graphtools.utils.HttpRequest;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad; import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.ResourceLoad;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources; import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.Resources;
import org.gcube.dataanalysis.ecoengine.connectors.livemonitor.SingleResource;
import com.google.gson.Gson;
public class ResourceFactory { public class ResourceFactory {
@ -42,4 +48,72 @@ public class ResourceFactory {
return rs.toString(); return rs.toString();
} }
/**
 * Aggregates several "[timestamp, value]" JSON snippets into one
 * ResourceLoad: the timestamp of the first entry paired with the sum of
 * all values. Returns the ResourceLoad's string form.
 *
 * @param resourceLoadJson list of "[timestamp, value]" strings
 * @return the aggregated load serialized via {@code ResourceLoad.toString()}
 */
public static String getOverallResourceLoad(List<String> resourceLoadJson) {
	long firstTimestamp = -1;
	double total = 0d;
	for (String entry : resourceLoadJson) {
		// strip the surrounding brackets, then split into [timestamp, value]
		String[] parts = entry.replace("[", "").replace("]", "").split(",");
		long timestamp = Long.parseLong(parts[0]);
		double value = Double.parseDouble(parts[1]);
		// keep only the first timestamp seen; later ones are ignored
		if (firstTimestamp < 0)
			firstTimestamp = timestamp;
		total += value;
	}
	return new ResourceLoad(firstTimestamp, total).toString();
}
/**
 * Merges several per-worker resource reports (each a JSON array of
 * SingleResource entries) into one JSON array, keeping only resources with
 * positive value and suffixing each resId with the 1-based index of its
 * source report.
 *
 * @param resourcesJson one JSON array string per worker
 * @return merged JSON array; "[]" when the input is empty, the single
 *         report unchanged when there is exactly one
 */
public static String getOverallResources(List<String> resourcesJson) {
	List<SingleResource> generalList = new ArrayList<SingleResource>();
	int size = resourcesJson.size();
	if (size == 0)
		return "[]";
	if (size == 1)
		return resourcesJson.get(0);
	// hoisted out of the loop: Gson is reusable, no need to rebuild per report
	Gson gson = new Gson();
	int i = 1;
	for (String json : resourcesJson) {
		// wrap the raw array so it matches the Resources bean; the bean field
		// is "resId" while the wire format uses "resID"
		Resources rr = gson.fromJson("{\"list\":" + json.replace("resID", "resId") + "}", Resources.class);
		for (SingleResource sr : rr.list) {
			if (sr.value > 0) {
				sr.resId = sr.resId + "." + i;
				generalList.add(sr);
			}
		}
		i++;
	}
	return HttpRequest.toJSon(generalList).replace("resId", "resID");
}
/**
 * Ad-hoc demo of the aggregation helpers: sums three load samples, then
 * merges two identical per-thread resource reports.
 */
public static void main(String[] args) {
	// Demo 1: aggregate three "[timestamp, value]" load samples.
	ArrayList<String> loadSamples = new ArrayList<String>();
	loadSamples.add("[1339150993573, 1203.0]");
	loadSamples.add("[1339150993573, 2503.0]");
	loadSamples.add("[1339150993573, 503.0]");
	String s = getOverallResourceLoad(loadSamples);
	System.out.println("S1:" + s);

	// Demo 2: merge two identical four-thread resource reports.
	String report = "[" +
			"{\"resID\":\"Thread_1\",\"value\":100.0}," +
			"{\"resID\":\"Thread_2\",\"value\":100.0}," +
			"{\"resID\":\"Thread_3\",\"value\":100.0}," +
			"{\"resID\":\"Thread_4\",\"value\":100.0}" +
			"]";
	ArrayList<String> resourceReports = new ArrayList<String>();
	resourceReports.add(report);
	resourceReports.add(report);
	String s2 = getOverallResources(resourceReports);
	System.out.println("S2:" + s2);
}
} }