Gianpaolo Coro 2012-05-03 16:17:48 +00:00
parent f9311ce5fe
commit cf2b74436a
6 changed files with 101 additions and 4 deletions

org/gcube/dataanalysis/ecoengine/configuration/AlgorithmConfiguration.java (View File)

@@ -1,6 +1,7 @@
package org.gcube.dataanalysis.ecoengine.configuration;
import java.io.FileInputStream;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
@@ -12,7 +13,9 @@ import org.hibernate.SessionFactory;
import com.rapidminer.RapidMiner;
-public class AlgorithmConfiguration extends LexicalEngineConfiguration{
+public class AlgorithmConfiguration extends LexicalEngineConfiguration implements Serializable{
private static final long serialVersionUID = 1L;
public static Properties getProperties(String absoluteFilePath) {
Properties props = new Properties();
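
With the class now marked Serializable, an AlgorithmConfiguration instance can be written and read back with standard Java object serialization, which is exactly what the marker interface enables. A minimal sketch, assuming the superclass and the configuration's fields support serialization as well; the file name is illustrative:

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;

public class ConfigSerializationSketch {
	public static void main(String[] args) throws Exception {
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		// write the configuration with plain Java serialization
		ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream("config.bin"));
		oos.writeObject(config);
		oos.close();
		// read it back
		ObjectInputStream ois = new ObjectInputStream(new FileInputStream("config.bin"));
		AlgorithmConfiguration restored = (AlgorithmConfiguration) ois.readObject();
		ois.close();
		System.out.println(restored != null);
	}
}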

InterpolateTables.java (View File)

@@ -148,7 +148,12 @@ public class InterpolateTables {
DatabaseFactory.executeSQLUpdate(copyFileQuery, referencedbConnection);
status = Math.min(status + statusstep, 99);
}
AnalysisLogger.getLogger().debug("DELETING ALL TEMPORARY FILES");
// remove the intermediate files produced during the interpolation
for (int i = 0; i < producedfiles.length; i++) {
producedfiles[i].delete();
}
interpolatedTables[interpolatedTables.length - 1] = table2;
AnalysisLogger.getLogger().debug("ALL TABLES HAVE BEEN PRODUCED");

AquamapsSuitable.java (View File)

@@ -41,7 +41,7 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
if ((config.getParam("PreprocessedTable")!=null)&&(config.getParam("PreprocessedTable").length()>0))
hspenMinMaxLat = config.getParam("PreprocessedTable");
-AnalysisLogger.getLogger().trace("Aquamaps Algorithm Init(->getting min max latitudes from "+hspenMinMaxLat);
+AnalysisLogger.getLogger().trace("Aquamaps Algorithm Init ->getting min max latitudes from "+hspenMinMaxLat);
allSpeciesObservations = new HashMap<String, List<Object>>();
List<Object> SpeciesObservations = DatabaseFactory.executeSQLQuery(String.format(selectAllSpeciesObservationQuery, hspenMinMaxLat), dbHibConnection);
@@ -54,7 +54,7 @@ public class AquamapsSuitable implements SpatialProbabilityDistributionTable{
allSpeciesObservations.put((String)speciesid, maxminInfo);
}
-AnalysisLogger.getLogger().trace("Aquamaps Algorithm Init(->init finished");
+AnalysisLogger.getLogger().trace("Aquamaps Algorithm Init ->init finished");
}
@Override

org/gcube/dataanalysis/ecoengine/utils/DatabaseUtils.java (View File)

@@ -2,6 +2,8 @@ package org.gcube.dataanalysis.ecoengine.utils;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.hibernate.SessionFactory;
public class DatabaseUtils {
@@ -175,4 +177,46 @@
return creationStatement;
}
public static void createBigTable(boolean createTable, String table, String dbdriver, String dbuser, String dbpassword, String dburl, String creationStatement, SessionFactory dbHibConnection) throws Exception {
if (createTable) {
try {
DatabaseFactory.executeSQLUpdate("drop table " + table, dbHibConnection);
} catch (Exception e) {
// the table may not exist yet: a failed drop can be safely ignored
// e.printStackTrace();
}
DatabaseFactory.executeUpdateNoTransaction(creationStatement, dbdriver, dbuser, dbpassword, dburl, true);
}
}
public static SessionFactory initDBSession(AlgorithmConfiguration config) {
SessionFactory dbHibConnection = null;
try {
if ((config != null) && (config.getConfigPath() != null)) {
String defaultDatabaseFile = config.getConfigPath() + AlgorithmConfiguration.defaultConnectionFile;
config.setDatabaseDriver(config.getParam("DatabaseDriver"));
config.setDatabaseUserName(config.getParam("DatabaseUserName"));
config.setDatabasePassword(config.getParam("DatabasePassword"));
config.setDatabaseURL(config.getParam("DatabaseURL"));
dbHibConnection = DatabaseFactory.initDBConnection(defaultDatabaseFile, config);
}
} catch (Exception e) {
e.printStackTrace();
// AnalysisLogger.getLogger().trace(e);
}
return dbHibConnection;
}
public static void closeDBConnection(SessionFactory dbHibConnection){
try{
dbHibConnection.close();
}catch(Exception e){
// the session may already be closed: ignore
}
}
}
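
Together the three new helpers cover a typical lifecycle: open a Hibernate session from the configuration, drop and recreate a working table, and close the session when done. A minimal usage sketch; the connection values, table name, and DDL are illustrative, and setConfigPath/setParam are assumed to be the setter counterparts of the accessors used above:

import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.hibernate.SessionFactory;

public class DatabaseUtilsSketch {
	public static void main(String[] args) throws Exception {
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setConfigPath("./cfg/"); // folder holding the default connection file
		config.setParam("DatabaseDriver", "org.postgresql.Driver");
		config.setParam("DatabaseUserName", "gcube");
		config.setParam("DatabasePassword", "password");
		config.setParam("DatabaseURL", "jdbc:postgresql://localhost/testdb");
		// open the Hibernate session from the configuration
		SessionFactory session = DatabaseUtils.initDBSession(config);
		try {
			// drop "mytable" if it exists, then recreate it outside a transaction
			DatabaseUtils.createBigTable(true, "mytable",
				config.getParam("DatabaseDriver"), config.getParam("DatabaseUserName"),
				config.getParam("DatabasePassword"), config.getParam("DatabaseURL"),
				"create table mytable (id int, value real)", session);
		} finally {
			DatabaseUtils.closeDBConnection(session);
		}
	}
}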

Operations.java (View File)

@@ -362,4 +362,16 @@ public class Operations {
double [] parabinterp = parabolicInterpolation(1,10,9);
System.out.println("");
}
// computes the number of elements per chunk when numberOfElements is split across partitionFactor partitions
public static int chunkize(int numberOfElements,int partitionFactor){
int chunksize = numberOfElements/ partitionFactor;
if (chunksize == 0)
chunksize = 1;
// note: numOfChunks is computed but never used; the method returns only the chunk size
int numOfChunks = numberOfElements / chunksize;
if ((numberOfElements % chunksize) != 0)
numOfChunks += 1;
return chunksize;
}
}
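
For instance, chunkize(10, 3) gives a chunk size of 10/3 = 3, so ten elements are processed as chunks of 3+3+3+1, while a partition factor larger than the element count clamps the chunk size to 1. A quick illustration:

public class ChunkizeSketch {
	public static void main(String[] args) {
		System.out.println(Operations.chunkize(10, 3)); // 3 elements per chunk
		System.out.println(Operations.chunkize(2, 5));  // clamps to 1 element per chunk
	}
}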

org/gcube/dataanalysis/ecoengine/utils/Transformations.java (View File)

@@ -1,10 +1,16 @@
package org.gcube.dataanalysis.ecoengine.utils;
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileWriter;
import org.gcube.contentmanagement.graphtools.data.BigSamplesTable;
import org.gcube.contentmanagement.lexicalmatcher.utils.FileTools;
import com.rapidminer.example.ExampleSet;
import com.rapidminer.example.table.DataRow;
import com.rapidminer.example.table.ExampleTable;
import com.thoughtworks.xstream.XStream;
public class Transformations {
@@ -115,4 +121,31 @@ public class Transformations {
return out;
}
public static Object getObjectFromFile(String file) throws Exception{
/*
FileInputStream fis = new FileInputStream(file);
ObjectInputStream ois = new ObjectInputStream(fis);
return ois.readObject();
*/
// deserialize the object from its XStream XML representation,
// closing the stream once the object has been rebuilt
XStream xstream = new XStream();
FileInputStream fis = new FileInputStream(file);
try {
return xstream.fromXML(fis);
} finally {
fis.close();
}
// return xstream.fromXML(FileTools.loadString(file, "UTF-8"));
}
public static void dumpObjectToFile(String file, Object toWrite) throws Exception {
// serialize the object to XML via XStream and write it to file
XStream xstream = new XStream();
BufferedWriter bw = new BufferedWriter(new FileWriter(file));
bw.write(xstream.toXML(toWrite));
bw.close();
/*
FileOutputStream fos = new FileOutputStream(file);
ObjectOutputStream ois = new ObjectOutputStream(fos);
ois.writeObject(toWrite);
*/
}
}
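
The two helpers round-trip any object through XStream's XML form, with no Serializable requirement on the payload. A minimal sketch; the file path and map contents are illustrative:

import java.util.HashMap;

public class XmlRoundTripSketch {
	public static void main(String[] args) throws Exception {
		HashMap<String, Double> scores = new HashMap<String, Double>();
		scores.put("Fis-22747", 0.87); // illustrative payload
		// dump to XML and load back
		Transformations.dumpObjectToFile("./scores.xml", scores);
		@SuppressWarnings("unchecked")
		HashMap<String, Double> restored = (HashMap<String, Double>) Transformations.getObjectFromFile("./scores.xml");
		System.out.println(restored.get("Fis-22747")); // 0.87
	}
}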