git-svn-id: http://svn.research-infrastructures.eu/public/d4science/gcube/trunk/application/perform-service@176810 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
parent 37029e9e2d
commit decc90cccd
@@ -121,6 +121,28 @@ public class ExportCSVQuery extends Query {
		return String.format("SELECT %1$s FROM %2$s %3$s",
				selectedFields, tablename, conditionString);
	}

	public String getQueryForFields(String...fields) {
		// Build the comma-separated field list, then drop the trailing comma
		StringBuilder b=new StringBuilder();
		for(String f:fields)
			b.append(f+",");

		String selectedFields=b.toString().substring(0,b.lastIndexOf(","));

		// Wrap each mapped field in a CASE expression translating stored values into their labels
		for(Entry<String,Map<String,String>> mapping:mappings.entrySet()) {
			StringBuilder caseBuilder=new StringBuilder("CASE "+mapping.getKey()+" ");
			for(Entry<String,String> condition: mapping.getValue().entrySet())
				caseBuilder.append(String.format("WHEN '%1$s' THEN '%2$s' ", condition.getKey(),condition.getValue()));
			// No trailing comma here: the surrounding field list already separates its entries
			caseBuilder.append("END AS "+mapping.getKey());

			// String#replaceAll returns a new String, so the result must be reassigned
			selectedFields=selectedFields.replaceAll(mapping.getKey(), caseBuilder.toString());
		}

		String conditionString=getConditionString();
		if(conditionString.length()>0) conditionString="WHERE "+conditionString;

		return String.format("SELECT %1$s FROM %2$s %3$s",
				selectedFields, tablename, conditionString);
	}
}

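A minimal standalone sketch (not part of the commit) of the SELECT that getQueryForFields is intended to emit; the field, table, mapping and condition values below are invented for illustration.

import java.util.LinkedHashMap;
import java.util.Map;

public class CaseMappingSketch {
	public static void main(String[] args) {
		String selectedFields="batch_id,farm_id";               // hypothetical field list
		String tablename="grow_out_kpi";                        // hypothetical table
		Map<String,String> farmMapping=new LinkedHashMap<>();
		farmMapping.put("1001","Farm A");
		farmMapping.put("1002","Farm B");

		// Same CASE-building steps as getQueryForFields above
		StringBuilder caseBuilder=new StringBuilder("CASE farm_id ");
		for(Map.Entry<String,String> e:farmMapping.entrySet())
			caseBuilder.append(String.format("WHEN '%1$s' THEN '%2$s' ", e.getKey(), e.getValue()));
		caseBuilder.append("END AS farm_id");

		selectedFields=selectedFields.replaceAll("farm_id", caseBuilder.toString());
		System.out.println(String.format("SELECT %1$s FROM %2$s %3$s",
				selectedFields, tablename, "WHERE internal_routine_id = 42"));
		// Prints: SELECT batch_id,CASE farm_id WHEN '1001' THEN 'Farm A' WHEN '1002' THEN 'Farm B'
		//         END AS farm_id FROM grow_out_kpi WHERE internal_routine_id = 42
	}
}
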
@@ -1,6 +1,5 @@
package org.gcube.application.perform.service.engine.impl;

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;

@@ -34,7 +33,6 @@ import org.gcube.application.perform.service.engine.model.InvalidRequestException;
import org.gcube.application.perform.service.engine.model.importer.AnalysisType;
import org.gcube.application.perform.service.engine.model.importer.ImportRoutineDescriptor;
import org.gcube.application.perform.service.engine.model.importer.ImportedTable;
import org.gcube.application.perform.service.engine.utils.StorageUtils;
import org.gcube.data.analysis.dataminermanagercl.shared.data.computations.ComputationId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -60,11 +58,8 @@ public class PerformanceManagerImpl implements PerformanceManager{
		for(ImportedTable t:tables) {
			SchemaDefinition schema=t.getSchema();
			if(schema.getAnalysisEnabled()) {
				log.debug("Exporting {} : {} ",schema.getRelatedDescription(),t.getTableName());
				File csv=t.exportCSV(request);
				String storageId=StorageUtils.putOntoStorage(csv);

				toReturn.put(t.getSchema().getRelatedDescription(), storageId);
				log.debug("Exporting {} : {} ",schema.getRelatedDescription(),t.getTableName());
				toReturn.putAll(t.exportCSV(request));
			}
		}
		return toReturn;

@@ -1,5 +1,6 @@
package org.gcube.application.perform.service.engine.impl;

import java.util.ArrayList;
import java.util.Properties;

import org.gcube.application.perform.service.engine.model.importer.AnalysisType;

@@ -15,7 +16,7 @@ public class SchemaDefinition {
	private static final String ROUTINE_ID="routine";
	private static final String CSV="csv";
	private static final String ENABLE_ANALYSIS="enable_analysis";

	private static final String REPORT_FIELDS="report_fields";

	public String getRelatedDescription() {
		return relatedDescription;

@@ -64,6 +65,9 @@ public class SchemaDefinition {
		return speciesField;
	}

	public ArrayList<String> getToReportFields() {
		return toReportFields;
	}

	public SchemaDefinition(AnalysisType relatedAnalysis, Properties props) {
		super();

@@ -77,7 +81,11 @@ public class SchemaDefinition {
		this.routineIdFieldName=props.getProperty(ROUTINE_ID);
		this.analysisEnabled=Boolean.parseBoolean(props.getProperty(ENABLE_ANALYSIS, "false"));

		if(props.containsKey(REPORT_FIELDS)) {
			String fieldList=props.getProperty(REPORT_FIELDS);
			for(String field: fieldList.split(","))
				toReportFields.add(field);
		}

@@ -96,6 +104,10 @@ public class SchemaDefinition {
	private String routineIdFieldName;

	private ArrayList<String> toReportFields=new ArrayList<>();

	private String areaField="area";
	private String periodField="period";
	private String quarterField="quarter";

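A self-contained sketch of how the new report_fields property is parsed into toReportFields; the comma split mirrors the constructor above. The second field in the sample value is invented (the sample configuration at the end of this commit only sets report_fields=aggregated_batch_id).

import java.util.ArrayList;
import java.util.Properties;

public class ReportFieldsSketch {
	public static void main(String[] args) {
		Properties props=new Properties();
		props.setProperty("report_fields","aggregated_batch_id,producer_association_id");

		ArrayList<String> toReportFields=new ArrayList<>();
		if(props.containsKey("report_fields")) {
			// Same comma-split used by SchemaDefinition(AnalysisType, Properties)
			for(String field: props.getProperty("report_fields").split(","))
				toReportFields.add(field);
		}
		System.out.println(toReportFields); // [aggregated_batch_id, producer_association_id]
	}
}
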
@@ -3,10 +3,12 @@ package org.gcube.application.perform.service.engine.model.importer;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.HashMap;

@@ -23,10 +25,11 @@ import org.gcube.application.perform.service.engine.impl.Query;
import org.gcube.application.perform.service.engine.impl.SchemaDefinition;
import org.gcube.application.perform.service.engine.model.CSVExportRequest;
import org.gcube.application.perform.service.engine.model.DBField;
import org.gcube.application.perform.service.engine.model.DBField.ImportRoutine;
import org.gcube.application.perform.service.engine.model.DBQueryDescriptor;
import org.gcube.application.perform.service.engine.model.InternalException;
import org.gcube.application.perform.service.engine.model.InvalidRequestException;
import org.gcube.application.perform.service.engine.model.DBField.ImportRoutine;
import org.gcube.application.perform.service.engine.utils.StorageUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -44,19 +47,19 @@ public class ImportedTable {
	private ArrayList<String> csvFields; // Fields actually expected in csv

	private String tablename;
//	private DBField routineIdField;
//	private DBField routineIdField;

//	private String farmUUIDField;
//	private String associationUUIDField;
//	private String companyUUIDField;
//	private String batchUUIDField;
//	private Boolean analysisEnabled;
//	private String farmUUIDField;
//	private String associationUUIDField;
//	private String companyUUIDField;
//	private String batchUUIDField;
//	private Boolean analysisEnabled;

	private Query insertQuery;

	private SchemaDefinition schema;


	public ImportedTable(String tablename, SchemaDefinition schema, ArrayList<DBField> csvFieldsDefinition) {
		super();
		this.schema=schema;

@@ -65,13 +68,13 @@

		csvFields=new ArrayList<>();
		labels=new HashMap<>();

		for(DBField field:csvFieldsDefinition) {
			String escaped=escapeString(field.getFieldName());
			csvFields.add(field.getFieldName());
			labels.put(field.getFieldName(), new DBField(field.getType(),escaped));
		}

		insertQuery=prepareInsertionQuery();

	}

@@ -79,34 +82,34 @@
	private DBField getRoutineIdField() {
		return new DBField(Types.BIGINT,schema.getRoutineIdFieldName());
	}


	private Query prepareInsertionQuery() {

		StringBuilder fieldList=new StringBuilder();
		StringBuilder valueString=new StringBuilder();
		ArrayList<DBField> queryFields=new ArrayList<>();

		for(DBField f:labels.values()) {
			queryFields.add(f);
			fieldList.append(f.getFieldName()+",");
			valueString.append("?,");
		}

		queryFields.add(getRoutineIdField());

		String insertSQL= String.format("INSERT INTO %1$s (%2$s) VALUES (%3$s)", tablename,
				fieldList+getRoutineIdField().getFieldName(),valueString+"?");

		return new Query(insertSQL, queryFields.toArray(new DBField[queryFields.size()]));
	}


	public String createStatement() {
		StringBuilder fieldDefinitions=new StringBuilder();

		for(DBField f:labels.values()) {
			String type="text";
			switch(f.getType()) {

@@ -115,20 +118,20 @@
			case Types.REAL : type="real";
				break;
			}

			fieldDefinitions.append(f.getFieldName()+" "+type+",");

		}

		String standardDefinitions=
				String.format( "%1$s bigint,"
						+ "FOREIGN KEY (%1$s) REFERENCES "+ImportRoutine.TABLE+"("+ImportRoutine.ID+")",getRoutineIdField().getFieldName());

		return String.format("CREATE TABLE IF NOT EXISTS %1$s (%2$s, %3$s)",
				tablename,fieldDefinitions.substring(0,fieldDefinitions.lastIndexOf(",")),standardDefinitions);
	}


	/**
	 * Checks if passed set of labels is
	 *

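A standalone sketch of the DDL shape createStatement() assembles above: one column per escaped CSV label plus a bigint routine-id column with a foreign key to the import-routine table. Table and column names are invented, and IMPORT_ROUTINE(ID) stands in for whatever ImportRoutine.TABLE and ImportRoutine.ID resolve to in the service.

public class CreateStatementSketch {
	public static void main(String[] args) {
		String tablename="grow_out_kpi";                               // hypothetical
		String routineIdField="internal_routine_id";
		String fieldDefinitions="\"weight\" real,\"feed_type\" text,"; // built per label in the real code

		String standardDefinitions=String.format("%1$s bigint,"
				+ "FOREIGN KEY (%1$s) REFERENCES IMPORT_ROUTINE(ID)", routineIdField);

		System.out.println(String.format("CREATE TABLE IF NOT EXISTS %1$s (%2$s, %3$s)",
				tablename,
				fieldDefinitions.substring(0, fieldDefinitions.lastIndexOf(",")),
				standardDefinitions));
		// CREATE TABLE IF NOT EXISTS grow_out_kpi ("weight" real,"feed_type" text,
		//   internal_routine_id bigint,FOREIGN KEY (internal_routine_id) REFERENCES IMPORT_ROUTINE(ID))

		// The companion INSERT from prepareInsertionQuery() would then have the shape:
		// INSERT INTO grow_out_kpi ("weight","feed_type",internal_routine_id) VALUES (?,?,?)
	}
}
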
@@ -172,19 +175,18 @@
		DBField routineField=getRoutineIdField();
		Query cleanQuery=new Query(String.format("DELETE FROM %1$s WHERE %2$s =?", this.tablename,routineField.getFieldName()),
				new DBField[] {routineField});

		return cleanQuery.get(conn, new DBQueryDescriptor(routineField,toClean.getId())).executeUpdate();

	}

	public File exportCSV(CSVExportRequest request) throws InvalidRequestException, SQLException, InternalException, IOException {
	public Map<String,String> exportCSV(CSVExportRequest request) throws InvalidRequestException, SQLException, InternalException, IOException {

		log.debug("Exporting {} from {} ",request, this);

		Connection conn= DataBaseManager.get().getConnection();
		FileWriter writer=null;
		CSVPrinter printer=null;

		try {
			ExportCSVQuery exportQuery=new ExportCSVQuery("",null,request,schema,labels);
			exportQuery.setFieldList(labels.values());

@@ -220,51 +222,88 @@
			exportQuery.setMapping(schema.getFarmUUIDField(), farmMapping);
			exportQuery.setMapping(schema.getBatchUUIDField(), batchMapping);


			log.trace("Performing actual query towards {} ",tablename);

			Map<String,String> toReturn=new HashMap<String,String>();

			String sqlExport=exportQuery.getQuery();
			log.debug("Query is {} ",sqlExport);
			ResultSet csvRs=conn.createStatement().executeQuery(sqlExport);
			Statement stmt=conn.createStatement();
			ResultSet csvRs=stmt.executeQuery(sqlExport);

			File toReturn=File.createTempFile("csv_out", ".csv");
			writer=new FileWriter(toReturn);

			printer = CSVFormat.DEFAULT.withHeader(csvFields.toArray(new String[csvFields.size()])).print(writer);

			printer.printRecords(csvRs);
			toReturn.put(this.schema.getRelatedDescription(), putIntoStorage(csvRs, csvFields.toArray(new String[csvFields.size()])));
			if(schema.getToReportFields().size()>0) {
				ArrayList<String> personalLabels=schema.getToReportFields();

				log.trace("Extracting {} from {} ",tablename);
				// Extract personal found values from same query
				ArrayList<String> toExtractFields=new ArrayList<String>();
				for(String label:personalLabels) {
					toExtractFields.add(labels.get(label).getFieldName());
				}
				String sqlPersonal=exportQuery.getQueryForFields(toExtractFields.toArray(new String[toExtractFields.size()]));
				log.debug("Query is {} ",sqlPersonal);
				csvRs=stmt.executeQuery(sqlPersonal);

				toReturn.put(this.schema.getRelatedDescription()+"_internal", putIntoStorage(csvRs, personalLabels.toArray(new String[personalLabels.size()])));
			}

			return toReturn;
		}finally {
			conn.close();
			if(printer!=null) {
				printer.flush();
				printer.close();
			}
		}
	}


	public SchemaDefinition getSchema() {
		return schema;
	}

	int MAX_LENGTH=25;

	private String escapeString(String fieldname) {
		String toReturn=fieldname;
		if(toReturn.length()>MAX_LENGTH)
			toReturn=toReturn.substring(0, MAX_LENGTH);

		DBField clashing=new DBField(0,"\""+toReturn+"\"");
		int counter=1;
		while(labels.containsValue(clashing)) {
			clashing=new DBField(0,"\""+toReturn+"_"+counter+"\"");
			counter++;
		}

		return clashing.getFieldName();
	}

	private static final String putIntoStorage(ResultSet toExport,String...headers) throws IOException, SQLException {
		CSVPrinter printer=null;
		File dataFile=null;
		try {
			dataFile=File.createTempFile("csv_out", ".csv");
			printer = CSVFormat.DEFAULT.withHeader(headers).print(new FileWriter(dataFile));

			printer.printRecords(toExport);
			printer.flush();
			return StorageUtils.putOntoStorage(dataFile);
		}finally {
			if(printer!=null) {
				printer.close();
			}
			if(dataFile!=null) {
				Files.deleteIfExists(dataFile.toPath());
			}
		}
	}

}

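A standalone illustration of the column-name escaping performed by escapeString above: headers are truncated to MAX_LENGTH characters, double-quoted, and suffixed with _1, _2, ... when the truncated form clashes with an already assigned label. The header names below are invented.

import java.util.HashSet;
import java.util.Set;

public class EscapeSketch {
	static final int MAX_LENGTH=25;

	static String escape(String fieldname, Set<String> taken) {
		String base=fieldname.length()>MAX_LENGTH?fieldname.substring(0, MAX_LENGTH):fieldname;
		String candidate="\""+base+"\"";
		int counter=1;
		while(taken.contains(candidate))
			candidate="\""+base+"_"+(counter++)+"\"";
		taken.add(candidate);
		return candidate;
	}

	public static void main(String[] args) {
		Set<String> taken=new HashSet<>();
		System.out.println(escape("average_weight_at_stocking_g", taken));  // "average_weight_at_stockin"
		System.out.println(escape("average_weight_at_stocking_kg", taken)); // "average_weight_at_stockin_1"
	}
}
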
@@ -5,4 +5,5 @@ association=producer_association_id
batch=aggregated_batch_id
routine=internal_routine_id
csv=csv/Grow_out_Aggregated_Batch_Data_Entry_KPI.csv
enable_analysis=true
enable_analysis=true
report_fields=aggregated_batch_id