Fabio Sinibaldi 2019-02-06 17:35:12 +00:00
parent 33173ead11
commit 1733188c79
18 changed files with 286 additions and 92 deletions

View File

@@ -60,6 +60,7 @@
<dependency>
<groupId>org.gcube.core</groupId>
<artifactId>common-encryption</artifactId>
<version>1.0.0-SNAPSHOT</version>
</dependency>

View File

@@ -20,13 +20,17 @@ public class ExportCSVQuery extends Query {
private Map<String,DBField> actualStructure;
public ExportCSVQuery(String query, DBField[] fields, CSVExportRequest theRequest, SchemaDefinition schema, Map<String,DBField> actualStructure) {
super(query, fields);
private ArrayList<String> exportCSVFieldOrder;
public ExportCSVQuery(String query, DBField[] queryParams, CSVExportRequest theRequest,
SchemaDefinition schema, Map<String,DBField> actualStructure, ArrayList<String> exportFieldsOrder) {
super(query, queryParams);
this.theRequest=theRequest;
this.schema=schema;
this.actualStructure=actualStructure;
exportCSVFieldOrder=exportFieldsOrder;
}
@@ -45,8 +49,8 @@ public class ExportCSVQuery extends Query {
@Override
public String getQuery() {
String selectedFields=replaceWithMappings(getFieldList());
// String selectedFields=replaceWithMappings(getFieldList());
String selectedFields=getFieldList();
String conditionString =getConditionString();
if(conditionString.length()>0) conditionString= "WHERE "+conditionString;
@@ -64,7 +68,8 @@ public class ExportCSVQuery extends Query {
log.debug("Creating query for fields {} against table {} ",b,tablename);
String selectedFields=replaceWithMappings(b.toString());
// String selectedFields=replaceWithMappings(b.toString());
String selectedFields=b.toString();
String condition=getFilterWithMapping(filterMappingKey);
@@ -80,15 +85,16 @@ public class ExportCSVQuery extends Query {
String actualField=actualField(mappingFilterKey);
log.debug("Setting filter By Mappings for field {}, size {} ",actualField,mappings.get(mappingFilterKey).size());
conditionBuilder.append("(");
for(Entry<String,String> mappingFilter:mappings.get(mappingFilterKey).entrySet()) {
conditionBuilder.append(String.format("%1$s = '%2$s' OR", actualField,mappingFilter.getKey()));
}
conditionBuilder.setLength(conditionBuilder.lastIndexOf("OR"));
conditionBuilder.append(")");
// Add selection filter..
String filteringCondition=getConditionString();
if(filteringCondition.length()>0) conditionBuilder.append(" AND "+filteringCondition);
if(filteringCondition.length()>0) conditionBuilder.append(" AND ("+filteringCondition+")");
}else {
log.debug("No mappings to search For ");
conditionBuilder.append("FALSE");
@@ -97,25 +103,26 @@ public class ExportCSVQuery extends Query {
}
private String replaceWithMappings(String selectionFields) {
String toReturn=selectionFields;
// fieldLabel -> (uuid->name)
for(Entry<String,Map<String,String>> mapping: mappings.entrySet()) {
if(exists(mapping.getKey())) {
String actualMapped=actualField(mapping.getKey());
if(toReturn.contains(actualMapped)) {
StringBuilder caseBuilder=new StringBuilder("CASE "+actualMapped);
for(Entry<String,String> condition: mapping.getValue().entrySet())
caseBuilder.append(String.format("WHEN '%1$s' THEN '%2$s'", condition.getKey(),condition.getValue()));
caseBuilder.append("END AS "+actualMapped);
toReturn=toReturn.replace(actualMapped, caseBuilder.toString());
}
}
}
return toReturn.toString();
}
// private String replaceWithMappings(String selectionFields) {
// String toReturn=selectionFields;
// // fieldLabel -> (uuid->name)
// for(Entry<String,Map<String,String>> mapping: mappings.entrySet()) {
// if(exists(mapping.getKey())) {
// String actualMapped=actualField(mapping.getKey());
// if(toReturn.contains(actualMapped)) {
// StringBuilder caseBuilder=new StringBuilder("CASE "+actualMapped);
// for(Entry<String,String> condition: mapping.getValue().entrySet())
// caseBuilder.append(String.format("WHEN '%1$s' THEN '%2$s'", condition.getKey(),condition.getValue()));
//
// caseBuilder.append(String.format(" ELSE %1$s END AS %1$s", actualMapped));
//
// toReturn=toReturn.replace(actualMapped, caseBuilder.toString());
// }
// }
// }
//
// return toReturn.toString();
// }
private String getConditionString() {
@@ -141,7 +148,7 @@ public class ExportCSVQuery extends Query {
StringBuilder toReturn=new StringBuilder("");
for(String orGroup:orGroups) {
toReturn.append(orGroup+ " AND ");
toReturn.append("("+orGroup+") AND ");
}
if(toReturn.length()>0)
@@ -155,8 +162,10 @@ public class ExportCSVQuery extends Query {
private String getFieldList() {
StringBuilder b=new StringBuilder();
for(DBField f:actualStructure.values())
for(String label:exportCSVFieldOrder) {
DBField f = actualStructure.get(label);
b.append(f.getFieldName()+",");
}
return b.toString().substring(0,b.lastIndexOf(","));
}

View File

@@ -258,20 +258,7 @@ public class PerformanceManagerImpl implements PerformanceManager{
});
});
// CSVRecord record=parser.getRecords().get(0);
//
// for(Entry<String,Integer> header:headers.entrySet()) {
// String value=record.get(header.getKey());
// String name=header.getKey();
// int type=Types.VARCHAR;
//
// //Deanonimized fields will always contain strings
// if(!deanonimizationLabels.contains(name)) {
// //NB INT will be managed as real in order to deal with Dataminer output format
// if(value.matches(FLOAT_REGEX)||value.matches(INTEGER_REGEX)) type=Types.REAL;
// }
// toReturn.add(new DBField(type, name));
// }
return toReturn;
}finally{

View File

@@ -14,11 +14,19 @@ public class SchemaDefinition {
private static final String BATCH="batch";
private static final String COMPANY="company";
private static final String ROUTINE_ID="routine";
private static final String AREA="area";
private static final String SPECIES="species";
private static final String PERIOD="period";
private static final String QUARTER="quarter";
private static final String CSV="csv";
private static final String ENABLE_ANALYSIS="enable_analysis";
private static final String REPORT_FIELDS="report_fields";
private static final String REPORT_LABELS="report_labels";
public String getRelatedDescription() {
return relatedDescription;
}
@@ -83,7 +91,14 @@ public class SchemaDefinition {
this.associationUUIDField = props.getProperty(ASSOCIATION);
this.batchUUIDField = props.getProperty(BATCH);
this.companyUUIDField = props.getProperty(COMPANY);
this.routineIdFieldName=props.getProperty(ROUTINE_ID);
this.routineIdFieldName=props.getProperty(ROUTINE_ID);
this.areaField=props.getProperty(AREA);
this.speciesField=props.getProperty(SPECIES);
this.quarterField=props.getProperty(QUARTER);
this.periodField=props.getProperty(PERIOD);
this.analysisEnabled=Boolean.parseBoolean(props.getProperty(ENABLE_ANALYSIS, "false"));
if(props.containsKey(REPORT_FIELDS)) {
@@ -117,8 +132,8 @@ public class SchemaDefinition {
private ArrayList<String> toReportLabels=new ArrayList<>();
private String areaField="area";
private String periodField="period";
private String quarterField="quarter";
private String speciesField="species";
private String areaField;
private String periodField;
private String quarterField;
private String speciesField;
}

View File

@@ -0,0 +1,94 @@
package org.gcube.application.perform.service.engine.model.importer;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.gcube.application.perform.service.engine.model.DBField;
public class CSVRecordConverter {
private Map<String,DBField> labels;
public CSVRecordConverter(Map<String, DBField> labels) {
super();
this.labels = labels;
}
private Map<String,Map<String,String>> mappings=new HashMap<>();
private String conditionField=null;
private Set<String> conditionValues=new HashSet<>();
private boolean isAlwaysMap=false;
private ResultSetMetaData rsMeta=null;
public Object[] convert(ResultSet rs)throws SQLException{
if(rsMeta==null)rsMeta=rs.getMetaData();
Object[] toReturn=new Object[rsMeta.getColumnCount()];
if(mappingCondition(rs)) return map(rs,toReturn);
else
for(int i=0;i<toReturn.length;i++)
toReturn[i]=rs.getObject(i+1);
return toReturn;
}
public void setMapping(String field, Map<String,String> mapping) {
String actualFieldName=labels.get(field).getFieldName();
actualFieldName=actualFieldName.substring(1, actualFieldName.length()-1);
mappings.put(actualFieldName, mapping);
}
public void setCondition(String field, Set<String> values) {
this.conditionField=labels.get(field).getFieldName();
conditionField=conditionField.substring(1, conditionField.length()-1);
this.conditionValues=values;
}
public void setAlwaysMap(boolean isAlwaysMap) {
this.isAlwaysMap = isAlwaysMap;
}
public void reset() {
rsMeta=null;
}
private boolean mappingCondition(ResultSet rs) throws SQLException{
// Optimized pass-all
if(isAlwaysMap) return true;
// DefaultBehaviour
if(conditionField==null) return false;
else {
String currentValue=rs.getString(conditionField);
return conditionValues.contains(currentValue);
}
}
private Object[] map(ResultSet rs,Object[] toReturn) throws SQLException{
for(int i=0;i<toReturn.length;i++) {
toReturn[i]=rs.getObject(i+1);
String field=rsMeta.getColumnName(i+1);
if(mappings.containsKey(field)) {
Map<String,String> fieldMapping=mappings.get(field);
String value=rs.getString(i+1);
if(value!=null&&fieldMapping.containsKey(value))
toReturn[i]=fieldMapping.get(value);
}
}
return toReturn;
}
}
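
Below is a minimal usage sketch of the new CSVRecordConverter (not part of the commit), following the way ImportedTable wires it further down in this diff; the connection handling, table name, column names, labels map and mapping values are hypothetical:

// Hypothetical usage sketch, not part of the commit: shows how CSVRecordConverter
// is expected to be driven, mirroring ImportedTable.putIntoStorage below.
import java.io.FileWriter;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.Types;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

import org.gcube.application.perform.service.engine.model.DBField;
import org.gcube.application.perform.service.engine.model.importer.CSVRecordConverter;

public class CSVRecordConverterSketch {

	public static void export(Connection conn) throws Exception {
		// Label -> DBField; field names are assumed to be stored quoted (e.g. "farm_id"),
		// since setMapping()/setCondition() strip the first and last character.
		// The DBField(int, String) constructor follows the usage seen in PerformanceManagerImpl above.
		Map<String, DBField> labels = new HashMap<>();
		labels.put("farm_id", new DBField(Types.VARCHAR, "\"farm_id\""));

		CSVRecordConverter converter = new CSVRecordConverter(labels);

		// De-anonymization mapping: replace farm UUIDs with readable names while printing
		Map<String, String> farmMapping = new HashMap<>();
		farmMapping.put("uuid-123", "Farm A");
		converter.setMapping("farm_id", farmMapping);

		// Only rows whose farm_id appears in this set are mapped; all others pass through unchanged
		converter.setCondition("farm_id", farmMapping.keySet());

		try (Statement stmt = conn.createStatement();
				ResultSet rs = stmt.executeQuery("SELECT farm_id, weight FROM grow_out_aggregated");
				CSVPrinter printer = CSVFormat.DEFAULT.withHeader("farm_id", "weight")
						.print(new FileWriter("export.csv"))) {
			while (rs.next())
				printer.printRecord(converter.convert(rs));
		}
	}
}

Because convert() falls back to rs.getObject() when the mapping condition does not hold, rows outside the selected farms are exported verbatim.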

View File

@@ -86,10 +86,10 @@ public class ImportedTable {
check(schema.getCompanyUUIDField());
check(schema.getFarmUUIDField());
// check(schema.getAreaField());
// check(schema.getPeriodField());
// check(schema.getQuarterField());
// check(schema.getSpeciesField());
check(schema.getAreaField());
check(schema.getPeriodField());
check(schema.getQuarterField());
check(schema.getSpeciesField());
for(String f:schema.getToReportFields())
check(f);
@@ -216,7 +216,8 @@ public class ImportedTable {
Connection conn= DataBaseManager.get().getConnection();
try {
ExportCSVQuery exportQuery=new ExportCSVQuery("",null,request,schema,labels);
CSVRecordConverter queryConverter=new CSVRecordConverter(labels);
ExportCSVQuery exportQuery=new ExportCSVQuery("",null,request,schema,labels,csvFields);
exportQuery.setTablename(tablename);
Map<String,String> farmMapping=new HashMap<>();
@@ -244,23 +245,34 @@ public class ImportedTable {
}
}
// Set mappings for query and csv printer
if(schema.getAssociationUUIDField()!=null) {
log.debug("Setting Association Mapping : "+associationMapping);
exportQuery.setMapping(schema.getAssociationUUIDField(), associationMapping);
queryConverter.setMapping(schema.getAssociationUUIDField(), associationMapping);
}
if(schema.getCompanyUUIDField()!=null) {
log.debug("Setting Company Mapping : "+companyMapping);
exportQuery.setMapping(schema.getCompanyUUIDField(), companyMapping);
queryConverter.setMapping(schema.getCompanyUUIDField(), companyMapping);
}
if(schema.getFarmUUIDField()!=null) {
log.debug("Setting Farm Mapping : "+farmMapping);
exportQuery.setMapping(schema.getFarmUUIDField(), farmMapping);
queryConverter.setMapping(schema.getFarmUUIDField(), farmMapping);
}
if(schema.getBatchUUIDField()!=null) {
log.debug("Setting Batch Mapping : "+batchMapping);
exportQuery.setMapping(schema.getBatchUUIDField(), batchMapping);
queryConverter.setMapping(schema.getBatchUUIDField(), batchMapping);
}
// Set mapping condition NB only farm supported at the moment
if(schema.getFarmUUIDField()!=null)
queryConverter.setCondition(schema.getFarmUUIDField(), farmMapping.keySet());
log.trace("Performing actual query towards {} ",tablename);
@@ -274,29 +286,36 @@ public class ImportedTable {
toReturn.put(this.schema.getRelatedDescription(), putIntoStorage(csvRs, csvFields.toArray(new String[csvFields.size()])));
toReturn.put(this.schema.getRelatedDescription(), putIntoStorage(csvRs,
csvFields.toArray(new String[csvFields.size()]),queryConverter));
if(schema.getToReportFields().size()>0) {
ArrayList<String> personalLabels=schema.getToReportFields();
ArrayList<String> toExtractCSVFields=schema.getToReportFields();
queryConverter.reset();
log.trace("Extracting {} from {} ",tablename);
// Extract personal found values from same query
ArrayList<String> toExtractFields=new ArrayList<String>();
for(String label:personalLabels) {
toExtractFields.add(labels.get(label).getFieldName());
String[] toExtractFields=new String[toExtractCSVFields.size()];
for(String label:toExtractCSVFields) {
String fieldName=labels.get(label).getFieldName();
toExtractFields[toExtractCSVFields.indexOf(label)]=fieldName;
}
String sqlPersonal=exportQuery.getQueryForMappedFields(schema.getFarmUUIDField(),
toExtractFields.toArray(new String[toExtractFields.size()]));
toExtractFields);
log.debug("Query is {} ",sqlPersonal);
csvRs=stmt.executeQuery(sqlPersonal);
toReturn.put(this.schema.getRelatedDescription()+"_internal", putIntoStorage(csvRs,
schema.getToReportLabels().toArray(new String[schema.getToReportLabels().size()])));
schema.getToReportLabels().toArray(new String[schema.getToReportLabels().size()]),queryConverter));
}
@@ -331,14 +350,15 @@ public class ImportedTable {
}
private static final String putIntoStorage(ResultSet toExport,String...headers) throws IOException, SQLException {
private static final String putIntoStorage(ResultSet toExport,String[] headers, CSVRecordConverter converter) throws IOException, SQLException {
CSVPrinter printer=null;
File dataFile=null;
try {
dataFile=File.createTempFile("csv_out", ".csv");
printer = CSVFormat.DEFAULT.withHeader(headers).print(new FileWriter(dataFile));
printer.printRecords(toExport);
while(toExport.next()) {
printer.printRecord(converter.convert(toExport));
}
printer.flush();
return StorageUtils.putOntoStorage(dataFile);
}finally {

View File

@@ -3,6 +3,9 @@ farm=farm_id
company=company_id
association=producer_association_affiliation
batch=aggregated_batch_id
species=species
quarter=quarter
area=area
routine=internal_routine_id
csv=csv/GROW_OUT_AGGREGATED.csv
enable_analysis=true

View File

@@ -1,3 +1,3 @@
description=AnagraphicTable
routine=internal_routine_id
csv=csv/GROW_OUT_INDIVIDUAL_anagraphic.csv
csv=csv/GROW_OUT_INDIVIDUAL_anagraphic.csv

View File

@@ -4,6 +4,9 @@ company=company_id
association=producer_association_affiliation
batch=batch_id
routine=internal_routine_id
species=species_stocking
area=area
quarter=quarter
csv=csv/GROW_OUT_INDIVIDUAL.csv
enable_analysis=true
report_fields=batch_id,farm_id,area

View File

@@ -3,6 +3,9 @@ farm=farm_id
company=company_id
association=producer_association_affiliation
batch=batch_id
species=species_stocking
area=area
quarter=quarter
routine=internal_routine_id
csv=csv/GROW_OUT_INDIVIDUAL.csv
enable_analysis=true

View File

@@ -3,6 +3,8 @@ farm=farm_id
company=company_id
association=producer_association_affiliation
batch=aggregated_batches_id
species=species
period=stocking_period
routine=internal_routine_id
csv=csv/HATCHERY_AGGREGATED.csv
enable_analysis=true

View File

@@ -2,6 +2,8 @@ description=BatchesTable
farm=farm_id
company=company_id
batch=batch_id
species=species
period=stocking_period
routine=internal_routine_id
csv=csv/HATCHERY_INDIVIDUAL.csv
enable_analysis=true

View File

@@ -2,6 +2,8 @@ description=BatchesTable
farm=farm_id
company=company_id
batch=batch_id
species=species
period=stocking_period
routine=internal_routine_id
csv=csv/HATCHERY_INDIVIDUAL.csv
enable_analysis=true

View File

@@ -3,6 +3,9 @@ farm=farm_id
company=company_id
batch=batch_id
routine=internal_routine_id
species=species
area=area
period=period
csv=csv/PRE_ONGROWING.csv
enable_analysis=true
report_fields=batch_id,farm_id

View File

@@ -3,6 +3,9 @@ farm=farm_id
company=company_id
batch=batch_id
routine=internal_routine_id
species=species
area=area
period=period
csv=csv/PRE_ONGROWING.csv
enable_analysis=true
report_fields=batch_id,farm_id

View File

@@ -10,6 +10,7 @@ import java.util.Set;
import org.gcube.application.perform.service.engine.DataBaseManager;
import org.gcube.application.perform.service.engine.impl.PerformanceManagerImpl;
import org.gcube.application.perform.service.engine.model.DBField;
import org.gcube.application.perform.service.engine.model.InternalException;
import org.gcube.application.perform.service.engine.model.importer.AnalysisType;
import org.gcube.application.perform.service.engine.model.importer.ImportedTable;
@@ -26,11 +27,18 @@ public class DeleteSchema {
Connection conn=DataBaseManager.get().getConnection();
Statement stmt=conn.createStatement();
for(Entry<AnalysisType,Set<ImportedTable>> entry:PerformanceManagerImpl.getAnalysisConfiguration().entrySet()) {
for(ImportedTable t:entry.getValue()) {
stmt.execute("DROP TABLE "+t.getTableName());
}
}
// CLEAN IMPORTS
stmt.executeUpdate("DELETE FROM "+DBField.ImportRoutine.TABLE);
conn.commit();
// throw new RuntimeException("Uncomment commit to really perform cleanup");

View File

@@ -48,10 +48,7 @@ public class LoadSchemaTest {
//
// }
// CLEAN IMPORTS
Connection conn = DataBaseManager.get().getConnection();
conn.createStatement().executeUpdate("DELETE FROM "+DBField.ImportRoutine.TABLE);
conn.commit();

View File

@@ -1,29 +1,71 @@
package org.gcube.application.perform.service;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map.Entry;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.GenericType;
import javax.ws.rs.core.Response;
import org.apache.commons.io.IOUtils;
import org.gcube.application.perform.service.engine.impl.PerformanceManagerImpl;
import org.gcube.application.perform.service.engine.model.importer.AnalysisType;
import org.junit.Test;
public class PerformanceTest extends CommonTest{
@Test
public void getPerformance() {
WebTarget target=
target(ServiceConstants.Performance.PATH).
queryParam(ServiceConstants.Performance.AREA_PARAMETER, "A1","A2").
queryParam(ServiceConstants.Performance.QUARTER_PARAMETER, "Q1","Q2").
queryParam(ServiceConstants.Performance.SPECIES_ID_PARAMETER, "Gadilidae","Tonno").
queryParam(ServiceConstants.Performance.PERIOD_PARAMETER, "First","Spring").
queryParam(ServiceConstants.Performance.FARM_ID_PARAMETER, "13625424","1233556","12346").
queryParam(ServiceConstants.Performance.BATCH_TYPE_PARAMETER, "GROW_OUT_AGGREGATED");
import com.google.common.base.Charsets;
import com.google.common.io.Files;
import com.vividsolutions.jts.util.Assert;
System.out.println(target.getUri());
Response resp=target.request().get();
System.out.println(resp.getStatus() + " : "+ resp.readEntity(String.class));
public class PerformanceTest extends CommonTest{
@Test
public void getPerformance() throws IOException {
File folder= Files.createTempDir();
for(AnalysisType analysis:PerformanceManagerImpl.getAnalysisConfiguration().keySet()) {
// AnalysisType analysis=new AnalysisType("GROW_OUT_INDIVIDUAL","GROW_OUT_INDIVIDUAL");
WebTarget target=
target(ServiceConstants.Performance.PATH).
// queryParam(ServiceConstants.Performance.AREA_PARAMETER, "A1","A2").
//// queryParam(ServiceConstants.Performance.QUARTER_PARAMETER, "Q1","Q2").
// queryParam(ServiceConstants.Performance.SPECIES_ID_PARAMETER, "Gadilidae","Tonno").
// queryParam(ServiceConstants.Performance.PERIOD_PARAMETER, "First","Spring").
queryParam(ServiceConstants.Performance.FARM_ID_PARAMETER, "13625424").
queryParam(ServiceConstants.Performance.BATCH_TYPE_PARAMETER, analysis.getId());
System.out.println(target.getUri());
Response resp=target.request().get();
Assert.isTrue(resp.getStatus()==200);
File subFolder= new File(folder,analysis.getId());
subFolder.mkdirs();
for(Entry<String,String> entry : resp.readEntity(new GenericType<HashMap<String, String>>() { }).entrySet()) {
URL csvUrl=new URL(entry.getValue());
File csv=new File(subFolder,entry.getKey()+".csv");
csv.createNewFile();
BufferedWriter writer=Files.newWriter(csv, Charsets.UTF_8);
IOUtils.copy(csvUrl.openStream(), writer);
writer.close();
}
// System.out.println(analysis.getId()+" "+resp.getStatus() + " : "+ resp.readEntity(String.class));
}
System.out.println("Wrote to : "+ folder.getAbsolutePath());
}
}