Compare commits

...

10 Commits

17 changed files with 267 additions and 37 deletions

View File

@ -8,7 +8,7 @@
</parent>
<groupId>org.gcube.application</groupId>
<artifactId>perform-service</artifactId>
<version>1.0.0</version>
<version>1.1.0-SNAPSHOT</version>
<name>Perform Service</name>
<description>Service for Farm Perform Data Management</description>
<packaging>war</packaging>

View File

@ -28,6 +28,10 @@ public class LocalConfiguration {
public static final String COMMIT_SCHEMA="schema.load.commit";
public static final String DEANONIMIZATION_INCLUDE_SNAPSHOT="deanonimization.default_include_SNAPSHOT";
public static final String PERFORMANCE_DEFAULT_INCLUDE_SNAPSHOT="performance.default_include_SNAPSHOT";
static LocalConfiguration instance=null;

View File

@ -89,6 +89,7 @@ public class PerformServiceLifecycleManager extends ApplicationLifecycleHandler
PerformanceManagerImpl.importSchema(schema,webinfPath);
}catch(Throwable t) {
log.warn("SKPPING CONFIGURATION FILE "+schemaFile.getPath(),t);
throw new InternalException("Unable to init schema. Offending file is "+schemaFile.getAbsolutePath(),t);
}finally {
if(fis!=null) fis.close();
}

View File

@ -27,9 +27,15 @@ public interface ServiceConstants {
public static final String EXCEL_FILE_VERSION_PARAMETER="source_version";
public static final String STATUS_PARAMETER="status";
public static final String LAST_METHOD="last";
public static final String SNAPSHOT_METHOD="snapshot";
}
// Constants for the snapshot REST endpoint (consumed by rest.Snapshot).
public static interface Snapshot{
// Resource path of the snapshot endpoint.
public static final String PATH="snapshot";
// Query parameter selecting the batch type to snapshot.
public static final String BATCH_TYPE_PARAMETER="batch_type";
// Query parameter identifying the target farm.
public static final String FARM_ID_PARAMETER="farmid";
}
public static interface Performance{
public static final String PATH="performance";
public static final String FARM_ID_PARAMETER="farmid";
@ -39,6 +45,10 @@ public interface ServiceConstants {
public static final String BATCH_TYPE_PARAMETER="batch_type";
public static final String PERIOD_PARAMETER="period";
public static final String INCLUDE_SNAPSHOST="include_snapshots";
public static final String DEANONIMIZE_SNAPSHOST="deanonimize_snapshots";
public static final String START_YEAR="start_year";
public static final String STATISTICS_PATH="statistics";
}

View File

@ -18,4 +18,7 @@ public interface PerformanceManager {
public void loadOutputData(ImportRoutineDescriptor desc)throws SQLException, InvalidRequestException, InternalException, IOException, DMException;
public Map<String,String> getStatistics(AnalysisType type)throws SQLException, InvalidRequestException, InternalException, IOException;
public void snapshotImportedData(ImportRoutineDescriptor desc)throws SQLException, InvalidRequestException, InternalException, IOException;
}

View File

@ -22,9 +22,9 @@ public class ExportCSVQuery extends Query {
private ArrayList<String> exportCSVFieldOrder;
public ExportCSVQuery(String query, DBField[] queryParams, CSVExportRequest theRequest,
public ExportCSVQuery(CSVExportRequest theRequest,
SchemaDefinition schema, Map<String,DBField> actualStructure, ArrayList<String> exportFieldsOrder) {
super(query, queryParams);
super("", null);
this.theRequest=theRequest;
this.schema=schema;
this.actualStructure=actualStructure;
@ -33,6 +33,8 @@ public class ExportCSVQuery extends Query {
}
public void setMapping(String field, Map<String,String> mapping) {
mappings.put(field, mapping);
@ -64,6 +66,9 @@ public class ExportCSVQuery extends Query {
StringBuilder b=new StringBuilder();
for(String f:fields)
b.append(f+",");
if(schema.getSnapshotFieldName()!=null)
b.append(schema.getSnapshotFieldName()+",");
b.setLength(b.lastIndexOf(","));
log.debug("Creating query for fields {} against table {} ",b,tablename);
@ -146,6 +151,13 @@ public class ExportCSVQuery extends Query {
if(theRequest.getPeriods().size()>0 && schema.getPeriodField()!=null && exists(schema.getPeriodField()))
orGroups.add(getFilterByMultipleValues(theRequest.getPeriods(), schema.getPeriodField()));
// SNAPSHOTS
if(!theRequest.getIncludeHistoric())
if(schema.getSnapshotFieldName()!=null && exists(schema.getSnapshotFieldName()))
orGroups.add(schema.getSnapshotFieldName()+" = "+theRequest.getIncludeHistoric());
StringBuilder toReturn=new StringBuilder("");
for(String orGroup:orGroups) {
toReturn.append("("+orGroup+") AND ");
@ -166,6 +178,8 @@ public class ExportCSVQuery extends Query {
DBField f = actualStructure.get(label);
b.append(f.getFieldName()+",");
}
if(schema.getSnapshotFieldName()!=null)
b.append(schema.getSnapshotFieldName()+",");
return b.toString().substring(0,b.lastIndexOf(","));
}

View File

@ -165,13 +165,32 @@ public class PerformanceManagerImpl implements PerformanceManager{
String createStmt=t.createStatement();
log.debug("Creating Table with stmt {} ",createStmt);
stmt.execute(createStmt);
if(Boolean.parseBoolean(LocalConfiguration.getProperty(LocalConfiguration.COMMIT_SCHEMA)))
conn.commit();
for(String sql:t.updateSchemaStatement()) {
try {
stmt.execute(sql);
}catch(SQLException e) {
log.warn("Error while trying to update schema. Depending on DBMS it can mean the update was already in place.",e);
if(Boolean.parseBoolean(LocalConfiguration.getProperty(LocalConfiguration.COMMIT_SCHEMA)))
conn.commit();
}
}
}
}
if(Boolean.parseBoolean(LocalConfiguration.getProperty(LocalConfiguration.COMMIT_SCHEMA)))
conn.commit();
}
public static void importSchema(SchemaDefinition schema,String csvBasePath) throws IOException, SQLException, InternalException {
log.info("Loading schema {} ",schema);
@ -274,6 +293,12 @@ public class PerformanceManagerImpl implements PerformanceManager{
}
}
@Override
public void snapshotImportedData(ImportRoutineDescriptor desc)
		throws SQLException, InvalidRequestException, InternalException, IOException {
	// Not implemented yet. UnsupportedOperationException is the conventional JDK type
	// for an unimplemented operation — clearer to callers than a bare RuntimeException.
	throw new UnsupportedOperationException("Method not yet implemented");
}
// ************************** SCHEMA PARSING
private static final String FLOAT_REGEX="\\d*\\.\\d*";

View File

@ -13,7 +13,6 @@ public class SchemaDefinition {
private static final String ASSOCIATION="association";
private static final String BATCH="batch";
private static final String COMPANY="company";
private static final String ROUTINE_ID="routine";
private static final String AREA="area";
private static final String SPECIES="species";
private static final String PERIOD="period";
@ -24,7 +23,10 @@ public class SchemaDefinition {
private static final String REPORT_FIELDS="report_fields";
private static final String REPORT_LABELS="report_labels";
//SYSTEM FIELDS
private static final String ROUTINE_ID="routine";
private static final String SNAPSHOT="snapshot";
private static final String HISTORIC="history_year";
public String getRelatedDescription() {
@ -58,7 +60,12 @@ public class SchemaDefinition {
public String getRoutineIdFieldName() {
return routineIdFieldName;
}
/** @return the column name holding the historic-year marker, or null when the schema has no snapshot support. */
public String getHistoricYearFieldName() {
return historicYearFieldName;
}
/** @return the column name of the snapshot flag, or null when the schema has no snapshot support. */
public String getSnapshotFieldName() {
return snapshotFieldName;
}
public String getAreaField() {
return areaField;
@ -82,7 +89,7 @@ public class SchemaDefinition {
return toReportLabels;
}
public SchemaDefinition(AnalysisType relatedAnalysis, Properties props) {
public SchemaDefinition(AnalysisType relatedAnalysis, Properties props) throws Exception {
super();
this.relatedDescription = props.getProperty(DESCRIPTION);
this.relatedAnalysis = relatedAnalysis;
@ -91,12 +98,16 @@ public class SchemaDefinition {
this.associationUUIDField = props.getProperty(ASSOCIATION);
this.batchUUIDField = props.getProperty(BATCH);
this.companyUUIDField = props.getProperty(COMPANY);
this.routineIdFieldName=props.getProperty(ROUTINE_ID);
this.areaField=props.getProperty(AREA);
this.speciesField=props.getProperty(SPECIES);
this.quarterField=props.getProperty(QUARTER);
this.periodField=props.getProperty(PERIOD);
this.snapshotFieldName=props.getProperty(SNAPSHOT);
this.historicYearFieldName=props.getProperty(HISTORIC);
this.routineIdFieldName=props.getProperty(ROUTINE_ID);
if(!((snapshotFieldName==null)==(historicYearFieldName==null)))
throw new Exception(SNAPSHOT+" and "+HISTORIC+" fields need to be specified together");
this.analysisEnabled=Boolean.parseBoolean(props.getProperty(ENABLE_ANALYSIS, "false"));
@ -126,6 +137,9 @@ public class SchemaDefinition {
private Boolean analysisEnabled;
private String snapshotFieldName;
private String historicYearFieldName;
private String routineIdFieldName;
private ArrayList<String> toReportFields=new ArrayList<>();

View File

@ -16,6 +16,34 @@ public class CSVExportRequest {
private Set<String> speciesIds=new HashSet<>();
private Set<String> periods=new HashSet<>();
private Boolean includeHistoric=false;
private Integer startYear=0;
private Boolean deanonimizeSNAPSHOTS=false;
public Boolean getIncludeHistoric() {
return includeHistoric;
}
public Boolean getDeanonimizeSNAPSHOTS() {
return deanonimizeSNAPSHOTS;
}
public void setDeanonimizeSNAPSHOTS(Boolean deanonimizeSNAPSHOTS) {
this.deanonimizeSNAPSHOTS = deanonimizeSNAPSHOTS;
}
public Integer getStartYear() {
return startYear;
}
public void setIncludeHistoric(Boolean includeHistoric) {
this.includeHistoric = includeHistoric;
}
public void setStartYear(Integer startYear) {
this.startYear = startYear;
}
public Set<String> getAreas() {
return areas;
}
@ -74,12 +102,15 @@ public class CSVExportRequest {
periods.addAll(toAdd);
return this;
}
@Override
public String toString() {
return "CSVExportRequest [type=" + type + ", farmIds=" + farmIds + ", quarters=" + quarters + ", areas=" + areas
+ ", speciesIds=" + speciesIds + ", periods=" + periods + "]";
+ ", speciesIds=" + speciesIds + ", periods=" + periods + ", includeHistoric=" + includeHistoric
+ ", startYear=" + startYear + "]";
}
}

View File

@ -4,8 +4,8 @@ import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.gcube.application.perform.service.engine.model.DBField;
@ -22,8 +22,8 @@ public class CSVRecordConverter {
}
private Map<String,Map<String,String>> mappings=new HashMap<>();
private String conditionField=null;
private Set<String> conditionValues=new HashSet<>();
private Map<DBField,Set<? extends Object>> condition=new HashMap<>();
// private Set<String> conditionValues=new HashSet<>();
private boolean isAlwaysMap=false;
@ -48,10 +48,17 @@ public class CSVRecordConverter {
mappings.put(actualFieldName, mapping);
}
public void setCondition(String field, Set<String> values) {
this.conditionField=labels.get(field).getFieldName();
conditionField=conditionField.substring(1, conditionField.length()-1);
this.conditionValues=values;
// public void setCondition(String field, Set<String> values) {
// this.conditionField=labels.get(field).getFieldName();
// conditionField=conditionField.substring(1, conditionField.length()-1);
// this.conditionValues=values;
// }
/**
 * Registers a filtering condition: a record is converted only when the value read
 * for the given field is contained in the provided set of admissible values.
 * The field name is resolved through the labels map and stripped of quoting.
 */
public void addCondition(DBField conditionField,Set<? extends Object> values) {
	String resolved = labels.get(conditionField.getFieldName()).getFieldName();
	String unquoted = resolved.replace("\"", "");
	this.condition.put(new DBField(conditionField.getType(), unquoted), values);
}
public void setAlwaysMap(boolean isAlwaysMap) {
@ -67,10 +74,13 @@ public class CSVRecordConverter {
if(isAlwaysMap) return true;
// DefaultBehaviour
if(conditionField==null) return false;
if(condition.isEmpty()) return false;
else {
String currentValue=rs.getString(conditionField);
return conditionValues.contains(currentValue);
for(Entry<DBField,Set<? extends Object>> entry:condition.entrySet()) {
Object obj=rs.getObject(entry.getKey().getFieldName());
if(!entry.getValue().contains(obj)) return false;
}
return true;
}
}

View File

@ -11,7 +11,11 @@ import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
@ -75,12 +79,14 @@ public class ImportedTable {
csvFields=new ArrayList<>();
labels=new HashMap<>();
for(DBField field:csvFieldsDefinition) {
String escaped=escapeString(field.getFieldName());
csvFields.add(field.getFieldName());
labels.put(field.getFieldName(), new DBField(field.getType(),escaped));
for(DBField field:csvFieldsDefinition)
addLabelledField(field,true);
if(hasSNAPSHOT()) {
addLabelledField(new DBField(Types.BOOLEAN,schema.getSnapshotFieldName()),false);
addLabelledField(new DBField(Types.BIGINT,schema.getHistoricYearFieldName()),false);
}
check(schema.getAssociationUUIDField());
check(schema.getBatchUUIDField());
check(schema.getCompanyUUIDField());
@ -97,6 +103,15 @@ public class ImportedTable {
}
/**
 * Registers a field in the labels map. Escaped fields are CSV-borne: their raw
 * name is also tracked in csvFields and the stored DBField carries the escaped
 * name. Non-escaped (system) fields are registered as-is and kept out of csvFields.
 */
private void addLabelledField(DBField field,boolean escape) {
	String name = field.getFieldName();
	if (!escape) {
		// System field: not part of the CSV layout — TODO confirm this is intended.
		labels.put(name, field);
		return;
	}
	csvFields.add(name);
	labels.put(name, new DBField(field.getType(), escapeString(name)));
}
private void check(String field) throws InternalException {
if(field!=null)
if(!labels.containsKey(field)) throw new InternalException("Incoherent schema definition for table "+tablename+". Field "+field+" not found in csv.");
@ -106,6 +121,17 @@ public class ImportedTable {
return new DBField(Types.BIGINT,schema.getRoutineIdFieldName());
}
/** @return the DBField registered for the snapshot flag column (null when the schema defines no snapshot field). */
private DBField getSNAPSHOTField() {
return labels.get(schema.getSnapshotFieldName());
}
/** @return the DBField registered for the historic-year column (null when the schema defines no snapshot field). */
private DBField getHistoricYearField() {
return labels.get(schema.getHistoricYearFieldName());
}
/**
 * @return true when this table's schema declares a snapshot flag column
 * (the schema constructor guarantees the historic-year field is declared with it).
 */
private boolean hasSNAPSHOT() {
	// Direct != null is the idiomatic form of the original !(x==null).
	return schema.getSnapshotFieldName() != null;
}
private Query prepareInsertionQuery() {
@ -150,10 +176,26 @@ public class ImportedTable {
String.format( "%1$s bigint,"
+ "FOREIGN KEY (%1$s) REFERENCES "+ImportRoutine.TABLE+"("+ImportRoutine.ID+")",getRoutineIdField().getFieldName());
if(hasSNAPSHOT())
standardDefinitions=
String.format( "%1$s bigint, %2$s boolean DEFAULT FALSE, %3$s int DEFAULT 0,"
+ "FOREIGN KEY (%1$s) REFERENCES "+ImportRoutine.TABLE+"("+ImportRoutine.ID+")",
getRoutineIdField().getFieldName(),
getSNAPSHOTField().getFieldName(),
getHistoricYearField().getFieldName());
return String.format("CREATE TABLE IF NOT EXISTS %1$s (%2$s, %3$s)",
tablename,fieldDefinitions.substring(0,fieldDefinitions.lastIndexOf(",")),standardDefinitions);
}
/**
 * Statements migrating a pre-existing table to the current schema by adding the
 * snapshot support columns (matching the defaults used in the CREATE TABLE path).
 * The caller executes these best-effort: on most DBMS the ALTER fails harmlessly
 * when the column already exists.
 *
 * @return the ALTER statements to run; empty when the schema has no snapshot support
 */
public List<String> updateSchemaStatement() {
	ArrayList<String> toReturn = new ArrayList<>();
	if (hasSNAPSHOT()) {
		toReturn.add(String.format("ALTER TABLE %1$s ADD %2$s boolean DEFAULT FALSE",
				getTableName(), getSNAPSHOTField().getFieldName()));
		// BUGFIX: this column is the historic-year one. The original passed
		// getSNAPSHOTField() here too, so the historic-year column was never added
		// on upgrade (and the snapshot column was targeted twice).
		toReturn.add(String.format("ALTER TABLE %1$s ADD %2$s int DEFAULT 0",
				getTableName(), getHistoricYearField().getFieldName()));
	}
	return toReturn;
}
/**
* Checks if passed set of labels is
@ -235,7 +277,8 @@ public class ImportedTable {
try {
CSVRecordConverter queryConverter=new CSVRecordConverter(labels);
ExportCSVQuery exportQuery=new ExportCSVQuery("",null,request,schema,labels,csvFields);
ExportCSVQuery exportQuery=new ExportCSVQuery(request,schema,labels,csvFields);
exportQuery.setTablename(tablename);
Map<String,String> farmMapping=new HashMap<>();
@ -286,11 +329,15 @@ public class ImportedTable {
queryConverter.setMapping(schema.getBatchUUIDField(), batchMapping);
}
// Set mapping condition NB only farm supported at the moment
if(schema.getFarmUUIDField()!=null)
queryConverter.setCondition(schema.getFarmUUIDField(), farmMapping.keySet());
// Set mapping condition
if(schema.getFarmUUIDField()!=null) {
queryConverter.addCondition(new DBField(Types.VARCHAR,schema.getFarmUUIDField()), farmMapping.keySet());
}
if(hasSNAPSHOT()) {
if(!request.getDeanonimizeSNAPSHOTS())
queryConverter.addCondition(getSNAPSHOTField(), Collections.singleton(false));
}
log.trace("Performing actual query towards {} ",tablename);

View File

@ -118,4 +118,6 @@ public class Import {
}
}
}

View File

@ -1,18 +1,14 @@
package org.gcube.application.perform.service.rest;
import java.util.Collections;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class InterfaceCommons {
private static final Logger log= LoggerFactory.getLogger(InterfaceCommons.class);
public static final void checkMandatory(Object toCheck, String name) throws WebApplicationException{
if(toCheck==null)
@ -20,10 +16,15 @@ public class InterfaceCommons {
}
public static final List<String> getParameter(MultivaluedMap<String,String> map,String paramName, boolean mandatory){
// log.debug()
if(map.containsKey(paramName)) {
return map.get(paramName);
}else if(mandatory) throw new WebApplicationException(String.format("Parameter %1$s is mandatory",paramName),Response.Status.BAD_REQUEST);
return Collections.emptyList();
return new ArrayList<String>();
}
/**
 * Returns the first value bound to paramName in the query-parameter map,
 * or defaultValue when the parameter is absent.
 */
public static final String getParamOrDefault(MultivaluedMap<String,String> map,String paramName, String defaultValue) {
	List<String> values = getParameter(map, paramName, false);
	return values.isEmpty() ? defaultValue : values.get(0);
}
}

View File

@ -16,6 +16,7 @@ import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.gcube.application.perform.service.LocalConfiguration;
import org.gcube.application.perform.service.PerformServiceManager;
import org.gcube.application.perform.service.ServiceConstants;
import org.gcube.application.perform.service.engine.PerformanceManager;
@ -69,6 +70,16 @@ public class Performance {
request.addSpecies(InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.SPECIES_ID_PARAMETER, false));
request.addPeriods(InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.PERIOD_PARAMETER, false));
request.setIncludeHistoric(
Boolean.valueOf(
InterfaceCommons.getParamOrDefault(parameters, ServiceConstants.Performance.INCLUDE_SNAPSHOST,
LocalConfiguration.getProperty(LocalConfiguration.PERFORMANCE_DEFAULT_INCLUDE_SNAPSHOT))));
request.setStartYear(Integer.valueOf(InterfaceCommons.getParamOrDefault(parameters, ServiceConstants.Performance.START_YEAR, "1900")));
request.setDeanonimizeSNAPSHOTS(Boolean.valueOf(
InterfaceCommons.getParamOrDefault(parameters, ServiceConstants.Performance.DEANONIMIZE_SNAPSHOST,
LocalConfiguration.getProperty(LocalConfiguration.DEANONIMIZATION_INCLUDE_SNAPSHOT))));
log.debug("Export request : {} ",request);

View File

@ -0,0 +1,52 @@
package org.gcube.application.perform.service.rest;
import java.util.List;
import javax.inject.Inject;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import org.gcube.application.perform.service.PerformServiceManager;
import org.gcube.application.perform.service.ServiceConstants;
import org.gcube.application.perform.service.engine.Importer;
import org.gcube.application.perform.service.engine.model.DBField;
import org.gcube.application.perform.service.engine.model.DBQueryDescriptor;
import org.gcube.application.perform.service.engine.model.importer.ImportRoutineDescriptor;
import org.gcube.smartgears.annotations.ManagedBy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * REST resource handling snapshot requests over imported batch data.
 * NOTE(review): the PUT handler currently looks up the matching import routines but
 * does nothing with the result — presumably the actual snapshot trigger is still to
 * be implemented (the engine's snapshotImportedData throws "Method not yet implemented").
 */
@Path(ServiceConstants.Snapshot.PATH)
@ManagedBy(PerformServiceManager.class)
public class Snapshot {
private static final Logger log= LoggerFactory.getLogger(Snapshot.class);
@Inject
private Importer importer;
/**
 * Requests a snapshot for the given farm / batch type. Both query parameters are
 * mandatory (rejected via checkMandatory when missing); any other failure is
 * reported as HTTP 500.
 */
@PUT
public void snapshot(@QueryParam(ServiceConstants.Snapshot.BATCH_TYPE_PARAMETER) String batchType,
@QueryParam(ServiceConstants.Snapshot.FARM_ID_PARAMETER)Long farmid) {
InterfaceCommons.checkMandatory(batchType, ServiceConstants.Snapshot.BATCH_TYPE_PARAMETER);
InterfaceCommons.checkMandatory(farmid, ServiceConstants.Snapshot.FARM_ID_PARAMETER);
try {
// Routines matching farm id + batch type; result is currently unused — TODO confirm intended behavior.
List<ImportRoutineDescriptor> found=importer.getDescriptors(new DBQueryDescriptor().add(
DBField.ImportRoutine.fields.get(DBField.ImportRoutine.FARM_ID), farmid).
add(DBField.ImportRoutine.fields.get(DBField.ImportRoutine.BATCH_TYPE), batchType));
}catch(Throwable t) {
log.warn("Unexpected Exception ",t);
throw new WebApplicationException("Unexpected Exception.", t,Response.Status.INTERNAL_SERVER_ERROR);
}
}
}

View File

@ -11,4 +11,7 @@ dm.importer.computationid=org.gcube.dataanalysis.wps.statisticalmanager.synchser
schema.load=true
schema.load.skipError=true
schema.load.commit=true
deanonimization.default_include_SNAPSHOT=false
performance.default_include_SNAPSHOT=false

View File

@ -7,6 +7,8 @@ species=species
quarter=quarter
area=area
routine=internal_routine_id
snapshot=internal_snapshot
history_year=internal_historic_year
csv=csv/GROW_OUT_AGGREGATED.csv
enable_analysis=true
report_fields=aggregated_batch_id,farm_id,area