Fabio Sinibaldi 2019-01-23 17:30:13 +00:00
parent 009b4be6a0
commit e06ab292bd
31 changed files with 134 additions and 61 deletions

View File

@@ -26,6 +26,8 @@ public class LocalConfiguration {
public static final String LOAD_SCHEMA="schema.load";
public static final String SKIP_ON_SCHEMA_ERROR="schema.load.skipError";
public static final String COMMIT_SCHEMA="schema.load.commit";
static LocalConfiguration instance=null;

View File

@@ -33,12 +33,15 @@ public class PerformServiceLifecycleManager extends ApplicationLifecycleHandler
@Override
public void onStart(Start e) {
super.onStart(e);
try{
ApplicationContext context=ContextProvider.get();
ApplicationContext context=e.context();
log.debug("Starting Service. ApplicationContext is {} ",context);
log.debug("Application is {} ",context.application());
URL resourceUrl = context.application().getResource("/WEB-INF/config.properties");
LocalConfiguration.init(resourceUrl);
ServletContext ctx=ContextProvider.get().application();
ServletContext ctx=context.application();
String webinfPath=ctx.getRealPath("/WEB-INF");
if(Boolean.parseBoolean(LocalConfiguration.getProperty(LocalConfiguration.LOAD_SCHEMA))) {
initSchema(webinfPath);
@@ -52,7 +55,6 @@ public class PerformServiceLifecycleManager extends ApplicationLifecycleHandler
}catch(Exception ex) {
throw new RuntimeException("Unable to init",ex);
}
super.onStart(e);
}
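
Net effect of the two hunks above: the ApplicationContext is taken from the Start event instead of the static ContextProvider, and the duplicated trailing super.onStart(e) call is removed. A sketch of the resulting method, assembled from the visible hunks only (elided lines marked; surrounding class and imports omitted):

@Override
public void onStart(Start e) {
    super.onStart(e);
    try {
        ApplicationContext context = e.context();   // was: ContextProvider.get()
        URL resourceUrl = context.application().getResource("/WEB-INF/config.properties");
        LocalConfiguration.init(resourceUrl);
        ServletContext ctx = context.application(); // was: ContextProvider.get().application()
        String webinfPath = ctx.getRealPath("/WEB-INF");
        if (Boolean.parseBoolean(LocalConfiguration.getProperty(LocalConfiguration.LOAD_SCHEMA))) {
            initSchema(webinfPath);
        }
        // ... lines 38-52 of the file are not shown in this diff ...
    } catch (Exception ex) {
        throw new RuntimeException("Unable to init", ex);
    }
}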

View File

@@ -3,6 +3,7 @@ package org.gcube.application.perform.service;
import java.net.URL;
import org.gcube.application.perform.service.engine.impl.ImporterImpl;
import org.gcube.application.perform.service.engine.impl.PerformanceManagerImpl;
import org.gcube.application.perform.service.engine.utils.ScopeUtils;
import org.gcube.smartgears.ApplicationManager;
import org.gcube.smartgears.ContextProvider;
@@ -24,7 +25,7 @@ public class PerformServiceManager implements ApplicationManager{
try {
new ImporterImpl().init();
PerformanceManagerImpl.initDatabase();
}catch(Throwable t) {
log.warn("UNABLE TO INIT SERVICE UNDER SCOPE "+ScopeUtils.getCurrentScope(), t);
}

View File

@@ -19,10 +19,13 @@ public class ExportCSVQuery extends Query {
private CSVExportRequest theRequest;
private SchemaDefinition schema;
public ExportCSVQuery(String query, DBField[] fields, CSVExportRequest theRequest, SchemaDefinition schema) {
private Map<String,DBField> actualStructure;
public ExportCSVQuery(String query, DBField[] fields, CSVExportRequest theRequest, SchemaDefinition schema, Map<String,DBField> actualStructure) {
super(query, fields);
this.theRequest=theRequest;
this.schema=schema;
this.actualStructure=actualStructure;
}
@@ -35,38 +38,43 @@ public class ExportCSVQuery extends Query {
ArrayList<String> orGroups=new ArrayList<String>();
// AREA
if(theRequest.getAreas().size()>0) {
if(theRequest.getAreas().size()>0 && schema.getAreaField()!=null) {
String areaField=actualStructure.get(schema.getAreaField()).getFieldName();
StringBuilder areas=new StringBuilder();
for(String area:theRequest.getAreas()) {
areas.append(String.format("%1$s= '%2$s' OR", schema.getAreaField(),area));
areas.append(String.format("%1$s= '%2$s' OR", areaField,area));
}
orGroups.add(areas.substring(0,areas.lastIndexOf("OR")));
}
// QUARTER
if(theRequest.getQuarters().size()>0) {
if(theRequest.getQuarters().size()>0 && schema.getQuarterField()!=null) {
String quarterField=actualStructure.get(schema.getQuarterField()).getFieldName();
StringBuilder quarterString=new StringBuilder();
for(String q:theRequest.getQuarters()) {
quarterString.append(String.format("%1$s= '%2$s' OR", schema.getQuarterField(),q));
quarterString.append(String.format("%1$s= '%2$s' OR", quarterField,q));
}
orGroups.add(quarterString.substring(0,quarterString.lastIndexOf("OR")));
}
// SPECIES ID
if(theRequest.getSpeciesIds().size()>0) {
if(theRequest.getSpeciesIds().size()>0 && schema.getSpeciesField()!=null) {
String speciesField=actualStructure.get(schema.getSpeciesField()).getFieldName();
StringBuilder speciesString=new StringBuilder();
for(String s:theRequest.getSpeciesIds()) {
speciesString.append(String.format("%1$s= '%2$s' OR", schema.getSpeciesField(),s));
speciesString.append(String.format("%1$s= '%2$s' OR", speciesField,s));
}
orGroups.add(speciesString.substring(0,speciesString.lastIndexOf("OR")));
}
// PERIOD
if(theRequest.getPeriods().size()>0) {
if(theRequest.getPeriods().size()>0 && schema.getPeriodField()!=null) {
String periodField=actualStructure.get(schema.getPeriodField()).getFieldName();
StringBuilder periodString=new StringBuilder();
for(String p:theRequest.getPeriods()) {
periodString.append(String.format("%1$s= '%2$s' OR", schema.getPeriodField(),p));
periodString.append(String.format("%1$s= '%2$s' OR", periodField,p));
}
orGroups.add(periodString.substring(0,periodString.lastIndexOf("OR")));
}
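
The same guard-resolve-join pattern now repeats four times (area, quarter, species, period): skip the filter when the schema does not declare the field, otherwise translate the logical field name into the actual column through actualStructure before building the OR group. A hypothetical helper capturing that pattern (name and signature are illustrative, not part of the commit):

private void addOrGroup(ArrayList<String> orGroups, String schemaField, Collection<String> values) {
    if (values.size() > 0 && schemaField != null) {
        // logical schema field -> actual (possibly escaped/truncated) column name
        String column = actualStructure.get(schemaField).getFieldName();
        StringBuilder clause = new StringBuilder();
        for (String v : values)
            clause.append(String.format("%1$s= '%2$s' OR", column, v));
        // drop the trailing OR, as the original code does
        orGroups.add(clause.substring(0, clause.lastIndexOf("OR")));
    }
}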

View File

@@ -18,6 +18,7 @@ import org.gcube.application.perform.service.engine.dm.DMException;
import org.gcube.application.perform.service.engine.dm.DMUtils;
import org.gcube.application.perform.service.engine.dm.ImporterMonitor;
import org.gcube.application.perform.service.engine.model.BeanNotFound;
import org.gcube.application.perform.service.engine.model.DBField;
import org.gcube.application.perform.service.engine.model.DBField.ImportRoutine;
import org.gcube.application.perform.service.engine.model.DBQueryDescriptor;
import org.gcube.application.perform.service.engine.model.InternalException;
@@ -65,18 +66,24 @@ public class ImporterImpl implements Importer {
try {
conn.setAutoCommit(true);
PreparedStatement psOrphans=Queries.ORPHAN_IMPORTS.prepare(conn);
String hostname=getHostname();
DBField lockField=ImportRoutine.fields.get(ImportRoutine.LOCK);
PreparedStatement psOrphans=Queries.ORPHAN_IMPORTS.get(conn,new DBQueryDescriptor(lockField, hostname));
PreparedStatement psAcquire=Queries.ACQUIRE_IMPORT_ROUTINE.prepare(conn);
// set ps
ResultSet rsOrphans=psOrphans.executeQuery();
long monitoredCount=0l;
while(rsOrphans.next()) {
Long id=rsOrphans.getLong(ImportRoutine.ID);
try {
ImportRoutineDescriptor desc=Queries.rowToDescriptor(rsOrphans);
String hostname=getHostname();
DBQueryDescriptor acquireDesc=new DBQueryDescriptor().
add(ImportRoutine.fields.get(ImportRoutine.LOCK), hostname).
add(lockField,hostname).
add(ImportRoutine.fields.get(ImportRoutine.ID), id);
Queries.ACQUIRE_IMPORT_ROUTINE.fill(psAcquire, acquireDesc);
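
The hostname is now computed once, up front, and bound into both statements through the lock field: ORPHAN_IMPORTS selects routines not locked by this host, and ACQUIRE_IMPORT_ROUTINE takes them over. A condensed sketch of the reclaim loop (assuming psAcquire is executed after fill(); the monitoring restart is elided here as in the diff):

String hostname = getHostname();
DBField lockField = ImportRoutine.fields.get(ImportRoutine.LOCK);
PreparedStatement psOrphans = Queries.ORPHAN_IMPORTS.get(conn, new DBQueryDescriptor(lockField, hostname));
PreparedStatement psAcquire = Queries.ACQUIRE_IMPORT_ROUTINE.prepare(conn);
ResultSet rsOrphans = psOrphans.executeQuery();
while (rsOrphans.next()) {
    Long id = rsOrphans.getLong(ImportRoutine.ID);
    DBQueryDescriptor acquireDesc = new DBQueryDescriptor()
            .add(lockField, hostname)                              // new lock owner
            .add(ImportRoutine.fields.get(ImportRoutine.ID), id);  // routine to reclaim
    Queries.ACQUIRE_IMPORT_ROUTINE.fill(psAcquire, acquireDesc);
    // presumably followed by psAcquire.executeUpdate() and monitor restart
}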

View File

@@ -7,6 +7,7 @@ import java.io.Reader;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.HashMap;
@@ -18,6 +19,7 @@ import java.util.Set;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.gcube.application.perform.service.LocalConfiguration;
import org.gcube.application.perform.service.engine.DataBaseManager;
import org.gcube.application.perform.service.engine.PerformanceManager;
import org.gcube.application.perform.service.engine.dm.DMUtils;
@@ -83,6 +85,21 @@ public class PerformanceManagerImpl implements PerformanceManager{
}
public static void initDatabase() throws SQLException, InternalException {
DataBaseManager db=DataBaseManager.get();
Connection conn=db.getConnection();
Statement stmt=conn.createStatement();
for(Entry<AnalysisType,Set<ImportedTable>> entry:getAnalysisConfiguration().entrySet()) {
for(ImportedTable t:entry.getValue()) {
String createStmt=t.createStatement();
log.debug("Creating Table with stmt {} ",createStmt);
stmt.execute(createStmt);
}
}
if(Boolean.parseBoolean(LocalConfiguration.getProperty(LocalConfiguration.COMMIT_SCHEMA)))
conn.commit();
}
public static void importSchema(SchemaDefinition schema,String csvBasePath) throws IOException, SQLException, InternalException {
log.info("Loading schema {} ",schema);
@@ -95,7 +112,7 @@ public class PerformanceManagerImpl implements PerformanceManager{
AnalysisType analysisType=schema.getRelatedAnalysis();
String tablename=(analysisType.getId()+schema.getRelatedDescription()).toLowerCase().replaceAll(" ", "_");
String tablename=(analysisType.getId()+"_"+schema.getRelatedDescription()).toLowerCase().replaceAll(" ", "_");
@@ -103,9 +120,6 @@ public class PerformanceManagerImpl implements PerformanceManager{
tablename, schema,
csvFieldsDefinition);
table.create();
if(!analysisConfiguration.containsKey(analysisType))
analysisConfiguration.put(schema.getRelatedAnalysis(), new HashSet<>());
analysisConfiguration.get(schema.getRelatedAnalysis()).add(table);
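
Two things happen in this file: table creation moves out of ImportedTable.create() into the new initDatabase(), which executes each createStatement() and commits only when schema.load.commit is set, and the generated table name gains a separator. A worked example of the name fix (values are illustrative):

String id = "Hatchery";            // analysisType.getId()
String desc = "Individual Batch";  // schema.getRelatedDescription()
String before = (id + desc).toLowerCase().replaceAll(" ", "_");        // "hatcheryindividual_batch"
String after  = (id + "_" + desc).toLowerCase().replaceAll(" ", "_");  // "hatchery_individual_batch"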

View File

@@ -22,7 +22,7 @@ public class Query {
public Query(String query,DBField[] fields) {
this.query=query;
this.psFields=new ArrayList<>(Arrays.asList(fields));
this.psFields=fields!=null?new ArrayList<>(Arrays.asList(fields)):null;
}
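
This guard makes parameterless queries legal: with a null array, Arrays.asList(fields) would throw a NullPointerException. A usage sketch (query text illustrative):

Query plain = new Query("SELECT * FROM farms", null);  // psFields stays null: nothing to bind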

View File

@@ -113,9 +113,9 @@ public class DBField {
public static final String COMPANY_UUID="companyuuid";
public static final String ASSOCIATION_UUID="associationuuid";
public static final String FARM_LABEL="1234";
public static final String ASSOCIATION_LABEL="1234";
public static final String COMPANY_LABEL="1234";
public static final String FARM_LABEL="name";
public static final String ASSOCIATION_LABEL="association_name";
public static final String COMPANY_LABEL="company_name";
static {
@@ -123,8 +123,11 @@
fields.put(COMPANY_ID, new DBField(Types.BIGINT,COMPANY_ID));
fields.put(ASSOCIATION_ID, new DBField(Types.BIGINT,ASSOCIATION_ID));
fields.put(UUID, new DBField(Integer.MIN_VALUE,UUID));
fields.put(COMPANY_UUID, new DBField(Types.VARCHAR,COMPANY_UUID));
fields.put(ASSOCIATION_UUID, new DBField(Types.VARCHAR,ASSOCIATION_UUID));
fields.put(COMPANY_UUID, new DBField(Integer.MIN_VALUE,COMPANY_UUID));
fields.put(ASSOCIATION_UUID, new DBField(Integer.MIN_VALUE,ASSOCIATION_UUID));
fields.put(FARM_LABEL, new DBField(Types.VARCHAR,FARM_LABEL));
fields.put(ASSOCIATION_LABEL, new DBField(Types.VARCHAR,ASSOCIATION_LABEL));
fields.put(COMPANY_LABEL, new DBField(Types.VARCHAR,COMPANY_LABEL));
}
}
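
The two uuid columns switch from Types.VARCHAR to Integer.MIN_VALUE, matching the existing UUID entry. Integer.MIN_VALUE is not a java.sql.Types constant, so it presumably acts as this class's "no declared SQL type" sentinel; a guess at how such a sentinel is typically consumed (not from this commit):

if (field.getType() == Integer.MIN_VALUE)
    ps.setObject(index, value);                   // let the JDBC driver infer the type
else
    ps.setObject(index, value, field.getType());  // bind with the declared java.sql.Types code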

View File

@@ -68,7 +68,7 @@ public class ImportedTable {
for(DBField field:csvFieldsDefinition) {
String escaped=escapeString(field.getFieldName());
csvFields.add(escaped);
csvFields.add(field.getFieldName());
labels.put(field.getFieldName(), new DBField(field.getType(),escaped));
}
@@ -104,7 +104,7 @@ public class ImportedTable {
return new Query(insertSQL, queryFields.toArray(new DBField[queryFields.size()]));
}
public void create() throws SQLException, InternalException {
public String createStatement() {
StringBuilder fieldDefinitions=new StringBuilder();
for(DBField f:labels.values()) {
@@ -124,16 +124,8 @@
String.format( "%1$s bigint,"
+ "FOREIGN KEY (%1$s) REFERENCES "+ImportRoutine.TABLE+"("+ImportRoutine.ID+")",getRoutineIdField().getFieldName());
String stmt=String.format("CREATE TABLE IF NOT EXISTS %1$s (%2$s, %3$s)",
return String.format("CREATE TABLE IF NOT EXISTS %1$s (%2$s, %3$s)",
tablename,fieldDefinitions.substring(0,fieldDefinitions.lastIndexOf(",")),standardDefinitions);
Connection conn=DataBaseManager.get().getConnection();
try {
conn.createStatement().execute(stmt);
}finally {
conn.close();
}
}
@@ -187,7 +179,7 @@
FileWriter writer=null;
CSVPrinter printer=null;
try {
ExportCSVQuery exportQuery=new ExportCSVQuery("",null,request,schema);
ExportCSVQuery exportQuery=new ExportCSVQuery("",null,request,schema,labels);
exportQuery.setFieldList(labels.values());
exportQuery.setTablename(tablename);
@@ -231,7 +223,7 @@
File toReturn=File.createTempFile("csv_out", ".csv");
writer=new FileWriter(toReturn);
printer = CSVFormat.DEFAULT.withHeader().print(writer);
printer = CSVFormat.DEFAULT.withHeader(csvFields.toArray(new String[csvFields.size()])).print(writer);
printer.printRecords(csvRs);
@@ -256,7 +248,20 @@
return schema;
}
int MAX_LENGTH=25;
private String escapeString(String fieldname) {
return fieldname;
String toReturn=fieldname;
if(toReturn.length()>MAX_LENGTH)
toReturn=toReturn.substring(0, MAX_LENGTH);
DBField clashing=new DBField(0,"\""+toReturn+"\"");
int counter=1;
while(labels.containsValue(clashing)) {
clashing=new DBField(0,"\""+toReturn+"_"+counter+"\"");
counter++;
}
return clashing.getFieldName();
}
}
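
escapeString is no longer a pass-through: it truncates identifiers to MAX_LENGTH (25) characters, double-quotes them, and appends a numeric suffix until the candidate stops clashing with an already-registered column (assuming DBField equality compares field names). Worked example using headers from the sample CSV further down in this commit:

// All three discard counters share their first 25 characters,
// "number_of_fish_discarded_", so the de-duplication loop yields:
//   number_of_fish_discarded_with_head_deformities_n   -> "number_of_fish_discarded_"
//   number_of_fish_discarded_with_spinal_deformities_n -> "number_of_fish_discarded__1"
//   number_of_fish_discarded_with_fin_deformities_n    -> "number_of_fish_discarded__2"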

View File

@@ -5,7 +5,6 @@ import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.util.UUID;
import org.gcube.application.perform.service.engine.impl.PerformanceManagerImpl;
import org.gcube.contentmanagement.blobstorage.service.IClient;
import org.gcube.contentmanagement.blobstorage.transport.backend.RemoteBackendException;
import org.gcube.contentmanager.storageclient.wrapper.AccessType;
@@ -29,7 +28,8 @@ public class StorageUtils {
public static final String putOntoStorage(File source) throws RemoteBackendException, FileNotFoundException{
IClient client=getClient();
log.debug("Uploading local file "+source.getAbsolutePath());
return client.put(true).LFile(new FileInputStream(source)).RFile(UUID.randomUUID().toString());
String id=client.put(true).LFile(new FileInputStream(source)).RFile(UUID.randomUUID().toString());
return client.getHttpUrl().RFile(id);
}
}
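
putOntoStorage now resolves the uploaded object to a public HTTP link instead of returning the raw storage id, so callers (e.g. the CSV export) can hand out a directly downloadable URL. Usage sketch (file path illustrative):

File exported = new File("/tmp/csv_out.csv");        // hypothetical local file
String url = StorageUtils.putOntoStorage(exported);  // now an HTTP URL, not an opaque id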

View File

@@ -8,6 +8,7 @@ mapping-db.ep.category=Database
dm.importer.computationid=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PERFORMFISH_DATA_EXTRACTOR
schema.load=false
schema.load=true
schema.load.skipError=true
schema.load.commit=true
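
These keys map one-to-one to the constants added in LocalConfiguration above: with this change the service loads the schema at startup, skips per-file errors, and commits the created tables. A minimal consumption sketch:

boolean load   = Boolean.parseBoolean(LocalConfiguration.getProperty(LocalConfiguration.LOAD_SCHEMA));           // schema.load
boolean skip   = Boolean.parseBoolean(LocalConfiguration.getProperty(LocalConfiguration.SKIP_ON_SCHEMA_ERROR));  // schema.load.skipError
boolean commit = Boolean.parseBoolean(LocalConfiguration.getProperty(LocalConfiguration.COMMIT_SCHEMA));         // schema.load.commit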

View File

@@ -1,2 +0,0 @@
company_id,farm_id,operation_system,producer_association_affiliation,aggregated_batches_id,stocking_period,species,year_of_reference,number_of_batches_aggregated_in_a_single_record_n,date_of_stocking_for_the_first_batch_dd_mm_yy,larvae_at_1_dph_n,t_c_min_stocking,t_c_max_stocking,percentage_of_fish_with_non_inflated_swim_bladder_perc_on_1_dph_larvae,estimated_number_of_weaned_fish_n,rotifers_distribution_billions,artemia_distribution_kg,fish_with_head_deformities_perc_end_of_weaning,fish_with_spinal_deformities_percend_of_weaning,fish_with_fin_deformities_perc_end_of_weaning,t_c_min_end_of_weaning,t_c_max_end_of_weaning,number_of_fish_discarded_with_head_deformities_n,number_of_fish_discarded_with_spinal_deformities_n,number_of_fish_discarded_with_fin_deformities_n,fish_with_head_deformities_perc_2400_dd,fish_with_spinal_deformities_perc_2400_dd,fish_with_fin_deformities_perc_2400_dd,number_of_discarded_slow_grower_fish_n,number_of_fish_vaccinated_against_p_damselae_n,number_of_fish_vaccinated_against_v_anguillarum_n,number_of_fish_vaccinated_against_betanodavirus_n,estimated_number_of_fish_produced_n,date_of_last_closed_batch_dd_mm_yy,average_length_of_rearing_period_in_number_of_days_n,t_c_min_2400_dd,t_c_max_2400_dd,weaned_fish_perc,deformed_fish_at_2400dd_perc,discarded_slow_grower_fish_at_2400dd_perc,fish_produced_at_2400dd_perc,fish_produced_per_fte_employees_number_fish_fte_employees,survival_estimation_at_2400_dd_perc,head_deformities_at_end_of_weaning_perc,head_deformities_at_2400dd_perc,spinal_deformities_at_end_of_weaning_perc,spinal_deformities_at_2400dd_perc,fin_deformities_at_end_of_weaning_perc,fin_deformities_at_2400dd_perc,swim_bladder_non_inflation_at_500dd_perc,artemia_requirement_kg_of_artemia_million_fish_produced,rotifers_requirement_billions_rotifer_million_fish_produced,vaccinated_fish_against_p_damselae_at_2400dd_perc,vaccinated_fish_against_v_anguillarum_at_2400dd_perc,vaccinated_fish_against_betanodavirus_at_2400dd_perc,employees_2018,employees_2019,employees_2020
Company_3b92fc3c-4df4-4cd0-8301-63502986a731,HID,Partial recirculated system,Association_6f48cb69-7859-4524-b4f9-a126b05c08f3,12.0,Natural,S.aurata,-3.0,12.0,20.0,12.0,12.0,40.0,12.0,12.0,12.0,12.0,12.0,12.0,12.0,12.0,30.0,12.0,12.0,12.0,12.0,12.0,12.0,12.0,12.0,12.0,12.0,12.0,30.0,12.0,11.0,30.0,100.0,300.0,100.0,100.0,0,100.0,12.0,12.0,12.0,12.0,12.0,12.0,12.0,1000000.0,1000000.0,100.0,100.0,100.0,12.0,12.0,0.0

View File

@@ -0,0 +1,2 @@
Hatchery facility ID,Latitude (coordinates),Longitude (coordinates),Type of operation system,Number of FTE employees (season 2018-19) (n),Number of FTE employees (season 2019-20) (n),Number of FTE employees (season 2020-21) (n),Producer Association affiliation
12.0,12,12,Open water system,12.0,12.0,12.0,Asociación Empresarial de Acuicultura de España (APROMAR) - Spain

View File

@@ -0,0 +1,2 @@
Pre-grow facility ID,Latitude,Longitude,Type of operation system,Producer Association affiliation
12.0,12.0,12.0,Open flow-through System,Associazione Piscicoltori Italiani (API) - Italy

View File

@@ -0,0 +1,3 @@
description=AnagraphicTable
routine=internal_routine_id
csv=csv/Grow_out_Aggregated_Batch_Data_Entry_KPI_aggregated.csv

View File

@@ -4,4 +4,5 @@ company=company_id
association=producer_association_affiliation
batch=batch_id
routine=internal_routine_id
csv=csv/Grow_out_Individual_Batch_Data_Entry_KPI.csv
csv=csv/Grow_out_Individual_Batch_Data_Entry_KPI.csv
enable_analysis=true

View File

@@ -0,0 +1,3 @@
description=AnagraphicTable
routine=internal_routine_id
csv=csv/Grow_out_Aggregated_Batch_Data_Entry_KPI_aggregated.csv

View File

@@ -0,0 +1,3 @@
description=AnagraphicTable
routine=internal_routine_id
csv=csv/Hatchery_Aggregated_Batch_Data_Entry_KPI_aggregated.csv

View File

@@ -2,7 +2,7 @@ description=BatchesTable
farm=farm_id
company=company_id
association=producer_association_affiliation
batch=batch_id
batch=aggregated_batches_id
routine=internal_routine_id
csv=csv/Hatchery_Aggregated_Batch_Data_Entry_KPI.csv
enable_analysis=true

View File

@@ -0,0 +1,3 @@
description=AnagraphicTable
routine=internal_routine_id
csv=csv/Hatchery_Aggregated_Batch_Data_Entry_KPI_aggregated.csv

View File

@@ -3,5 +3,5 @@ farm=farm_id
company=company_id
batch=batch_id
routine=internal_routine_id
csv=csv/Hatchery_Individual_Batch_Data_Entry_KPI.csv
csv=csv/Hatchery_ Individual_Batch_Data_Entry_KPI.csv
enable_analysis=true

View File

@@ -0,0 +1,3 @@
description=AnagraphicTable
routine=internal_routine_id
csv=csv/Hatchery_Aggregated_Batch_Data_Entry_KPI_aggregated.csv

View File

@@ -3,5 +3,5 @@ farm=farm_id
company=company_id
batch=batch_id
routine=internal_routine_id
csv=csv/Hatchery_Individual_Batch_Data_Entry_KPI_CLOSED_BATCHES.csv
csv=csv/Hatchery_ Individual_Batch_Data_Entry_KPI_CLOSED_BATCHES.csv
enable_analysis=true

View File

@@ -0,0 +1,3 @@
description=AnagraphicTable
routine=internal_routine_id
csv=csv/Pre-grow_Batch_Data_Entry_KPI_anagraphic.csv

View File

@@ -1,7 +1,7 @@
description=BatchesTable
farm=farm_id
company=company_id
association=producer_association_id
batch=aggregated_batch_id
batch=batch_id
routine=internal_routine_id
csv=csv/Grow_out_Aggregated_Batch_Data_Entry_KPI.csv
csv=csv/Pre-grow_Batch_Data_Entry_KPI.csv
enable_analysis=true

View File

@@ -0,0 +1,3 @@
description=AnagraphicTable
routine=internal_routine_id
csv=csv/Pre-grow_Batch_Data_Entry_KPI_anagraphic.csv

View File

@@ -3,5 +3,5 @@ farm=farm_id
company=company_id
batch=batch_id
routine=internal_routine_id
csv=csv/Pre-grow_Batch_Data_Entry_KPI.csv
csv=csv/Pre-grow_Batch_Data_Entry_KPI_CLOSED_BATCHES.csv
enable_analysis=true

View File

@@ -58,6 +58,14 @@ public class InitializeDataBase {
+ "primary key ("+ImportRoutine.ID+"))");
stmt.executeUpdate("CREATE OR REPLACE VIEW "+Farm.TABLE+" AS ("
+ "Select f.farmid as "+Farm.FARM_ID+", f.uuid as "+Farm.UUID+", c.companyid as "+Farm.COMPANY_ID+", "
+ "c.uuid as "+Farm.COMPANY_UUID+", a.associationid as "+Farm.ASSOCIATION_ID+", a.uuid as "+Farm.ASSOCIATION_UUID+", "
+ "c.name as "+Farm.COMPANY_LABEL+", a.name as "+Farm.ASSOCIATION_LABEL+", f.name as "+Farm.FARM_LABEL+" "
+ "FROM farms as f INNER JOIN companies as c ON f.companyid=c.companyid "
+ "INNER JOIN associations as a ON c.associationid = a. associationid)");
// CREATE FARM VIEW
try {
ResultSet rs=stmt.executeQuery("Select * from "+Farm.TABLE);
@@ -66,11 +74,7 @@ public class InitializeDataBase {
}
}catch(SQLException e) {
// Expected error on table not found, trying to create it
stmt.executeUpdate("CREATE VIEW "+Farm.TABLE+" AS ("
+ "Select f.farmid as "+Farm.FARM_ID+", f.uuid as "+Farm.UUID+", c.companyid as "+Farm.COMPANY_ID+", "
+ "c.uuid as "+Farm.COMPANY_UUID+", a.associationid as "+Farm.ASSOCIATION_ID+", a.uuid as "+Farm.ASSOCIATION_UUID+" "
+ "FROM farms as f INNER JOIN companies as c ON f.companyid=c.companyid "
+ "INNER JOIN associations as a ON c.associationid = a. associationid)");
}
// stmt.executeQuery("CREATE VIEW suca as SELECT 1");
@@ -106,7 +110,7 @@ public class InitializeDataBase {
// conn.commit();
conn.commit();
}
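
The farm view is now created unconditionally with CREATE OR REPLACE (instead of only in the catch branch when the probe SELECT failed) and gains the three human-readable name columns. Assuming the Farm constants resolve to the column names visible in the DBField hunk (companyuuid, associationuuid, company_name, association_name, name; farmid, companyid, associationid, uuid and the view name itself are guesses), the concatenated statement expands roughly to:

// CREATE OR REPLACE VIEW farms_view AS (
//   SELECT f.farmid AS farmid, f.uuid AS uuid, c.companyid AS companyid,
//          c.uuid AS companyuuid, a.associationid AS associationid, a.uuid AS associationuuid,
//          c.name AS company_name, a.name AS association_name, f.name AS name
//   FROM farms AS f
//     INNER JOIN companies AS c ON f.companyid = c.companyid
//     INNER JOIN associations AS a ON c.associationid = a.associationid )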

View File

@@ -5,16 +5,18 @@ import java.net.MalformedURLException;
import java.nio.file.Paths;
import java.sql.SQLException;
import org.gcube.application.perform.service.engine.impl.PerformanceManagerImpl;
import org.gcube.application.perform.service.engine.model.InternalException;
public class LoadSchemaTest {
public static void main(String[] args) throws MalformedURLException, IOException, SQLException, InternalException {
TokenSetter.set("/gcube/devsec");
LocalConfiguration.init(Paths.get("src/main/webapp/WEB-INF/config.properties").toUri().toURL());
PerformServiceLifecycleManager.initSchema("src/main/webapp/WEB-INF");
TokenSetter.set("/gcube/preprod/preVRE");
PerformanceManagerImpl.initDatabase();
}
}