Fabio Sinibaldi 2019-01-22 18:12:55 +00:00
parent 7a0a558425
commit 54b1f808a9
3 changed files with 37 additions and 52 deletions

ImporterImpl.java

@@ -20,7 +20,6 @@ import org.gcube.application.perform.service.engine.dm.ImporterMonitor;
import org.gcube.application.perform.service.engine.model.BeanNotFound;
import org.gcube.application.perform.service.engine.model.DBField.ImportRoutine;
import org.gcube.application.perform.service.engine.model.DBQueryDescriptor;
import org.gcube.application.perform.service.engine.model.ISQueryDescriptor;
import org.gcube.application.perform.service.engine.model.InternalException;
import org.gcube.application.perform.service.engine.model.importer.ImportRequest;
import org.gcube.application.perform.service.engine.model.importer.ImportRoutineDescriptor;
@@ -41,21 +40,9 @@ public class ImporterImpl implements Importer {
private static final Logger log= LoggerFactory.getLogger(ImporterImpl.class);
private static ISQueryDescriptor isQueryDescriptor=null;
// private static synchronized ISQueryDescriptor getISQueryDescriptor() {
// if(isQueryDescriptor==null) {
// isQueryDescriptor=
// new ISQueryDescriptor(
// LocalConfiguration.getProperty(LocalConfiguration.MAPPING_DB_ENDPOINT_NAME), null,
// LocalConfiguration.getProperty(LocalConfiguration.MAPPING_DB_ENDPOINT_CATEGORY));
// }
// return isQueryDescriptor;
// }
private static final String getHostname() {
try{
ApplicationContext context=ContextProvider.get();

PerformanceManagerImpl.java

@@ -47,7 +47,7 @@ public class PerformanceManagerImpl implements PerformanceManager{
}
@Override
public Map<String, String> generateCSV(CSVExportRequest request) throws InvalidRequestException, SQLException, InternalException, IOException {
public Map<String, String> generateCSV(CSVExportRequest request) throws SQLException, InvalidRequestException, InternalException, IOException {
log.trace("Serving {} ",request);
HashMap<String,String> toReturn=new HashMap<>();
Set<ImportedTable> tables=getAnalysisSet(request);
@@ -66,7 +66,7 @@ public class PerformanceManagerImpl implements PerformanceManager{
}
@Override
public void loadOutputData(ImportRoutineDescriptor desc) throws IOException, SQLException, InternalException {
public void loadOutputData(ImportRoutineDescriptor desc) throws SQLException, InvalidRequestException, InternalException, IOException{
log.info("Importing output for {} ",desc);
ComputationId computation=DMUtils.getComputation(desc);
Map<String,String> outputs=DMUtils.getOutputFiles(computation);

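The loadOutputData hunk ends right after the computation outputs are looked up; how the label-to-file map is consumed lives further down in PerformanceManagerImpl and is not part of this diff. Purely as an illustrative sketch (the helper name and logging are assumptions, not code from this commit), the Map<String,String> returned by DMUtils.getOutputFiles(computation) could be inspected like this:

    // Hypothetical helper, not part of the commit: walk the label -> file-reference map
    // returned by DMUtils.getOutputFiles(computation) and log each entry.
    private static void logOutputs(Map<String, String> outputs) {
        for (Map.Entry<String, String> output : outputs.entrySet()) {
            log.debug("Output {} -> {}", output.getKey(), output.getValue());
        }
    }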
Performance.java

@@ -1,7 +1,6 @@
package org.gcube.application.perform.service.rest;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import javax.inject.Inject;
@@ -11,14 +10,17 @@ import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.gcube.application.perform.service.PerformServiceManager;
import org.gcube.application.perform.service.ServiceConstants;
import org.gcube.application.perform.service.engine.PerformanceManager;
import org.gcube.application.perform.service.engine.model.CSVExportRequest;
import org.gcube.application.perform.service.engine.model.InternalException;
import org.gcube.application.perform.service.engine.model.InvalidRequestException;
import org.gcube.application.perform.service.engine.model.importer.AnalysisType;
import org.gcube.smartgears.annotations.ManagedBy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -30,8 +32,8 @@ public class Performance {
private static final Logger log= LoggerFactory.getLogger(Performance.class);
// @Inject
// private PerformanceManager perform;
@Inject
private PerformanceManager perform;
/**
@@ -47,42 +49,38 @@ public class Performance {
@Produces(MediaType.APPLICATION_JSON)
public Map<String,String> getPerformance(@Context UriInfo info){
try {
// MultivaluedMap<String, String> parameters=info.getQueryParameters();
// log.info("Forming request from {}",parameters);
// String batchType=InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.BATCH_TYPE_PARAMETER, true).get(0);
//
// CSVExportRequest request=new CSVExportRequest( new AnalysisType(batchType, batchType));
//
// request.addAreas(InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.AREA_PARAMETER, false));
// request.addQuarters(InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.QUARTER_PARAMETER, false));
// for(String s:InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.FARM_ID_PARAMETER, true))
// request.addFarmId(Long.parseLong(s));
//
// request.addSpecies(InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.SPECIES_ID_PARAMETER, false));
// request.addPeriods(InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.PERIOD_PARAMETER, false));
//
//
// log.debug("Export request : {} ",request);
//
// return perform.generateCSV(request);
HashMap<String,String> toReturn=new HashMap();
toReturn.put("BatchesTable", "aslfgurt-dfgumk374");
toReturn.put("AntiparasiticTable", "aslfgurt-dfgumk374");
return toReturn;
MultivaluedMap<String, String> parameters=info.getQueryParameters();
log.info("Forming request from {}",parameters);
String batchType=InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.BATCH_TYPE_PARAMETER, true).get(0);
CSVExportRequest request=new CSVExportRequest( new AnalysisType(batchType, batchType));
request.addAreas(InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.AREA_PARAMETER, false));
request.addQuarters(InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.QUARTER_PARAMETER, false));
for(String s:InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.FARM_ID_PARAMETER, true))
request.addFarmId(Long.parseLong(s));
request.addSpecies(InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.SPECIES_ID_PARAMETER, false));
request.addPeriods(InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.PERIOD_PARAMETER, false));
// }catch(NumberFormatException e) {
// throw new WebApplicationException(String.format("Unable to parse parameters."),Response.Status.BAD_REQUEST);
// }catch(SQLException e) {
// log.debug("Exception while getting Batch",e);
// throw new WebApplicationException("Unexpected Exception occurred while dealing with database.", e,Response.Status.INTERNAL_SERVER_ERROR);
// } catch (InvalidRequestException e) {
// log.debug("Exception while getting Batch",e);
// throw new WebApplicationException("Unable to search for Batch. ",e,Response.Status.BAD_REQUEST);
// } catch (InternalException e) {
// log.warn("Unexpected Exception while getting Batch",e);
// throw new WebApplicationException("Unexpected Exception.", e,Response.Status.INTERNAL_SERVER_ERROR);
log.debug("Export request : {} ",request);
return perform.generateCSV(request);
}catch(NumberFormatException e) {
throw new WebApplicationException(String.format("Unable to parse parameters."),Response.Status.BAD_REQUEST);
}catch(SQLException e) {
log.debug("Exception while getting Batch",e);
throw new WebApplicationException("Unexpected Exception occurred while dealing with database.", e,Response.Status.INTERNAL_SERVER_ERROR);
} catch (InvalidRequestException e) {
log.debug("Exception while getting Batch",e);
throw new WebApplicationException("Unable to search for Batch. ",e,Response.Status.BAD_REQUEST);
} catch (InternalException e) {
log.warn("Unexpected Exception while getting Batch",e);
throw new WebApplicationException("Unexpected Exception.", e,Response.Status.INTERNAL_SERVER_ERROR);
}catch(Throwable t) {
log.warn("Unexpected Exception while getting Batch",t);
throw new WebApplicationException("Unexpected Exception.", t,Response.Status.INTERNAL_SERVER_ERROR);
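The last hunk is truncated before the method closes. Pieced together from the lines above, the restored handler has the following shape. This is a sketch, not the file's verbatim content: the @GET annotation, the final closing braces, and the tail of the Throwable catch fall outside the shown hunk and are assumed; everything else is taken from the hunk.

    // Sketch of the restored getPerformance endpoint, reconstructed from the diff above.
    @GET // assumed: the HTTP method annotation sits above the shown hunk
    @Produces(MediaType.APPLICATION_JSON)
    public Map<String, String> getPerformance(@Context UriInfo info) {
        try {
            MultivaluedMap<String, String> parameters = info.getQueryParameters();
            log.info("Forming request from {}", parameters);
            String batchType = InterfaceCommons.getParameter(parameters,
                    ServiceConstants.Performance.BATCH_TYPE_PARAMETER, true).get(0);

            CSVExportRequest request = new CSVExportRequest(new AnalysisType(batchType, batchType));
            request.addAreas(InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.AREA_PARAMETER, false));
            request.addQuarters(InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.QUARTER_PARAMETER, false));
            for (String s : InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.FARM_ID_PARAMETER, true))
                request.addFarmId(Long.parseLong(s));
            request.addSpecies(InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.SPECIES_ID_PARAMETER, false));
            request.addPeriods(InterfaceCommons.getParameter(parameters, ServiceConstants.Performance.PERIOD_PARAMETER, false));

            log.debug("Export request : {} ", request);
            return perform.generateCSV(request);
        } catch (NumberFormatException e) {
            throw new WebApplicationException("Unable to parse parameters.", Response.Status.BAD_REQUEST);
        } catch (SQLException e) {
            log.debug("Exception while getting Batch", e);
            throw new WebApplicationException("Unexpected Exception occurred while dealing with database.", e, Response.Status.INTERNAL_SERVER_ERROR);
        } catch (InvalidRequestException e) {
            log.debug("Exception while getting Batch", e);
            throw new WebApplicationException("Unable to search for Batch. ", e, Response.Status.BAD_REQUEST);
        } catch (InternalException e) {
            log.warn("Unexpected Exception while getting Batch", e);
            throw new WebApplicationException("Unexpected Exception.", e, Response.Status.INTERNAL_SERVER_ERROR);
        } catch (Throwable t) {
            log.warn("Unexpected Exception while getting Batch", t);
            // Assumed from the matching catch blocks above; the hunk is cut off at this point.
            throw new WebApplicationException("Unexpected Exception.", t, Response.Status.INTERNAL_SERVER_ERROR);
        }
    }

One small deviation: the original NumberFormatException handler wraps a constant message in String.format with no arguments; the sketch drops that no-op call.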