Added some logging to support debugging

This commit is contained in:
Giancarlo Panichi 2020-05-28 11:05:40 +02:00
parent c18f95a480
commit abced0e5ad
1 changed file with 643 additions and 482 deletions

View File

@ -67,13 +67,11 @@ import com.google.gwt.user.server.rpc.RemoteServiceServlet;
* Jan 16, 2019
*/
@SuppressWarnings("serial")
public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
implements PerformFishAnalyticsService {
public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet implements PerformFishAnalyticsService {
/** The log. */
protected static Logger log = LoggerFactory.getLogger(PerformFishAnalyticsServiceImpl.class);
/** The date format. */
private SimpleDateFormat dateFormat = new SimpleDateFormat("dd-MM-yyyy 'at' HH:mm:ss-SSS z");
@ -84,20 +82,22 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
* @throws Exception
* the exception
*/
private EntityManagerFactory getDBFactory()
throws Exception {
private EntityManagerFactory getDBFactory() throws Exception {
if (ContextUtil.isSessionExpired(this.getThreadLocalRequest()))
throw new SessionExpired("The session is expired");
//PortalContextInfo pContext = ContextUtil.getPortalContext(this.getThreadLocalRequest());
// PortalContextInfo pContext =
// ContextUtil.getPortalContext(this.getThreadLocalRequest());
EntityManagerFactoryCreator.instanceLocalMode();
EntityManagerFactory dbFactory = EntityManagerFactoryCreator.getEntityManagerFactory();
new DatabaseUtil().fillDatabaseIfEmpty(dbFactory, this.getThreadLocalRequest().getServletContext());
return dbFactory;
}
/* (non-Javadoc)
/*
* (non-Javadoc)
*
* @see javax.servlet.GenericServlet#destroy()
*/
@Override
@ -108,8 +108,10 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
EntityManagerFactoryCreator.instanceLocalMode();
EntityManagerFactory dbFactory = EntityManagerFactoryCreator.getEntityManagerFactory();
dbFactory.close();
// String dbFolderPath = EntityManagerFactoryCreator.getPersistenceFolderPath();
// FileUtil.deleteDirectoryRecursion(new File(dbFolderPath).toPath());
// String dbFolderPath =
// EntityManagerFactoryCreator.getPersistenceFolderPath();
// FileUtil.deleteDirectoryRecursion(new
// File(dbFolderPath).toPath());
log.info("DB Factory closed correctly");
} catch (Exception e) {
log.info("Error occurred on closing the DB Factory: ", e);
@ -134,8 +136,7 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
try {
EntityManagerFactory dbFactory = getDBFactory();
GenericPersistenceDaoBuilder<Population> builderPopulation =
new GenericPersistenceDaoBuilder<Population>(
GenericPersistenceDaoBuilder<Population> builderPopulation = new GenericPersistenceDaoBuilder<Population>(
dbFactory, Population.class.getSimpleName());
List<Population> listPopulation = builderPopulation.getPersistenceEntity().getList();
log.info("List of {} are: {}", Population.class.getSimpleName(), listPopulation);
@ -150,8 +151,10 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
}
}
List<PopulationType> listPopulationTypeDTO = ToAvoidIndirectSerialization.toGWTSerializable(listPopulationType, population, true);
log.info("Returning "+listPopulationTypeDTO.size()+ " type/s for population name: "+populationName);
List<PopulationType> listPopulationTypeDTO = ToAvoidIndirectSerialization
.toGWTSerializable(listPopulationType, population, true);
log.info("Returning " + listPopulationTypeDTO.size() + " type/s for population name: "
+ populationName);
return listPopulationTypeDTO;
}
}
@ -164,8 +167,12 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
}
}
/* (non-Javadoc)
* @see org.gcube.portlets.user.performfishanalytics.client.PerformFishAnalyticsService#getPopulationTypeWithListKPI(java.lang.String)
/*
* (non-Javadoc)
*
* @see org.gcube.portlets.user.performfishanalytics.client.
* PerformFishAnalyticsService#getPopulationTypeWithListKPI(java.lang.
* String)
*/
@Override
public PopulationType getPopulationTypeWithListKPI(String populationTypeId) throws Exception {
@ -176,8 +183,7 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
try {
EntityManagerFactory dbFactory = getDBFactory();
GenericPersistenceDaoBuilder<PopulationType> builderPopulationType =
new GenericPersistenceDaoBuilder<PopulationType>(
GenericPersistenceDaoBuilder<PopulationType> builderPopulationType = new GenericPersistenceDaoBuilder<PopulationType>(
dbFactory, PopulationType.class.getSimpleName());
Map<String, String> filterMap = new HashMap<String, String>();
@ -192,12 +198,17 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
List<KPI> listGWTKPI = new ArrayList<KPI>(selectedPopType.getListKPI().size());
for (KPI toKPI : selectedPopType.getListKPI()) {
KPI gwtKPI = convert(toKPI);
gwtKPI.setPopulationType(selectedPopType);//I'm setting population type only at first level
//gwtKPI.setLeaf(toKPI.getListKPI()==null || toKPI.getListKPI().isEmpty());
gwtKPI.setPopulationType(selectedPopType);// I'm setting
// population type
// only at first
// level
// gwtKPI.setLeaf(toKPI.getListKPI()==null ||
// toKPI.getListKPI().isEmpty());
listGWTKPI.add(gwtKPI);
}
List<PopulationType> listPopulationTypeDTO = ToAvoidIndirectSerialization.toGWTSerializable(listPopType, null, false);
List<PopulationType> listPopulationTypeDTO = ToAvoidIndirectSerialization.toGWTSerializable(listPopType,
null, false);
PopulationType toReturn = listPopulationTypeDTO.get(0);
toReturn.setListKPI(listGWTKPI);
@ -206,7 +217,8 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
log.debug(kpi.toString());
}
}
log.info("Returning type "+toReturn.getName()+" having list of KPI count: "+toReturn.getListKPI().size());
log.info("Returning type " + toReturn.getName() + " having list of KPI count: "
+ toReturn.getListKPI().size());
return toReturn;
} catch (Exception e) {
log.error("Error on loading list of KPI for popluation type with id: " + populationTypeId, e);
@ -217,7 +229,8 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
/**
* Convert.
*
* @param kpi the kpi
* @param kpi
* the kpi
* @return the kpi
*/
private KPI convert(KPI kpi) {
@ -248,33 +261,38 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
/**
* Gets the gwtkpi.
*
* @param toKPI the to kpi
* @param populationType the population type
* @param toKPI
* the to kpi
* @param populationType
* the population type
* @return the gwtkpi
*/
private KPI getGWTKPI(KPI toKPI, PopulationType populationType) {
KPI gwtKPI = new KPI(toKPI.getId(),toKPI.getCode(),toKPI.getName(),toKPI.getDescription(), null,populationType,toKPI.getDeepIndex());
KPI gwtKPI = new KPI(toKPI.getId(), toKPI.getCode(), toKPI.getName(), toKPI.getDescription(), null,
populationType, toKPI.getDeepIndex());
gwtKPI.setLeaf(toKPI.getListKPI() == null || toKPI.getListKPI().isEmpty());
return gwtKPI;
}
/**
* Check grant to access farm id.
*
* @param farmID the farm id
* @param farmID
* the farm id
* @return true, if successful
* @throws Exception the exception
* @throws Exception
* the exception
*/
@Override
public boolean checkGrantToAccessFarmID(String farmID) throws Exception {
try {
if (ContextUtil.isSessionExpired(this.getThreadLocalRequest()))
throw new SessionExpired("The session is expired");
log.info("Checking the rights to access the farmID {} for current user", farmID);
if (ContextUtil.isWithinPortal()) {
log.info("On Portal");
GCubeUser currentUser = PortalContext.getConfiguration().getCurrentUser(this.getThreadLocalRequest());
long userId = currentUser.getUserId();
log.info("User {} has the userId {}", currentUser.getUsername(), userId);
@ -288,7 +306,8 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
log.debug("Parsed FARM_ID as long is: " + farmId);
if (farmId == -1) {
throw new Exception("Your input farm ID seems to be not valid. Please contact the D4Science support");
throw new Exception(
"Your input farm ID seems to be not valid. Please contact the D4Science support");
}
long groupId;
@ -296,7 +315,8 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
groupId = PortalContext.getConfiguration().getCurrentGroupId(this.getThreadLocalRequest());
} catch (Exception e) {
log.error("Error getting the group id: ", e);
throw new Exception("Your input farm ID seems to be not valid. Please contact the D4Science support");
throw new Exception(
"Your input farm ID seems to be not valid. Please contact the D4Science support");
}
log.debug("The group id is: " + groupId);
@ -320,15 +340,24 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
log.info("I'm in testing mode, grant the rights to access the farmID {} for current user", farmID);
return true;
}
} catch (Throwable e) {
log.error(e.getLocalizedMessage(), e);
throw e;
}
}
/* (non-Javadoc)
* @see org.gcube.portlets.user.performfishanalytics.client.PerformFishAnalyticsService#validParameters(org.gcube.portlets.user.performfishanalytics.shared.performfishservice.PerformFishInitParameter)
/*
* (non-Javadoc)
*
* @see org.gcube.portlets.user.performfishanalytics.client.
* PerformFishAnalyticsService#validParameters(org.gcube.portlets.user.
* performfishanalytics.shared.performfishservice.PerformFishInitParameter)
*/
@Override
public PerformFishInitParameter validParameters(PerformFishInitParameter initParams) throws Exception {
try {
Map<String, String> inputParameters = initParams.getParameters();
String farmID = inputParameters.get(PerformFishAnalyticsConstant.PERFORM_FISH_FARMID_PARAM);
boolean grantAccess = checkGrantToAccessFarmID(farmID);
@ -336,30 +365,39 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
throw new Exception("You have no rights to access to this FARM. You does not belong to it.");
return initParams;
} catch (Throwable e) {
log.error(e.getLocalizedMessage(), e);
throw e;
}
}
/* (non-Javadoc)
* @see org.gcube.portlets.user.performfishanalytics.client.PerformFishAnalyticsService#submitRequestToPerformFishService(java.util.Map)
/*
* (non-Javadoc)
*
* @see org.gcube.portlets.user.performfishanalytics.client.
* PerformFishAnalyticsService#submitRequestToPerformFishService(java.util.
* Map)
*/
@Override
public PerformFishResponse submitRequestToPerformFishService(Map<String, List<String>> mapParameters) throws Exception{
public PerformFishResponse submitRequestToPerformFishService(Map<String, List<String>> mapParameters)
throws Exception {
try {
log.info("Submitting request with parameters: " + mapParameters);
ServiceParameters performFishService = null;
try {
performFishService = ContextUtil.getPerformFishService(this.getThreadLocalRequest());
} catch (Exception e) {
log.error("Error on getting the perform fish service from IS: " + performFishService, e);
throw new Exception("Error on getting the perform fish service from IS: "+performFishService+" Please contact the suport");
throw new Exception("Error on getting the perform fish service from IS: " + performFishService
+ " Please contact the suport");
}
String serviceURL = performFishService.getUrl() + "/performance";
log.debug("Calling service: " + serviceURL);
HttpCallerUtil httpCaller = new HttpCallerUtil(serviceURL, null, null);
String gCubeToken = ContextUtil.getPortalContext(this.getThreadLocalRequest()).getUserToken();
//mapParameters.put("gcube-token", Arrays.asList(ContextUtil.getPortalContext(this.getThreadLocalRequest()).getUserToken()));
// mapParameters.put("gcube-token",
// Arrays.asList(ContextUtil.getPortalContext(this.getThreadLocalRequest()).getUserToken()));
String response;
try {
Date startTime = getCurrentTimeToDate(System.currentTimeMillis());
@ -367,7 +405,10 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
response = httpCaller.callGet(null, mapParameters, gCubeToken);
Date endTime = getCurrentTimeToDate(System.currentTimeMillis());
log.info("The response is: " + response + " with status: " + httpCaller.getStatusCode());
log.info("The perform-fish response returned just now {}. Response returned in {} "+ TimeUnit.MILLISECONDS.toString(), dateFormat.format(endTime), getDateDiff(startTime, endTime, TimeUnit.MILLISECONDS));
log.info(
"The perform-fish response returned just now {}. Response returned in {} "
+ TimeUnit.MILLISECONDS.toString(),
dateFormat.format(endTime), getDateDiff(startTime, endTime, TimeUnit.MILLISECONDS));
if (response == null)
throw new Exception("The response is null");
@ -378,21 +419,25 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
UUID respSessionID = UUID.randomUUID();
// ContextUtil.getPerformFishService(UUID.randomUUID());
return new PerformFishResponse(theResponseParams, respSessionID.toString());
}
catch (Exception e) {
log.error("Error interacting with the service: "+performFishService.getUrl() +" with parameters: "+mapParameters, e);
throw new Exception("There was an error interacting with the "+ContextUtil.PERFORM_SERVICE+" in this VRE ("
} catch (Exception e) {
log.error("Error interacting with the service: " + performFishService.getUrl() + " with parameters: "
+ mapParameters, e);
throw new Exception(
"There was an error interacting with the " + ContextUtil.PERFORM_SERVICE + " in this VRE ("
+ ContextUtil.getPortalContext(this.getThreadLocalRequest()).getCurrentScope() + ")"
+ ". Please report this issue at www.d4science.org/contact-us");
}
} catch (Throwable e) {
log.error(e.getLocalizedMessage(), e);
throw e;
}
}
/**
* Gets the current time to date.
*
* @param currentTime the current time
* @param currentTime
* the current time
* @return the current time to date
*/
private Date getCurrentTimeToDate(long currentTime) {
@ -404,149 +449,218 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
/**
* Get a diff between two dates.
*
* @param date1 the oldest date
* @param date2 the newest date
* @param timeUnit the unit in which you want the diff
* @param date1
* the oldest date
* @param date2
* the newest date
* @param timeUnit
* the unit in which you want the diff
* @return the diff value, in the provided unit
*/
public static long getDateDiff(Date date1, Date date2, TimeUnit timeUnit) {
private static long getDateDiff(Date date1, Date date2, TimeUnit timeUnit) {
long diffInMillies = date2.getTime() - date1.getTime();
return timeUnit.convert(diffInMillies, TimeUnit.MILLISECONDS);
}
/* (non-Javadoc)
* @see org.gcube.portlets.user.performfishanalytics.client.PerformFishAnalyticsService#callingDataMinerPerformFishCorrelationAnalysis(org.gcube.portlets.user.performfishanalytics.shared.performfishservice.PerformFishResponse, java.util.Map)
/*
* (non-Javadoc)
*
* @see org.gcube.portlets.user.performfishanalytics.client.
* PerformFishAnalyticsService#
* callingDataMinerPerformFishCorrelationAnalysis(org.gcube.portlets.user.
* performfishanalytics.shared.performfishservice.PerformFishResponse,
* java.util.Map)
*/
@Override
public DataMinerResponse callingDataMinerPerformFishCorrelationAnalysis(PerformFishResponse peformFishReponse, Map<String, List<String>> mapParameters) throws Exception{
public DataMinerResponse callingDataMinerPerformFishCorrelationAnalysis(PerformFishResponse peformFishReponse,
Map<String, List<String>> mapParameters) throws Exception {
try {
log.info("Validating Perform-Fish service response...");
String URLToBatchesTable = peformFishReponse.getMapParameters().get(PerformFishAnalyticsConstant.BATCHES_TABLE);
String URLToBatchesTable = peformFishReponse.getMapParameters()
.get(PerformFishAnalyticsConstant.BATCHES_TABLE);
if (URLToBatchesTable == null || URLToBatchesTable.isEmpty())
throw new Exception("Something seems "+PerformFishAnalyticsConstant.BATCHES_TABLE+ " is null or emty");
throw new Exception(
"Something seems " + PerformFishAnalyticsConstant.BATCHES_TABLE + " is null or emty");
//Checking that the perform-fish PerformFishAnalyticsConstant.BATCHES_TABLE has at least 1 row
// Checking that the perform-fish
// PerformFishAnalyticsConstant.BATCHES_TABLE has at least 1 row
CSVFile csvFile = readCSVFile(URLToBatchesTable);
log.info("CSVFile read from {} - {}", URLToBatchesTable, csvFile);
if(csvFile==null || csvFile.getValueRows() == null || csvFile.getValueRows().size()<PerformFishAnalyticsConstant.CSV_BATCHES_TABLE_MINIMUM_SIZE){
log.warn("The "+PerformFishAnalyticsConstant.BATCHES_TABLE+" CSV rows are"+csvFile.getValueRows()+". It is less than "+PerformFishAnalyticsConstant.CSV_BATCHES_TABLE_MINIMUM_SIZE);
throw new Exception("Your request does not produce enough data for the analysis, please change your selection and try again");
if (csvFile == null || csvFile.getValueRows() == null
|| csvFile.getValueRows().size() < PerformFishAnalyticsConstant.CSV_BATCHES_TABLE_MINIMUM_SIZE) {
log.warn("The " + PerformFishAnalyticsConstant.BATCHES_TABLE + " CSV rows are" + csvFile.getValueRows()
+ ". It is less than " + PerformFishAnalyticsConstant.CSV_BATCHES_TABLE_MINIMUM_SIZE);
throw new Exception(
"Your request does not produce enough data for the analysis, please change your selection and try again");
}
log.info("Calling the DM service with client parameters: " + mapParameters.toString());
//PortalContextInfo pContext = ContextUtil.getPortalContext(this.getThreadLocalRequest());
// PortalContextInfo pContext =
// ContextUtil.getPortalContext(this.getThreadLocalRequest());
Map<String, List<String>> dmRequestParameters = new HashMap<String, List<String>>();
dmRequestParameters.put("request", Arrays.asList("Execute"));
dmRequestParameters.put("service", Arrays.asList("WPS"));
dmRequestParameters.put("Version", Arrays.asList("1.0.0"));
//dmRequestParameters.put("gcube-token", Arrays.asList(pContext.getUserToken()));
// dmRequestParameters.put("gcube-token",
// Arrays.asList(pContext.getUserToken()));
dmRequestParameters.put("lang", Arrays.asList("en-US"));
dmRequestParameters.put("Identifier", Arrays.asList("org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PERFORMFISH_ANALYSIS"));
dmRequestParameters.put("Identifier", Arrays.asList(
"org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PERFORMFISH_ANALYSIS"));
dmRequestParameters.putAll(mapParameters);
return callTheDataMinerPerformFishCorrelationAnalysis(peformFishReponse, dmRequestParameters);
} catch (Throwable e) {
log.error(e.getLocalizedMessage(), e);
throw e;
}
}
/* (non-Javadoc)
* @see org.gcube.portlets.user.performfishanalytics.client.PerformFishAnalyticsService#callingDataMinerPerformFishCorrelationAnalysis(org.gcube.portlets.user.performfishanalytics.shared.performfishservice.PerformFishResponse, java.util.Map)
/*
* (non-Javadoc)
*
* @see org.gcube.portlets.user.performfishanalytics.client.
* PerformFishAnalyticsService#
* callingDataMinerPerformFishCorrelationAnalysis(org.gcube.portlets.user.
* performfishanalytics.shared.performfishservice.PerformFishResponse,
* java.util.Map)
*/
@Override
public DataMinerResponse callingDataMinerPerformFishAnnualCorrelationAnalysis(PerformFishResponse peformFishReponse, Map<String, List<String>> mapParameters) throws Exception{
public DataMinerResponse callingDataMinerPerformFishAnnualCorrelationAnalysis(PerformFishResponse peformFishReponse,
Map<String, List<String>> mapParameters) throws Exception {
try {
log.info("Calling the DM service with client parameters: " + mapParameters.toString());
//PortalContextInfo pContext = ContextUtil.getPortalContext(this.getThreadLocalRequest());
// PortalContextInfo pContext =
// ContextUtil.getPortalContext(this.getThreadLocalRequest());
Map<String, List<String>> dmRequestParameters = new HashMap<String, List<String>>();
dmRequestParameters.put("request", Arrays.asList("Execute"));
dmRequestParameters.put("service", Arrays.asList("WPS"));
dmRequestParameters.put("Version", Arrays.asList("1.0.0"));
//dmRequestParameters.put("gcube-token", Arrays.asList(pContext.getUserToken()));
// dmRequestParameters.put("gcube-token",
// Arrays.asList(pContext.getUserToken()));
dmRequestParameters.put("lang", Arrays.asList("en-US"));
dmRequestParameters.put("Identifier", Arrays.asList("org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PERFORMFISH_ANALYSIS_ANNUAL"));
dmRequestParameters.put("Identifier", Arrays.asList(
"org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PERFORMFISH_ANALYSIS_ANNUAL"));
dmRequestParameters.putAll(mapParameters);
return callTheDataMinerPerformFishCorrelationAnalysis(peformFishReponse, dmRequestParameters);
} catch (Throwable e) {
log.error(e.getLocalizedMessage(), e);
throw e;
}
}
@Override
public DataMinerResponse callDMServiceToLoadSynopticTable(PerformFishResponse performFishResponse,
Map<String, List<String>> mapParameters) throws Exception {
try {
log.info("Validating Perform-Fish service response...");
String URLToBatchesTable = performFishResponse.getMapParameters().get(PerformFishAnalyticsConstant.BATCHES_TABLE);
String URLToBatchesTable = performFishResponse.getMapParameters()
.get(PerformFishAnalyticsConstant.BATCHES_TABLE);
if (URLToBatchesTable == null || URLToBatchesTable.isEmpty())
throw new Exception("Something seems "+PerformFishAnalyticsConstant.BATCHES_TABLE+ " is null or emty");
throw new Exception(
"Something seems " + PerformFishAnalyticsConstant.BATCHES_TABLE + " is null or emty");
//Checking that the perform-fish PerformFishAnalyticsConstant.BATCHES_TABLE has at least 1 row
// Checking that the perform-fish
// PerformFishAnalyticsConstant.BATCHES_TABLE has at least 1 row
// CSVFile csvFile = readCSVFile(URLToBatchesTable);
// log.info("CSVFile read from {} - {}", URLToBatchesTable, csvFile);
// if(csvFile==null || csvFile.getValueRows() == null || csvFile.getValueRows().size()<PerformFishAnalyticsConstant.CSV_BATCHES_TABLE_MINIMUM_SIZE){
// log.warn("The "+PerformFishAnalyticsConstant.BATCHES_TABLE+" CSV rows are"+csvFile.getValueRows()+". It is less than "+PerformFishAnalyticsConstant.CSV_BATCHES_TABLE_MINIMUM_SIZE);
// throw new Exception("Your request does not produce enough data for the analysis, please change your selection and try again");
// log.info("CSVFile read from {} - {}", URLToBatchesTable,
// csvFile);
// if(csvFile==null || csvFile.getValueRows() == null ||
// csvFile.getValueRows().size()<PerformFishAnalyticsConstant.CSV_BATCHES_TABLE_MINIMUM_SIZE){
// log.warn("The "+PerformFishAnalyticsConstant.BATCHES_TABLE+" CSV
// rows
// are"+csvFile.getValueRows()+". It is less than
// "+PerformFishAnalyticsConstant.CSV_BATCHES_TABLE_MINIMUM_SIZE);
// throw new Exception("Your request does not produce enough data
// for
// the analysis, please change your selection and try again");
// }
log.info("Calling the DM service with client parameters: " + mapParameters.toString());
//PortalContextInfo pContext = ContextUtil.getPortalContext(this.getThreadLocalRequest());
// PortalContextInfo pContext =
// ContextUtil.getPortalContext(this.getThreadLocalRequest());
Map<String, List<String>> dmRequestParameters = new HashMap<String, List<String>>();
dmRequestParameters.put("request", Arrays.asList("Execute"));
dmRequestParameters.put("service", Arrays.asList("WPS"));
dmRequestParameters.put("Version", Arrays.asList("1.0.0"));
//dmRequestParameters.put("gcube-token", Arrays.asList(pContext.getUserToken()));
// dmRequestParameters.put("gcube-token",
// Arrays.asList(pContext.getUserToken()));
dmRequestParameters.put("lang", Arrays.asList("en-US"));
//dmRequestParameters.put("Identifier", Arrays.asList("org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PERFORMFISH_SYNOPTICTABLE_BATCH"));
// dmRequestParameters.put("Identifier",
// Arrays.asList("org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PERFORMFISH_SYNOPTICTABLE_BATCH"));
dmRequestParameters.putAll(mapParameters);
return callTheDataMiner(dmRequestParameters);
}
} catch (Throwable e) {
log.error(e.getLocalizedMessage(), e);
throw e;
}
}
@Override
public DataMinerResponse callDMServiceToLoadSynopticAnnualTable(PerformFishResponse thePerformFishResponse,
Map<String, List<String>> mapParameters) throws Exception {
try {
log.info("Validating Perform-Fish service response...");
log.info("Calling the DM service with client parameters: " + mapParameters.toString());
//PortalContextInfo pContext = ContextUtil.getPortalContext(this.getThreadLocalRequest());
// PortalContextInfo pContext =
// ContextUtil.getPortalContext(this.getThreadLocalRequest());
Map<String, List<String>> dmRequestParameters = new HashMap<String, List<String>>();
dmRequestParameters.put("request", Arrays.asList("Execute"));
dmRequestParameters.put("service", Arrays.asList("WPS"));
dmRequestParameters.put("Version", Arrays.asList("1.0.0"));
//dmRequestParameters.put("gcube-token", Arrays.asList(pContext.getUserToken()));
// dmRequestParameters.put("gcube-token",
// Arrays.asList(pContext.getUserToken()));
dmRequestParameters.put("lang", Arrays.asList("en-US"));
dmRequestParameters.put("Identifier", Arrays.asList("org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PERFORMFISH_SYNOPTIC_TABLE_FARM"));
dmRequestParameters.put("Identifier", Arrays.asList(
"org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PERFORMFISH_SYNOPTIC_TABLE_FARM"));
dmRequestParameters.putAll(mapParameters);
return callTheDataMiner(dmRequestParameters);
} catch (Throwable e) {
log.error(e.getLocalizedMessage(), e);
throw e;
}
}
/**
* Call the data miner perform fish correlation analysis.
*
* @param peformFishReponse the peform fish reponse
* @param mapParameters the map parameters
* @param peformFishReponse
* the peform fish reponse
* @param mapParameters
* the map parameters
* @return the data miner response
* @throws Exception the exception
* @throws Exception
* the exception
*/
private DataMinerResponse callTheDataMinerPerformFishCorrelationAnalysis(PerformFishResponse peformFishReponse, Map<String, List<String>> dmRequestParameters) throws Exception{
private DataMinerResponse callTheDataMinerPerformFishCorrelationAnalysis(PerformFishResponse peformFishReponse,
Map<String, List<String>> dmRequestParameters) throws Exception {
PortalContextInfo pContext = ContextUtil.getPortalContext(this.getThreadLocalRequest());
ServiceParameters dataMinerService = ContextUtil.getDataMinerService(this.getThreadLocalRequest());
log.info("Found DM service: " + dataMinerService.getUrl() + " int this scope: " + pContext.getCurrentScope());
// if(!ContextUtil.isWithinPortal()){
// dataMinerService = new ServiceParameters("http://dataminer-prototypes.d4science.org/wps/WebProcessingService", null, null, dmRequestParameters);
// log.info("I'm in TEST MODE replacing it with HARD CODED: "+dataMinerService);
// dataMinerService = new
// ServiceParameters("http://dataminer-prototypes.d4science.org/wps/WebProcessingService",
// null, null, dmRequestParameters);
// log.info("I'm in TEST MODE replacing it with HARD CODED:
// "+dataMinerService);
// }
dataMinerService.setProperties(dmRequestParameters);
// Asdding client parameters to DM service request
DMServiceResponse dmResponse = null;
String response;
try {
response = new HttpCallerUtil(dataMinerService.getUrl(), null, null).performGETRequestWithRetry(dmRequestParameters, pContext.getUserToken(), 5);
response = new HttpCallerUtil(dataMinerService.getUrl(), null, null)
.performGETRequestWithRetry(dmRequestParameters, pContext.getUserToken(), 5);
if (response == null) {
log.error("The presponse returned is null");
throw new Exception("The presponse returned is null");
@ -557,9 +671,9 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
throw new Exception("The service did not produce any result. Change your selection and try again.");
}
if (dmResponse == null || dmResponse.isWithError())
throw new Exception("The response returned by DM service contains an Exception Status. (The call is: "+dmResponse.getHttpRequestURL()+"). Please report this issue at www.d4science.org/contact-us");
throw new Exception("The response returned by DM service contains an Exception Status. (The call is: "
+ dmResponse.getHttpRequestURL() + "). Please report this issue at www.d4science.org/contact-us");
try {
@ -568,14 +682,16 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
for (DataMinerOutputData dataMinerOutputData : listOut) {
// I'm using this specific output data of DM
if (dataMinerOutputData.getFileDescription().toLowerCase().contains("outputcharts")) {
log.info("The output: "+dataMinerOutputData.getFileDescription()+ " with: "+dataMinerOutputData.getMimeType()+" is the candidate to unzip");
log.info("The output: " + dataMinerOutputData.getFileDescription() + " with: "
+ dataMinerOutputData.getMimeType() + " is the candidate to unzip");
toDMOutputData = dataMinerOutputData;
break;
}
}
if (toDMOutputData == null || toDMOutputData.getPublicURL() == null)
throw new Exception("The response returned by DM service does not contain a file to unzip with name: 'outputcharts'. Please report this issue at www.d4science.org/contact-us");
throw new Exception(
"The response returned by DM service does not contain a file to unzip with name: 'outputcharts'. Please report this issue at www.d4science.org/contact-us");
String theZipFileURL = toDMOutputData.getPublicURL();
log.info("I'm using the file: " + theZipFileURL);
@ -599,12 +715,18 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
}
}
/* (non-Javadoc)
* @see org.gcube.portlets.user.performfishanalytics.client.PerformFishAnalyticsService#callingDataMinerPerformFishAnalysis(org.gcube.portlets.user.performfishanalytics.shared.performfishservice.PerformFishResponse, java.util.Map)
/*
* (non-Javadoc)
*
* @see org.gcube.portlets.user.performfishanalytics.client.
* PerformFishAnalyticsService#callingDataMinerPerformFishAnalysis(org.gcube
* .portlets.user.performfishanalytics.shared.performfishservice.
* PerformFishResponse, java.util.Map)
*/
@Override
public DataMinerResponse callingDataMinerPerformFishAnalysis(Map<String, List<String>> algorithmMapParameters) throws Exception{
public DataMinerResponse callingDataMinerPerformFishAnalysis(Map<String, List<String>> algorithmMapParameters)
throws Exception {
try {
log.info("Calling the DM service with algorithm parameters: " + algorithmMapParameters.toString());
Map<String, List<String>> dmRequestParameters = new HashMap<String, List<String>>();
@ -612,18 +734,29 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
dmRequestParameters.put("service", Arrays.asList("WPS"));
dmRequestParameters.put("Version", Arrays.asList("1.0.0"));
dmRequestParameters.put("lang", Arrays.asList("en-US"));
dmRequestParameters.put("Identifier", Arrays.asList("org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PERFORMFISH_ANALYSIS"));
dmRequestParameters.put("Identifier", Arrays.asList(
"org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PERFORMFISH_ANALYSIS"));
dmRequestParameters.putAll(algorithmMapParameters);
return callTheDataMiner(dmRequestParameters);
} catch (Throwable e) {
log.error(e.getLocalizedMessage(), e);
throw e;
}
}
/* (non-Javadoc)
* @see org.gcube.portlets.user.performfishanalytics.client.PerformFishAnalyticsService#callingDataMinerPerformFishAnalysis(org.gcube.portlets.user.performfishanalytics.shared.performfishservice.PerformFishResponse, java.util.Map)
/*
* (non-Javadoc)
*
* @see org.gcube.portlets.user.performfishanalytics.client.
* PerformFishAnalyticsService#callingDataMinerPerformFishAnalysis(org.gcube
* .portlets.user.performfishanalytics.shared.performfishservice.
* PerformFishResponse, java.util.Map)
*/
@Override
public DataMinerResponse callingDataMinerPerformFishAnnualAnalysis(Map<String, List<String>> algorithmMapParameters) throws Exception{
public DataMinerResponse callingDataMinerPerformFishAnnualAnalysis(Map<String, List<String>> algorithmMapParameters)
throws Exception {
try {
log.info("Calling the DM service with algorithm parameters: " + algorithmMapParameters.toString());
Map<String, List<String>> dmRequestParameters = new HashMap<String, List<String>>();
@ -631,24 +764,31 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
dmRequestParameters.put("service", Arrays.asList("WPS"));
dmRequestParameters.put("Version", Arrays.asList("1.0.0"));
dmRequestParameters.put("lang", Arrays.asList("en-US"));
dmRequestParameters.put("Identifier", Arrays.asList("org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PERFORMFISH_ANALYSIS_ANNUAL"));
dmRequestParameters.put("Identifier", Arrays.asList(
"org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PERFORMFISH_ANALYSIS_ANNUAL"));
dmRequestParameters.putAll(algorithmMapParameters);
return callTheDataMiner(dmRequestParameters);
} catch (Throwable e) {
log.error(e.getLocalizedMessage(), e);
throw e;
}
}
/**
* Call the data miner.
*
* @param dmServiceRequestParameters the dm request parameters
* @param dmServiceRequestParameters
* the dm request parameters
* @return the data miner response
* @throws Exception the exception
* @throws Exception
* the exception
*/
private DataMinerResponse callTheDataMiner(Map<String, List<String>> dmServiceRequestParameters) throws Exception {
ServiceParameters dataMinerService = ContextUtil.getDataMinerService(this.getThreadLocalRequest());
log.info("Found DM service: "+dataMinerService.getUrl() + " int this scope: "+ContextUtil.getPortalContext(this.getThreadLocalRequest()).getCurrentScope());
log.info("Found DM service: " + dataMinerService.getUrl() + " int this scope: "
+ ContextUtil.getPortalContext(this.getThreadLocalRequest()).getCurrentScope());
dataMinerService.setProperties(dmServiceRequestParameters);
// Adding client parameters to the DM service request
@ -662,7 +802,8 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
throw new Exception("Error retrieving the token: " + token);
}
try {
response = new HttpCallerUtil(dataMinerService.getUrl(), null, null).performGETRequestWithRetry(dmServiceRequestParameters,token, 5);
response = new HttpCallerUtil(dataMinerService.getUrl(), null, null)
.performGETRequestWithRetry(dmServiceRequestParameters, token, 5);
if (response == null) {
log.error("The presponse returned is null");
throw new Exception("The presponse returned is null");
@ -674,7 +815,8 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
}
if (dmResponse == null || dmResponse.isWithError())
throw new Exception("The response returned by DM service contains an Exception Status. (The call is: "+dmResponse.getHttpRequestURL()+"). Please report this issue at www.d4science.org/contact-us");
throw new Exception("The response returned by DM service contains an Exception Status. (The call is: "
+ dmResponse.getHttpRequestURL() + "). Please report this issue at www.d4science.org/contact-us");
try {
@ -683,20 +825,23 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
for (DataMinerOutputData dataMinerOutputData : listOut) {
// I'm using this specific output data of DM
if (dataMinerOutputData.getFileDescription().toLowerCase().contains("outputcharts")) {
log.info("The output: "+dataMinerOutputData.getFileDescription()+ " with: "+dataMinerOutputData.getMimeType()+" is the candidate to unzip");
log.info("The output: " + dataMinerOutputData.getFileDescription() + " with: "
+ dataMinerOutputData.getMimeType() + " is the candidate to unzip");
toDMOutputData = dataMinerOutputData;
break;
}
if (dataMinerOutputData.getFileDescription().toLowerCase().contains("outputfile")) {
log.info("The output: "+dataMinerOutputData.getFileDescription()+ " with: "+dataMinerOutputData.getMimeType()+" is the candidate to unzip");
log.info("The output: " + dataMinerOutputData.getFileDescription() + " with: "
+ dataMinerOutputData.getMimeType() + " is the candidate to unzip");
toDMOutputData = dataMinerOutputData;
break;
}
}
if (toDMOutputData == null || toDMOutputData.getPublicURL() == null)
throw new Exception("The response returned by DM service does not contain a file to unzip with name: 'outputcharts' or 'outputfile'. Please report this issue at www.d4science.org/contact-us");
throw new Exception(
"The response returned by DM service does not contain a file to unzip with name: 'outputcharts' or 'outputfile'. Please report this issue at www.d4science.org/contact-us");
String theZipFileURL = toDMOutputData.getPublicURL();
log.info("I'm using the file: " + theZipFileURL);
@ -717,14 +862,14 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
}
}
/**
* Manage outputs for perform fish analysis.
*
* @param output the output
* @param output
* the output
* @return the list
*/
public List<OutputFile> manageOutputsForPerformFishAnalysis(List<OutputFile> output) {
private List<OutputFile> manageOutputsForPerformFishAnalysis(List<OutputFile> output) {
List<OutputFile> newOutputFiles = new ArrayList<OutputFile>();
try {
@ -734,14 +879,17 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
// FINDING THE FILE WITH THE LEGEND
for (OutputFile outputFile : output) {
log.trace("outputFile: {}", outputFile.getName());
if(outputFile.getName().toLowerCase().contains("legend") && outputFile.getDataType().equals(FileContentType.CSV)){
if (outputFile.getName().toLowerCase().contains("legend")
&& outputFile.getDataType().equals(FileContentType.CSV)) {
log.debug("Found legend file: {}", outputFile.getName());
CSVFile theLegendFile = getCSVFile(outputFile, false);
List<CSVRow> rows = theLegendFile.getValueRows();
// CREATING FROM *_legend_* CSV
//THE LEGEND WITH FIRST VALUE AS KEY AND REMAINING VALUES AS PROPERTIES
// THE LEGEND WITH FIRST VALUE AS KEY AND REMAINING VALUES
// AS PROPERTIES
for (CSVRow csvRow : rows) {
theLegendMap.put(csvRow.getListValues().get(0), csvRow.getListValues().subList(1, csvRow.getListValues().size()));
theLegendMap.put(csvRow.getListValues().get(0),
csvRow.getListValues().subList(1, csvRow.getListValues().size()));
}
break;
}
@ -760,20 +908,26 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
if (outputFile.getDataType().equals(FileContentType.CSV)) {
//FINDING THE FILE *index* TO CREATE A NEW CSV REPLACING THE LABELS 'A','B','C', etc. WITH THE NAMES (THE KPI NAMES) CONTAINED IN THE LEGEND
// FINDING THE FILE *index* TO CREATE A NEW CSV
// REPLACING THE LABELS 'A','B','C', etc. WITH THE NAMES
// (THE KPI NAMES) CONTAINED IN THE LEGEND
if (toNameLower.contains("index")) {
CSVFile theCorrelationMatrixIndexCSVFile = getCSVFile(outputFile, true);
try {
theOutputFile = createCSVWithLegendValues(theCorrelationMatrixIndexCSVFile, theLegendMap);
theOutputFile = createCSVWithLegendValues(theCorrelationMatrixIndexCSVFile,
theLegendMap);
} catch (Exception e) {
log.warn("Error thrown creating the CSV File with legend returning the original output file {}", outputFile);
log.warn(
"Error thrown creating the CSV File with legend returning the original output file {}",
outputFile);
theOutputFile = outputFile;
}
// break;
}
}
//RETURNING ALSO THE file correlation_matrix.csv for applying the COLORS
// RETURNING ALSO THE file correlation_matrix.csv for
// applying the COLORS
newOutputFiles.add(theOutputFile);
}
@ -791,16 +945,19 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
}
/**
* Creates the csv with legend values.
*
* @param theCorrelationFile the correlation file
* @param theLegendMap the legend map
* @param theCorrelationFile
* the correlation file
* @param theLegendMap
* the legend map
* @return the output file
* @throws Exception the exception
* @throws Exception
* the exception
*/
private OutputFile createCSVWithLegendValues(CSVFile theCorrelationFile, Map<String, List<String>> theLegendMap) throws Exception{
private OutputFile createCSVWithLegendValues(CSVFile theCorrelationFile, Map<String, List<String>> theLegendMap)
throws Exception {
CSVWriter cswWriter = null;
try {
@ -873,7 +1030,8 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
/**
* Removes the last char.
*
* @param str the str
* @param str
* the str
* @return the string
*/
private static String removeLastChar(String str) {
@ -887,14 +1045,16 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
/**
* Gets the CSV file.
*
* @param file the file
* @param deleteAfter the delete after
* @param file
* the file
* @param deleteAfter
* the delete after
* @return the CSV file
* @throws Exception the exception
* @throws Exception
* the exception
*/
@Override
public CSVFile getCSVFile(OutputFile file, boolean deleteAfter) throws Exception {
File theFile = null;
try {
if (file == null || file.getServerLocation() == null || file.getServerLocation().isEmpty()) {
@ -924,13 +1084,14 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
}
}
/**
* Gets the CSV file.
*
* @param theFileURL the file URL
* @param theFileURL
* the file URL
* @return the CSV file
* @throws Exception the exception
* @throws Exception
* the exception
*/
@Override
public CSVFile readCSVFile(String theFileURL) throws Exception {
@ -965,20 +1126,22 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
}
}
/**
* Copy to byte array.
*
* @param url the url
* @param url
* the url
* @return the byte[]
* @throws Exception the exception
* @throws Exception
* the exception
*/
public byte[] copyToByteArray(URL url) throws Exception{
private byte[] copyToByteArray(URL url) throws Exception {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
InputStream is = null;
try {
is = url.openStream();
byte[] byteChunk = new byte[4096]; // Or whatever size you want to read in at a time.
byte[] byteChunk = new byte[4096]; // Or whatever size you want to
// read in at a time.
int n;
while ((n = is.read(byteChunk)) > 0) {
@ -986,12 +1149,10 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
}
return baos.toByteArray();
}
catch (IOException e) {
} catch (IOException e) {
log.error("Failed while reading bytes from %s: %s", url.toExternalForm(), e.getMessage());
throw new Exception("Copy to byte array error");
}
finally {
} finally {
if (is != null) {
try {
is.close();
@ -1003,15 +1164,14 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
}
}
/**
* Gets the image file.
*
* @param file the file
* @param file
* the file
* @return the image file
* @throws Exception the exception
* @throws Exception
* the exception
*/
@Override
public String getImageFile(OutputFile file) throws Exception {
@ -1024,7 +1184,8 @@ public class PerformFishAnalyticsServiceImpl extends RemoteServiceServlet
theFile = new File(file.getServerLocation());
byte[] imageContent = Files.readAllBytes(theFile.toPath());
// System.out.println("<img src='data:image/png;base64," + DatatypeConverter.printBase64Binary(imageInByte) + "'></img>");
// System.out.println("<img src='data:image/png;base64," +
// DatatypeConverter.printBase64Binary(imageInByte) + "'></img>");
return "<img src='data:image/png;base64," + DatatypeConverter.printBase64Binary(imageContent) + "'></img>";
} catch (Exception e) {